gt
stringclasses 1
value | context
stringlengths 2.05k
161k
|
---|---|
//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.06.04 at 07:58:30 PM BST
//
package elsevier.jaxb.math.mathml;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlIDREF;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
/**
* <p>Java class for malignmark.type complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="malignmark.type">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attGroup ref="{http://www.w3.org/1998/Math/MathML}malignmark.attlist"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "malignmark.type")
@XmlRootElement(name = "malignmark")
public class Malignmark {

    /* "edge" attribute; the getter reports "left" when the document omits it. */
    @XmlAttribute
    protected String edge;

    /* "class" attribute, bound as an NMTOKENS token list; lazily created. */
    @XmlAttribute(name = "class")
    @XmlSchemaType(name = "NMTOKENS")
    protected List<String> clazzs;

    /* "style" attribute, free-form style text. */
    @XmlAttribute
    protected String style;

    /* "xref" attribute, an IDREF resolved by JAXB to the referenced object. */
    @XmlAttribute
    @XmlIDREF
    @XmlSchemaType(name = "IDREF")
    protected Object xref;

    /* "id" attribute, the document-unique XML ID (whitespace collapsed on unmarshal). */
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;

    /* xlink:href attribute, bound as an anyURI string. */
    @XmlAttribute(namespace = "http://www.w3.org/1999/xlink")
    @XmlSchemaType(name = "anyURI")
    protected String href;

    /* Catch-all for attributes that have no typed property on this class. */
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Returns the edge property, falling back to {@code "left"} when the
     * attribute was absent from the document.
     *
     * @return the edge value, never {@code null}
     */
    public String getEdge() {
        return edge == null ? "left" : edge;
    }

    /**
     * Sets the edge property.
     *
     * @param value allowed object is {@link String}
     */
    public void setEdge(String value) {
        this.edge = value;
    }

    /**
     * Returns the live list backing the clazzs property. Modifications made to
     * the returned list are reflected inside this JAXB object, which is why no
     * {@code set} method exists; for example, add an item with
     * {@code getClazzs().add(newItem)}. Allowed element type: {@link String}.
     *
     * @return the live, never-{@code null} list of class tokens
     */
    public List<String> getClazzs() {
        if (clazzs == null) {
            clazzs = new ArrayList<String>();
        }
        return clazzs;
    }

    /**
     * Returns the style property.
     *
     * @return possible object is {@link String}
     */
    public String getStyle() {
        return this.style;
    }

    /**
     * Sets the style property.
     *
     * @param value allowed object is {@link String}
     */
    public void setStyle(String value) {
        this.style = value;
    }

    /**
     * Returns the xref property.
     *
     * @return possible object is {@link Object}
     */
    public Object getXref() {
        return this.xref;
    }

    /**
     * Sets the xref property.
     *
     * @param value allowed object is {@link Object}
     */
    public void setXref(Object value) {
        this.xref = value;
    }

    /**
     * Returns the id property.
     *
     * @return possible object is {@link String}
     */
    public String getId() {
        return this.id;
    }

    /**
     * Sets the id property.
     *
     * @param value allowed object is {@link String}
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Returns the href property.
     *
     * @return possible object is {@link String}
     */
    public String getHref() {
        return this.href;
    }

    /**
     * Sets the href property.
     *
     * @param value allowed object is {@link String}
     */
    public void setHref(String value) {
        this.href = value;
    }

    /**
     * Returns the map of attributes not bound to any typed property on this
     * class, keyed by attribute name with string values. The map is live: new
     * attributes may be added by mutating it directly, so there is no setter.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return this.otherAttributes;
    }
}
|
|
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.*;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
@Test(groups = "sql-findReferences")
public class SQLFindReferencesTest extends DocumentDBBaseTest {

  private static final String WORKPLACE = "Workplace";
  private static final String WORKER    = "Worker";
  private static final String CAR       = "Car";

  // RIDs captured during populateDatabase() and asserted against in the tests.
  private ORID carID;
  private ORID johnDoeID;
  private ORID janeDoeID;
  private ORID chuckNorrisID;
  private ORID jackBauerID;
  private ORID ctuID;
  private ORID fbiID;

  @Parameters(value = "url")
  public SQLFindReferencesTest(@Optional String url) {
    super(url);
  }

  /**
   * Extracts the "referredBy" RID collection from the first (and, per these
   * tests, only) document of a FIND REFERENCES result set. Centralizes the
   * unchecked cast that was previously repeated in every assertion.
   */
  @SuppressWarnings("unchecked")
  private static Collection<OIdentifiable> referredBy(Collection<ODocument> result) {
    return (Collection<OIdentifiable>) result.iterator().next().field("referredBy");
  }

  /** FIND REFERENCES on a plain RID and on a sub-query, without class/cluster filters. */
  @Test
  public void findSimpleReference() {
    Collection<ODocument> result = database.command(new OCommandSQL("find references " + carID)).execute();
    Assert.assertEquals(result.size(), 1);
    // The car is referenced only by John Doe's "car" link.
    Assert.assertEquals(referredBy(result).iterator().next(), johnDoeID);

    // Same target expressed as a sub-query must yield the same answer.
    result = database.command(new OCommandSQL("find references ( select from " + carID + ")")).execute();
    Assert.assertEquals(result.size(), 1);
    Assert.assertEquals(referredBy(result).iterator().next(), johnDoeID);

    // Chuck Norris is referenced from both workplaces (CTU and FBI).
    result = database.command(new OCommandSQL("find references " + chuckNorrisID)).execute();
    Assert.assertEquals(result.size(), 1);
    Collection<OIdentifiable> rids = referredBy(result);
    Assert.assertEquals(rids.size(), 2);
    for (OIdentifiable rid : rids) {
      Assert.assertTrue(rid.equals(ctuID) || rid.equals(fbiID));
    }

    // Nothing links to John Doe, so "referredBy" is empty.
    result = database.command(new OCommandSQL("find references " + johnDoeID)).execute();
    Assert.assertEquals(result.size(), 1);
    Assert.assertEquals(referredBy(result).size(), 0);
  }

  /** FIND REFERENCES restricted to explicit class and cluster targets. */
  @Test
  public void findReferenceByClassAndClusters() {
    Collection<ODocument> result = database.command(new OCommandSQL("find references " + janeDoeID + " [" + WORKPLACE + "]"))
        .execute();
    Assert.assertEquals(result.size(), 1);
    // Jane Doe appears only in CTU's "workers" list.
    Assert.assertTrue(referredBy(result).iterator().next().equals(ctuID));

    result = database.command(new OCommandSQL("find references " + jackBauerID + " [" + WORKPLACE + ",cluster:" + CAR + "]"))
        .execute();
    Assert.assertEquals(result.size(), 1);
    // Jack Bauer is CTU's boss, one of FBI's workers, and the car's owner.
    Collection<OIdentifiable> rids = referredBy(result);
    Assert.assertEquals(rids.size(), 3);
    for (OIdentifiable rid : rids) {
      Assert.assertTrue(rid.equals(ctuID) || rid.equals(fbiID) || rid.equals(carID));
    }

    // John Doe is referenced by nothing in any of the listed targets.
    result = database.command(
        new OCommandSQL("find references " + johnDoeID + " [" + WORKPLACE + "," + CAR + ",cluster:" + WORKER + "]")).execute();
    Assert.assertEquals(result.size(), 1);
    Assert.assertEquals(referredBy(result).size(), 0);
  }

  // (sic: "Enviroment" typo kept — TestNG lifecycle method, renaming is unnecessary churn.)
  @BeforeClass
  public void createTestEnviroment() {
    createSchema();
    populateDatabase();
  }

  /** Creates the Worker/Workplace/Car classes with their link properties. */
  private void createSchema() {
    OClass worker = database.getMetadata().getSchema().createClass(WORKER);
    OClass workplace = database.getMetadata().getSchema().createClass(WORKPLACE);
    OClass car = database.getMetadata().getSchema().createClass(CAR);
    worker.createProperty("name", OType.STRING);
    worker.createProperty("surname", OType.STRING);
    worker.createProperty("colleagues", OType.LINKLIST, worker);
    worker.createProperty("car", OType.LINK, car);
    workplace.createProperty("name", OType.STRING);
    workplace.createProperty("boss", OType.LINK, worker);
    workplace.createProperty("workers", OType.LINKLIST, worker);
    car.createProperty("plate", OType.STRING);
    car.createProperty("owner", OType.LINK, worker);
    database.getMetadata().getSchema().save();
  }

  /**
   * Builds the reference graph the tests assert on:
   * johnDoe -> car; ctu -> {boss: jackBauer, workers: [chuckNorris, janeDoe]};
   * fbi -> {boss: chuckNorris, workers: [chuckNorris, jackBauer]}; car -> jackBauer.
   * Note the car is deliberately saved last, after its "owner" link is set.
   */
  private void populateDatabase() {
    ODocument car = new ODocument(CAR);
    car.field("plate", "JINF223S");
    ODocument johnDoe = new ODocument(WORKER);
    johnDoe.field("name", "John");
    johnDoe.field("surname", "Doe");
    johnDoe.field("car", car);
    johnDoe.save();
    johnDoeID = johnDoe.getIdentity().copy();
    ODocument janeDoe = new ODocument(WORKER);
    janeDoe.field("name", "Jane");
    janeDoe.field("surname", "Doe");
    janeDoe.save();
    janeDoeID = janeDoe.getIdentity().copy();
    ODocument chuckNorris = new ODocument(WORKER);
    chuckNorris.field("name", "Chuck");
    chuckNorris.field("surname", "Norris");
    chuckNorris.save();
    chuckNorrisID = chuckNorris.getIdentity().copy();
    ODocument jackBauer = new ODocument(WORKER);
    jackBauer.field("name", "Jack");
    jackBauer.field("surname", "Bauer");
    jackBauer.save();
    jackBauerID = jackBauer.getIdentity().copy();
    ODocument ctu = new ODocument(WORKPLACE);
    ctu.field("name", "CTU");
    ctu.field("boss", jackBauer);
    List<ODocument> workplace1Workers = new ArrayList<ODocument>();
    workplace1Workers.add(chuckNorris);
    workplace1Workers.add(janeDoe);
    ctu.field("workers", workplace1Workers);
    ctu.save();
    ctuID = ctu.getIdentity().copy();
    ODocument fbi = new ODocument(WORKPLACE);
    fbi.field("name", "FBI");
    fbi.field("boss", chuckNorris);
    List<ODocument> workplace2Workers = new ArrayList<ODocument>();
    workplace2Workers.add(chuckNorris);
    workplace2Workers.add(jackBauer);
    fbi.field("workers", workplace2Workers);
    fbi.save();
    fbiID = fbi.getIdentity().copy();
    car.field("owner", jackBauer);
    car.save();
    carID = car.getIdentity().copy();
  }

  @AfterClass
  public void deleteTestEnviroment() {
    // NOTE(review): reopens the database before teardown — presumably the base
    // class closed it after the test group; confirm against DocumentDBBaseTest.
    database.open("admin", "admin");
    carID.reset();
    carID = null;
    johnDoeID.reset();
    johnDoeID = null;
    janeDoeID.reset();
    janeDoeID = null;
    chuckNorrisID.reset();
    chuckNorrisID = null;
    jackBauerID.reset();
    jackBauerID = null;
    ctuID.reset();
    ctuID = null;
    fbiID.reset();
    fbiID = null;
    deleteSchema();
    database.close();
  }

  private void deleteSchema() {
    dropClass(CAR);
    dropClass(WORKER);
    dropClass(WORKPLACE);
  }

  /**
   * Drops a class and its backing cluster, retrying until both are really gone
   * (the schema/cluster may need several reload cycles to settle).
   */
  private void dropClass(String iClass) {
    OCommandSQL dropClassCommand = new OCommandSQL("drop class " + iClass);
    database.command(dropClassCommand).execute();
    database.getMetadata().getSchema().save();
    database.getMetadata().getSchema().reload();
    database.reload();
    while (database.getMetadata().getSchema().existsClass(iClass)) {
      database.getMetadata().getSchema().dropClass(iClass);
      database.getMetadata().getSchema().save();
      database.reload();
    }
    while (database.getClusterIdByName(iClass) > -1) {
      database.dropCluster(iClass, true);
      database.reload();
    }
  }
}
|
|
<%#
Copyright 2013-2018 the original author or authors from the JHipster project.
This file is part of the JHipster project, see http://www.jhipster.tech/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
<%_
let cacheManagerIsAvailable = false;
if (['ehcache', 'hazelcast', 'infinispan'].includes(cacheProvider) || applicationType === 'gateway') {
cacheManagerIsAvailable = true;
}
_%>
package <%= packageName %>.web.rest;
<%_ if (databaseType === 'cassandra') { _%>
import <%= packageName %>.AbstractCassandraTest;
<%_ } _%>
import <%= packageName %>.<%= mainClass %>;
<%_ if (cacheManagerIsAvailable === true) { _%>
import <%=packageName%>.config.CacheConfiguration;
<%_ } _%>
<%_ if (databaseType !== 'cassandra' && databaseType !== 'couchbase') { _%>
import <%= packageName %>.config.Constants;
import <%= packageName %>.domain.Authority;
<%_ } _%>
import <%= packageName %>.domain.User;
import <%= packageName %>.repository.UserRepository;
<%_ if (searchEngine === 'elasticsearch') { _%>
import <%= packageName %>.repository.search.UserSearchRepository;
<%_ } _%>
import <%= packageName %>.security.AuthoritiesConstants;
<%_ if (authenticationType !== 'oauth2') { _%>
import <%= packageName %>.service.MailService;<% } %>
import <%= packageName %>.service.UserService;
import <%= packageName %>.service.dto.UserDTO;
import <%= packageName %>.service.mapper.UserMapper;
<%_ if (databaseType === 'cassandra') { _%>
import <%= packageName %>.service.util.RandomUtil;
<%_ } _%>
import <%= packageName %>.web.rest.errors.ExceptionTranslator;
import <%= packageName %>.web.rest.vm.ManagedUserVM;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
<%_ if (cacheManagerIsAvailable === true) { _%>
import org.springframework.cache.CacheManager;
<%_ } _%>
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
<%_ if (databaseType === 'sql') { _%>
import org.springframework.transaction.annotation.Transactional;
<%_ } _%>
<%_ if (databaseType === 'sql') { _%>
import javax.persistence.EntityManager;
<%_ } _%>
<%_ if (databaseType !== 'cassandra') { _%>
import java.time.Instant;
<%_ } _%>
import java.util.*;
<%_ if (databaseType === 'cassandra' || databaseType === 'couchbase') { _%>
import java.util.stream.Collectors;
import java.util.stream.Stream;
<%_ } _%>
<%_ if (databaseType === 'cassandra') { _%>
import java.util.UUID;
<%_ } _%>
<%_ if (databaseType === 'couchbase') { _%>
import static <%= packageName %>.web.rest.TestUtil.mockAuthentication;
<%_ } _%>
<%_ if (enableSocialSignIn) { _%>
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
<%_ } _%>
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Test class for the UserResource REST controller.
*
* @see UserResource
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = <%= mainClass %>.class)
public class UserResourceIntTest <% if (databaseType === 'cassandra') { %>extends AbstractCassandraTest <% } %>{
private static final String DEFAULT_LOGIN = "johndoe";
private static final String UPDATED_LOGIN = "jhipster";
<%_ if (databaseType === 'sql') { _%>
private static final Long DEFAULT_ID = 1L;
<%_ } else if (databaseType === 'couchbase'){ _%>
private static final String DEFAULT_ID = User.PREFIX + DEFAULT_LOGIN;
<%_ } else { _%>
private static final String DEFAULT_ID = "id1";
<%_ } _%>
private static final String DEFAULT_PASSWORD = "passjohndoe";
private static final String UPDATED_PASSWORD = "passjhipster";
private static final String DEFAULT_EMAIL = "johndoe@localhost";
private static final String UPDATED_EMAIL = "jhipster@localhost";
private static final String DEFAULT_FIRSTNAME = "john";
private static final String UPDATED_FIRSTNAME = "jhipsterFirstName";
private static final String DEFAULT_LASTNAME = "doe";
private static final String UPDATED_LASTNAME = "jhipsterLastName";
<%_ if (databaseType !== 'cassandra') { _%>
private static final String DEFAULT_IMAGEURL = "http://placehold.it/50x50";
private static final String UPDATED_IMAGEURL = "http://placehold.it/40x40";
<%_ } _%>
private static final String DEFAULT_LANGKEY = "en";
private static final String UPDATED_LANGKEY = "fr";
@Autowired
private UserRepository userRepository;
<%_ if (searchEngine === 'elasticsearch') { _%>
@Autowired
private UserSearchRepository userSearchRepository;
<%_ } _%>
<%_ if (authenticationType !== 'oauth2') { _%>
@Autowired
private MailService mailService;
<%_ } _%>
@Autowired
private UserService userService;
@Autowired
private UserMapper userMapper;
@Autowired
private MappingJackson2HttpMessageConverter jacksonMessageConverter;
@Autowired
private PageableHandlerMethodArgumentResolver pageableArgumentResolver;
@Autowired
private ExceptionTranslator exceptionTranslator;
<%_ if (databaseType === 'sql') { _%>
@Autowired
private EntityManager em;
<%_ } _%>
<%_ if (cacheManagerIsAvailable === true) { _%>
@Autowired
private CacheManager cacheManager;
<%_ } _%>
private MockMvc restUserMockMvc;
private User user;
@Before
// Builds a standalone MockMvc around a hand-constructed UserResource,
// injecting only the collaborators this generated app actually has.
public void setup() {
MockitoAnnotations.initMocks(this);
<%_ if (cacheManagerIsAvailable === true) { _%>
// Clear the user caches so cached entries from other tests can't leak in.
cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).clear();
cacheManager.getCache(UserRepository.USERS_BY_EMAIL_CACHE).clear();
<%_ } _%>
UserResource userResource = new UserResource(userRepository, userService<% if (authenticationType !== 'oauth2') { %>, mailService<% } %><% if (searchEngine === 'elasticsearch') { %>, userSearchRepository<% } %>);
this.restUserMockMvc = MockMvcBuilders.standaloneSetup(userResource)
.setCustomArgumentResolvers(pageableArgumentResolver)
.setControllerAdvice(exceptionTranslator)
.setMessageConverters(jacksonMessageConverter)
.build();
}
/**
 * Create a User.
 *
 * This is a static method, as tests for other entities might also need it,
 * if they test an entity which has a required relationship to the User entity.
 * For SQL databases the login/email get a random suffix so repeated calls do
 * not collide on unique constraints.
 */
public static User createEntity(<% if (databaseType === 'sql') { %>EntityManager em<% } %>) {
User user = new User();
<%_ if (databaseType === 'cassandra') { _%>
user.setId(UUID.randomUUID().toString());
<%_ } _%>
user.setLogin(DEFAULT_LOGIN<% if (databaseType === 'sql') { %> + RandomStringUtils.randomAlphabetic(5)<% } %>);
<%_ if (authenticationType !== 'oauth2') { _%>
user.setPassword(RandomStringUtils.random(60));
<%_ } _%>
user.setActivated(true);
user.setEmail(<% if (databaseType === 'sql') { %>RandomStringUtils.randomAlphabetic(5) + <% } %>DEFAULT_EMAIL);
user.setFirstName(DEFAULT_FIRSTNAME);
user.setLastName(DEFAULT_LASTNAME);
<%_ if (databaseType !== 'cassandra') { _%>
user.setImageUrl(DEFAULT_IMAGEURL);
<%_ } _%>
user.setLangKey(DEFAULT_LANGKEY);
return user;
}
@Before
// Resets per-test state: a fresh User entity, plus (for non-SQL stores,
// which have no transaction rollback) a wiped repository.
public void initTest() {
<%_ if (databaseType === 'couchbase') { _%>
mockAuthentication();
<%_ } _%>
<%_ if (databaseType !== 'sql') { _%>
userRepository.deleteAll();
user = createEntity();
<%_ } _%>
<%_ if (databaseType === 'sql') { _%>
user = createEntity(em);
// Undo createEntity()'s randomized suffixes so tests can assert exact values.
user.setLogin(DEFAULT_LOGIN);
user.setEmail(DEFAULT_EMAIL);
<%_ } _%>
}
<%_ if (authenticationType !== 'oauth2') { _%>
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// POST /api/users with a valid payload must return 201 and persist the user.
public void createUser() throws Exception {
int databaseSizeBeforeCreate = userRepository.findAll().size();
// Create the User
ManagedUserVM managedUserVM = new ManagedUserVM();
managedUserVM.setLogin(DEFAULT_LOGIN);
managedUserVM.setPassword(DEFAULT_PASSWORD);
managedUserVM.setFirstName(DEFAULT_FIRSTNAME);
managedUserVM.setLastName(DEFAULT_LASTNAME);
managedUserVM.setEmail(DEFAULT_EMAIL);
managedUserVM.setActivated(true);
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(DEFAULT_IMAGEURL);
<%_ } _%>
managedUserVM.setLangKey(DEFAULT_LANGKEY);
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restUserMockMvc.perform(post("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isCreated());
// Validate the User in the database
List<User> userList = userRepository.findAll();
assertThat(userList).hasSize(databaseSizeBeforeCreate + 1);
// The newly created user is expected at the end of the list.
User testUser = userList.get(userList.size() - 1);
assertThat(testUser.getLogin()).isEqualTo(DEFAULT_LOGIN);
assertThat(testUser.getFirstName()).isEqualTo(DEFAULT_FIRSTNAME);
assertThat(testUser.getLastName()).isEqualTo(DEFAULT_LASTNAME);
assertThat(testUser.getEmail()).isEqualTo(DEFAULT_EMAIL);
<%_ if (databaseType !== 'cassandra') { _%>
assertThat(testUser.getImageUrl()).isEqualTo(DEFAULT_IMAGEURL);
<%_ } _%>
assertThat(testUser.getLangKey()).isEqualTo(DEFAULT_LANGKEY);
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// POST /api/users carrying a client-supplied id must be rejected (400)
// and must not change the database.
public void createUserWithExistingId() throws Exception {
int databaseSizeBeforeCreate = userRepository.findAll().size();
ManagedUserVM managedUserVM = new ManagedUserVM();
<%_ if (databaseType === 'cassandra') { _%>
managedUserVM.setId(UUID.randomUUID().toString());
<%_ } else if (databaseType === 'mongodb' || databaseType === 'couchbase') { _%>
managedUserVM.setId("1L");
<%_ } else { _%>
managedUserVM.setId(1L);
<%_ } _%>
managedUserVM.setLogin(DEFAULT_LOGIN);
managedUserVM.setPassword(DEFAULT_PASSWORD);
managedUserVM.setFirstName(DEFAULT_FIRSTNAME);
managedUserVM.setLastName(DEFAULT_LASTNAME);
managedUserVM.setEmail(DEFAULT_EMAIL);
managedUserVM.setActivated(true);
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(DEFAULT_IMAGEURL);
<%_ } _%>
managedUserVM.setLangKey(DEFAULT_LANGKEY);
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
// An entity with an existing ID cannot be created, so this API call must fail
restUserMockMvc.perform(post("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isBadRequest());
// Validate the User in the database
List<User> userList = userRepository.findAll();
assertThat(userList).hasSize(databaseSizeBeforeCreate);
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// Duplicate login (with a different email) must be rejected with 400.
public void createUserWithExistingLogin() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
int databaseSizeBeforeCreate = userRepository.findAll().size();
ManagedUserVM managedUserVM = new ManagedUserVM();
managedUserVM.setLogin(DEFAULT_LOGIN);// this login should already be used
managedUserVM.setPassword(DEFAULT_PASSWORD);
managedUserVM.setFirstName(DEFAULT_FIRSTNAME);
managedUserVM.setLastName(DEFAULT_LASTNAME);
managedUserVM.setEmail("anothermail@localhost");
managedUserVM.setActivated(true);
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(DEFAULT_IMAGEURL);
<%_ } _%>
managedUserVM.setLangKey(DEFAULT_LANGKEY);
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
// Create the User
restUserMockMvc.perform(post("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isBadRequest());
// Validate the User in the database
List<User> userList = userRepository.findAll();
assertThat(userList).hasSize(databaseSizeBeforeCreate);
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// Duplicate email (with a different login) must be rejected with 400.
public void createUserWithExistingEmail() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
int databaseSizeBeforeCreate = userRepository.findAll().size();
ManagedUserVM managedUserVM = new ManagedUserVM();
managedUserVM.setLogin("anotherlogin");
managedUserVM.setPassword(DEFAULT_PASSWORD);
managedUserVM.setFirstName(DEFAULT_FIRSTNAME);
managedUserVM.setLastName(DEFAULT_LASTNAME);
managedUserVM.setEmail(DEFAULT_EMAIL);// this email should already be used
managedUserVM.setActivated(true);
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(DEFAULT_IMAGEURL);
<%_ } _%>
managedUserVM.setLangKey(DEFAULT_LANGKEY);
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
// Create the User
restUserMockMvc.perform(post("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isBadRequest());
// Validate the User in the database
List<User> userList = userRepository.findAll();
assertThat(userList).hasSize(databaseSizeBeforeCreate);
}
<%_ } _%>
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// GET /api/users must list the saved user with all its exposed fields.
public void getAllUsers() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
// Get all the users
restUserMockMvc.perform(get("/api/users<% if (databaseType === 'sql') { %>?sort=id,desc<% } %>")
.accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
.andExpect(jsonPath("$.[*].login").value(hasItem(DEFAULT_LOGIN)))
.andExpect(jsonPath("$.[*].firstName").value(hasItem(DEFAULT_FIRSTNAME)))
.andExpect(jsonPath("$.[*].lastName").value(hasItem(DEFAULT_LASTNAME)))
.andExpect(jsonPath("$.[*].email").value(hasItem(DEFAULT_EMAIL)))
<%_ if (databaseType !== 'cassandra') { _%>
.andExpect(jsonPath("$.[*].imageUrl").value(hasItem(DEFAULT_IMAGEURL)))
<%_ } _%>
.andExpect(jsonPath("$.[*].langKey").value(hasItem(DEFAULT_LANGKEY)));
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// GET /api/users/{id} must return the user, and (when caching is enabled)
// the lookup must populate the users-by-login cache as a side effect.
public void getUser() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
<%_ if (cacheManagerIsAvailable === true) { _%>
// Cache must be cold before the request...
assertThat(cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).get(user.getLogin())).isNull();
<%_ } _%>
// Get the user
restUserMockMvc.perform(get("/api/users/{id}", user.getId()))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
.andExpect(jsonPath("$.login").value(user.getLogin()))
.andExpect(jsonPath("$.firstName").value(DEFAULT_FIRSTNAME))
.andExpect(jsonPath("$.lastName").value(DEFAULT_LASTNAME))
.andExpect(jsonPath("$.email").value(DEFAULT_EMAIL))
<%_ if (databaseType !== 'cassandra') { _%>
.andExpect(jsonPath("$.imageUrl").value(DEFAULT_IMAGEURL))
<%_ } _%>
.andExpect(jsonPath("$.langKey").value(DEFAULT_LANGKEY));
<%_ if (cacheManagerIsAvailable === true) { _%>
// ...and warm afterwards.
assertThat(cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).get(user.getLogin())).isNotNull();
<%_ } _%>
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// Requesting an unknown user must yield 404.
public void getNonExistingUser() throws Exception {
restUserMockMvc.perform(get("/api/users/100000"))
.andExpect(status().isNotFound());
}
<%_ if (authenticationType !== 'oauth2') { _%>
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// PUT /api/users updating every mutable field (login unchanged) must return
// 200 and persist the new values without growing the table.
public void updateUser() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
int databaseSizeBeforeUpdate = userRepository.findAll().size();
// Update the user
User updatedUser = userRepository.findOne(user.getId());
ManagedUserVM managedUserVM = new ManagedUserVM();
managedUserVM.setId(updatedUser.getId());
managedUserVM.setLogin(updatedUser.getLogin());
managedUserVM.setPassword(UPDATED_PASSWORD);
managedUserVM.setFirstName(UPDATED_FIRSTNAME);
managedUserVM.setLastName(UPDATED_LASTNAME);
managedUserVM.setEmail(UPDATED_EMAIL);
managedUserVM.setActivated(updatedUser.getActivated());
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(UPDATED_IMAGEURL);
<%_ } _%>
managedUserVM.setLangKey(UPDATED_LANGKEY);
<%_ if (databaseType !== 'cassandra') { _%>
// Carry over the audit fields so the VM round-trips the existing entity.
managedUserVM.setCreatedBy(updatedUser.getCreatedByUserName());
managedUserVM.setCreatedDate(updatedUser.getCreatedDate());
managedUserVM.setLastModifiedBy(updatedUser.getLastModifiedByUserName());
managedUserVM.setLastModifiedDate(updatedUser.getLastModifiedDate());
<%_ } _%>
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restUserMockMvc.perform(put("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isOk());
// Validate the User in the database
List<User> userList = userRepository.findAll();
assertThat(userList).hasSize(databaseSizeBeforeUpdate);
User testUser = userList.get(userList.size() - 1);
assertThat(testUser.getFirstName()).isEqualTo(UPDATED_FIRSTNAME);
assertThat(testUser.getLastName()).isEqualTo(UPDATED_LASTNAME);
assertThat(testUser.getEmail()).isEqualTo(UPDATED_EMAIL);
<%_ if (databaseType !== 'cassandra') { _%>
assertThat(testUser.getImageUrl()).isEqualTo(UPDATED_IMAGEURL);
<%_ } _%>
assertThat(testUser.getLangKey()).isEqualTo(UPDATED_LANGKEY);
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
// Same as updateUser(), but additionally changes the login itself.
public void updateUserLogin() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
int databaseSizeBeforeUpdate = userRepository.findAll().size();
// Update the user
User updatedUser = userRepository.findOne(user.getId());
ManagedUserVM managedUserVM = new ManagedUserVM();
managedUserVM.setId(updatedUser.getId());
managedUserVM.setLogin(UPDATED_LOGIN);
managedUserVM.setPassword(UPDATED_PASSWORD);
managedUserVM.setFirstName(UPDATED_FIRSTNAME);
managedUserVM.setLastName(UPDATED_LASTNAME);
managedUserVM.setEmail(UPDATED_EMAIL);
managedUserVM.setActivated(updatedUser.getActivated());
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(UPDATED_IMAGEURL);
<%_ } _%>
managedUserVM.setLangKey(UPDATED_LANGKEY);
<%_ if (databaseType !== 'cassandra') { _%>
// Carry over the audit fields so the VM round-trips the existing entity.
managedUserVM.setCreatedBy(updatedUser.getCreatedByUserName());
managedUserVM.setCreatedDate(updatedUser.getCreatedDate());
managedUserVM.setLastModifiedBy(updatedUser.getLastModifiedByUserName());
managedUserVM.setLastModifiedDate(updatedUser.getLastModifiedDate());
<%_ } _%>
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
restUserMockMvc.perform(put("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isOk());
// Validate the User in the database
List<User> userList = userRepository.findAll();
assertThat(userList).hasSize(databaseSizeBeforeUpdate);
User testUser = userList.get(userList.size() - 1);
assertThat(testUser.getLogin()).isEqualTo(UPDATED_LOGIN);
assertThat(testUser.getFirstName()).isEqualTo(UPDATED_FIRSTNAME);
assertThat(testUser.getLastName()).isEqualTo(UPDATED_LASTNAME);
assertThat(testUser.getEmail()).isEqualTo(UPDATED_EMAIL);
<%_ if (databaseType !== 'cassandra') { _%>
assertThat(testUser.getImageUrl()).isEqualTo(UPDATED_IMAGEURL);
<%_ } _%>
assertThat(testUser.getLangKey()).isEqualTo(UPDATED_LANGKEY);
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
/**
 * PUT /api/users: changing a user's email to one already owned by another
 * user must be rejected with 400 Bad Request.
 */
public void updateUserExistingEmail() throws Exception {
// Initialize the database with 2 users
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
// Second fixture user whose email will collide with the update below.
User anotherUser = new User();
<%_ if (databaseType === 'cassandra') { _%>
anotherUser.setId(UUID.randomUUID().toString());
<%_ } _%>
anotherUser.setLogin("jhipster");
anotherUser.setPassword(RandomStringUtils.random(60));
anotherUser.setActivated(true);
anotherUser.setEmail("jhipster@localhost");
anotherUser.setFirstName("java");
anotherUser.setLastName("hipster");
<%_ if (databaseType !== 'cassandra') { _%>
anotherUser.setImageUrl("");
<%_ } _%>
anotherUser.setLangKey("en");
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(anotherUser);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(anotherUser);
<%_ } _%>
// Update the user
User updatedUser = userRepository.findOne(user.getId());
ManagedUserVM managedUserVM = new ManagedUserVM();
managedUserVM.setId(updatedUser.getId());
managedUserVM.setLogin(updatedUser.getLogin());
managedUserVM.setPassword(updatedUser.getPassword());
managedUserVM.setFirstName(updatedUser.getFirstName());
managedUserVM.setLastName(updatedUser.getLastName());
managedUserVM.setEmail("jhipster@localhost");// this email should already be used by anotherUser
managedUserVM.setActivated(updatedUser.getActivated());
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(updatedUser.getImageUrl());
<%_ } _%>
managedUserVM.setLangKey(updatedUser.getLangKey());
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setCreatedBy(updatedUser.getCreatedByUserName());
managedUserVM.setCreatedDate(updatedUser.getCreatedDate());
managedUserVM.setLastModifiedBy(updatedUser.getLastModifiedByUserName());
managedUserVM.setLastModifiedDate(updatedUser.getLastModifiedDate());
<%_ } _%>
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
// The duplicate email must be detected server-side and rejected.
restUserMockMvc.perform(put("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isBadRequest());
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
/**
 * PUT /api/users: changing a user's login to one already owned by another
 * user must be rejected with 400 Bad Request.
 */
public void updateUserExistingLogin() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
// Second fixture user whose login will collide with the update below.
User anotherUser = new User();
<%_ if (databaseType === 'cassandra') { _%>
anotherUser.setId(UUID.randomUUID().toString());
<%_ } _%>
anotherUser.setLogin("jhipster");
anotherUser.setPassword(RandomStringUtils.random(60));
anotherUser.setActivated(true);
anotherUser.setEmail("jhipster@localhost");
anotherUser.setFirstName("java");
anotherUser.setLastName("hipster");
<%_ if (databaseType !== 'cassandra') { _%>
anotherUser.setImageUrl("");
<%_ } _%>
anotherUser.setLangKey("en");
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(anotherUser);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(anotherUser);
<%_ } _%>
// Update the user
User updatedUser = userRepository.findOne(user.getId());
ManagedUserVM managedUserVM = new ManagedUserVM();
managedUserVM.setId(updatedUser.getId());
managedUserVM.setLogin("jhipster");// this login should already be used by anotherUser
managedUserVM.setPassword(updatedUser.getPassword());
managedUserVM.setFirstName(updatedUser.getFirstName());
managedUserVM.setLastName(updatedUser.getLastName());
managedUserVM.setEmail(updatedUser.getEmail());
managedUserVM.setActivated(updatedUser.getActivated());
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setImageUrl(updatedUser.getImageUrl());
<%_ } _%>
managedUserVM.setLangKey(updatedUser.getLangKey());
<%_ if (databaseType !== 'cassandra') { _%>
managedUserVM.setCreatedBy(updatedUser.getCreatedByUserName());
managedUserVM.setCreatedDate(updatedUser.getCreatedDate());
managedUserVM.setLastModifiedBy(updatedUser.getLastModifiedByUserName());
managedUserVM.setLastModifiedDate(updatedUser.getLastModifiedDate());
<%_ } _%>
managedUserVM.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
// The duplicate login must be detected server-side and rejected.
restUserMockMvc.perform(put("/api/users")
.contentType(TestUtil.APPLICATION_JSON_UTF8)
.content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
.andExpect(status().isBadRequest());
}
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
/**
 * DELETE /api/users/{login}: deleting an existing user removes the row
 * and (when a cache manager is configured) evicts the users-by-login cache.
 */
public void deleteUser() throws Exception {
// Initialize the database
userRepository.save<% if (databaseType === 'sql') { %>AndFlush<% } %>(user);
<%_ if (searchEngine === 'elasticsearch') { _%>
userSearchRepository.save(user);
<%_ } _%>
int databaseSizeBeforeDelete = userRepository.findAll().size();
// Delete the user
restUserMockMvc.perform(delete("/api/users/{id}", user.getId())
.accept(TestUtil.APPLICATION_JSON_UTF8))
.andExpect(status().isOk());
<%_ if (cacheManagerIsAvailable === true) { _%>
// The delete endpoint must also evict the cached login entry.
assertThat(cacheManager.getCache(UserRepository.USERS_BY_LOGIN_CACHE).get(user.getLogin())).isNull();
<%_ } _%>
// Validate the database is empty
List<User> userList = userRepository.findAll();
assertThat(userList).hasSize(databaseSizeBeforeDelete - 1);
}
<%_ } _%>
<%_ if (databaseType === 'sql' || databaseType === 'mongodb' || databaseType === 'couchbase') { _%>
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
/**
 * GET /api/users/authorities: returns a JSON array containing exactly the
 * USER and ADMIN roles, in any order.
 */
public void getAllAuthorities() throws Exception {
restUserMockMvc.perform(get("/api/users/authorities")
.accept(TestUtil.APPLICATION_JSON_UTF8)
.contentType(TestUtil.APPLICATION_JSON_UTF8))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
.andExpect(jsonPath("$").isArray())
.andExpect(jsonPath("$").value(containsInAnyOrder(AuthoritiesConstants.USER, AuthoritiesConstants.ADMIN)));
}
<%_ } _%>
@Test
<%_ if (databaseType === 'sql') { _%>
@Transactional
<%_ } _%>
/**
 * User.equals() contract: equality is based on the id field — same id means
 * equal, different or null id means not equal.
 */
public void testUserEquals() throws Exception {
TestUtil.equalsVerifier(User.class);
User user1 = new User();
user1.setId(<% if (databaseType === 'sql') { %>1L<% } else { %>"id1"<% } %>);
User user2 = new User();
user2.setId(user1.getId());
// Same id -> equal
assertThat(user1).isEqualTo(user2);
user2.setId(<% if (databaseType === 'sql') { %>2L<% } else { %>"id2"<% } %>);
// Different id -> not equal
assertThat(user1).isNotEqualTo(user2);
user1.setId(null);
// Null id is never equal to a non-null id
assertThat(user1).isNotEqualTo(user2);
}
@Test
/**
 * UserMapper.userFromId(): builds a stub User carrying only the given id,
 * and maps a null id to a null User.
 */
public void testUserFromId() {
assertThat(userMapper.userFromId(DEFAULT_ID).getId()).isEqualTo(DEFAULT_ID);
assertThat(userMapper.userFromId(null)).isNull();
}
@Test
/**
 * UserMapper.userDTOToUser(): copies the editable fields from the DTO and
 * resets the audit metadata (createdBy/lastModifiedBy become null, the
 * dates are re-initialized) rather than trusting client-supplied values.
 */
public void testUserDTOtoUser() {
UserDTO userDTO = new UserDTO();
userDTO.setId(DEFAULT_ID);
userDTO.setLogin(DEFAULT_LOGIN);
userDTO.setFirstName(DEFAULT_FIRSTNAME);
userDTO.setLastName(DEFAULT_LASTNAME);
userDTO.setEmail(DEFAULT_EMAIL);
userDTO.setActivated(true);
<%_ if (databaseType !== 'cassandra') { _%>
userDTO.setImageUrl(DEFAULT_IMAGEURL);
<%_ } _%>
userDTO.setLangKey(DEFAULT_LANGKEY);
<%_ if (databaseType !== 'cassandra') { _%>
userDTO.setCreatedBy(DEFAULT_LOGIN);
userDTO.setLastModifiedBy(DEFAULT_LOGIN);
<%_ } _%>
userDTO.setAuthorities(Collections.singleton(AuthoritiesConstants.USER));
User user = userMapper.userDTOToUser(userDTO);
assertThat(user.getId()).isEqualTo(DEFAULT_ID);
assertThat(user.getLogin()).isEqualTo(DEFAULT_LOGIN);
assertThat(user.getFirstName()).isEqualTo(DEFAULT_FIRSTNAME);
assertThat(user.getLastName()).isEqualTo(DEFAULT_LASTNAME);
assertThat(user.getEmail()).isEqualTo(DEFAULT_EMAIL);
assertThat(user.getActivated()).isEqualTo(true);
<%_ if (databaseType !== 'cassandra') { _%>
assertThat(user.getImageUrl()).isEqualTo(DEFAULT_IMAGEURL);
<%_ } _%>
assertThat(user.getLangKey()).isEqualTo(DEFAULT_LANGKEY);
<%_ if (databaseType !== 'cassandra') { _%>
// Audit fields from the DTO are ignored: the mapper leaves the "by" fields
// null and sets fresh dates.
assertThat(user.getCreatedBy()).isNull();
assertThat(user.getCreatedDate()).isNotNull();
assertThat(user.getLastModifiedBy()).isNull();
assertThat(user.getLastModifiedDate()).isNotNull();
<%_ } _%>
assertThat(user.getAuthorities())<% if (databaseType !== 'cassandra' && databaseType !== 'couchbase') { %>.extracting("name")<%_ } _%>.containsExactly(AuthoritiesConstants.USER);
}
@Test
/**
 * UserMapper.userToUserDTO(): copies all user fields into the DTO, flattens
 * the Authority entities to their names, and (SQL only) renders the
 * created/modified-by users as "FirstName LastName (login)".
 */
public void testUserToUserDTO() {
user.setId(DEFAULT_ID);
<%_ if (databaseType === 'sql') { _%>
// Attach an auditing user via reflection since there is no public setter.
User other = new User();
other.setId(Constants.SYSTEM_ACCOUNT_ID);
other.setFirstName("John");
other.setLastName("Doe");
other.setLogin("johndoe");
ReflectionTestUtils.setField(user, "createdByUser", other);
ReflectionTestUtils.setField(user, "lastModifiedByUser", other);
<%_ } _%>
<%_ if (databaseType !== 'cassandra') { _%>
user.setCreatedDate(Instant.now());
user.setLastModifiedDate(Instant.now());
<%_ } _%>
<%_ if (databaseType !== 'cassandra' && databaseType !== 'couchbase') { _%>
Set<Authority> authorities = new HashSet<>();
Authority authority = new Authority();
authority.setName(AuthoritiesConstants.USER);
authorities.add(authority);
user.setAuthorities(authorities);
<%_ } else { _%>
user.setAuthorities(Stream.of(AuthoritiesConstants.USER).collect(Collectors.toSet()));
<%_ } _%>
UserDTO userDTO = userMapper.userToUserDTO(user);
assertThat(userDTO.getId()).isEqualTo(DEFAULT_ID);
assertThat(userDTO.getLogin()).isEqualTo(DEFAULT_LOGIN);
assertThat(userDTO.getFirstName()).isEqualTo(DEFAULT_FIRSTNAME);
assertThat(userDTO.getLastName()).isEqualTo(DEFAULT_LASTNAME);
assertThat(userDTO.getEmail()).isEqualTo(DEFAULT_EMAIL);
assertThat(userDTO.isActivated()).isEqualTo(true);
<%_ if (databaseType !== 'cassandra') { _%>
assertThat(userDTO.getImageUrl()).isEqualTo(DEFAULT_IMAGEURL);
<%_ } _%>
assertThat(userDTO.getLangKey()).isEqualTo(DEFAULT_LANGKEY);
<%_ if (databaseType === 'sql') { _%>
// The auditing user is rendered as a display name, not a raw login.
assertThat(userDTO.getCreatedBy()).isEqualTo("John Doe (johndoe)");
assertThat(userDTO.getLastModifiedBy()).isEqualTo("John Doe (johndoe)");
<%_ } _%>
<%_ if (databaseType !== 'cassandra') { _%>
assertThat(userDTO.getCreatedDate()).isEqualTo(user.getCreatedDate());
assertThat(userDTO.getLastModifiedDate()).isEqualTo(user.getLastModifiedDate());
<%_ } _%>
assertThat(userDTO.getAuthorities()).containsExactly(AuthoritiesConstants.USER);
assertThat(userDTO.toString()).isNotNull();
}
<%_ if (databaseType === 'sql' || databaseType === 'mongodb') { _%>
@Test
/**
 * Authority.equals()/hashCode() contract: equality is based on the name
 * field; an unnamed Authority hashes to 0 and equal names imply equal hashes.
 */
public void testAuthorityEquals() throws Exception {
Authority authorityA = new Authority();
assertThat(authorityA).isEqualTo(authorityA);
assertThat(authorityA).isNotEqualTo(null);
assertThat(authorityA).isNotEqualTo(new Object());
// A null name hashes to 0.
assertThat(authorityA.hashCode()).isEqualTo(0);
assertThat(authorityA.toString()).isNotNull();
Authority authorityB = new Authority();
assertThat(authorityA).isEqualTo(authorityB);
authorityB.setName(AuthoritiesConstants.ADMIN);
assertThat(authorityA).isNotEqualTo(authorityB);
authorityA.setName(AuthoritiesConstants.USER);
assertThat(authorityA).isNotEqualTo(authorityB);
authorityB.setName(AuthoritiesConstants.USER);
// Same name -> equal objects and equal hash codes.
assertThat(authorityA).isEqualTo(authorityB);
assertThat(authorityA.hashCode()).isEqualTo(authorityB.hashCode());
}
<%_ } _%>
}
|
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package co.realityshifters.weatheroo.data;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_DATE;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_DEGREES;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_HUMIDITY;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_MAX_TEMP;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_MIN_TEMP;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_PRESSURE;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_WEATHER_ID;
import static co.realityshifters.weatheroo.data.WeatherContract.WeatherEntry.COLUMN_WIND_SPEED;
import android.content.ContentValues;
import android.database.ContentObserver;
import android.database.Cursor;
import android.net.Uri;
import android.os.Handler;
import android.os.HandlerThread;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import co.realityshifters.weatheroo.utilities.SunshineDateUtils;
import co.realityshifters.weatheroo.utils.PollingCheck;
/**
* These are functions and some test data to make it easier to test your database and Content
* Provider.
* <p>
* NOTE: If your WeatherContract class doesn't exactly match ours, THIS WILL NOT WORK as we've
* provided and you will need to make changes to this code to use it to pass your tests.
*/
class TestUtilities {
/* October 1st, 2016 at midnight, GMT time */
static final long DATE_NORMALIZED = 1475280000000L;
/* Number of rows generated by createBulkInsertTestWeatherValues() */
static final int BULK_INSERT_RECORDS_TO_INSERT = 10;
/**
 * Ensures there is a non empty cursor and validates the cursor's data by checking it against
 * a set of expected values. This method will then close the cursor.
 *
 * @param error Message when an error occurs
 * @param valueCursor The Cursor containing the actual values received from an arbitrary
 * query
 * @param expectedValues The values we expect to receive in valueCursor
 */
static void validateThenCloseCursor(String error, Cursor valueCursor,
ContentValues expectedValues) {
assertNotNull(
"This cursor is null. Did you make sure to register your ContentProvider in the "
+ "manifest?",
valueCursor);
assertTrue("Empty cursor returned. " + error, valueCursor.moveToFirst());
validateCurrentRecord(error, valueCursor, expectedValues);
valueCursor.close();
}
/**
 * This method iterates through a set of expected values and makes various assertions that
 * will pass if our app is functioning properly.
 *
 * @param error Message when an error occurs
 * @param valueCursor The Cursor containing the actual values received from an arbitrary
 * query
 * @param expectedValues The values we expect to receive in valueCursor
 */
static void validateCurrentRecord(String error, Cursor valueCursor,
ContentValues expectedValues) {
Set<Map.Entry<String, Object>> valueSet = expectedValues.valueSet();
for (Map.Entry<String, Object> entry : valueSet) {
String columnName = entry.getKey();
int index = valueCursor.getColumnIndex(columnName);
/* Test to see if the column is contained within the cursor */
String columnNotFoundError = "Column '" + columnName + "' not found. " + error;
assertFalse(columnNotFoundError, index == -1);
/* Test to see if the expected value equals the actual value (from the Cursor) */
// Comparison is done on String renderings, so numeric types only need to
// round-trip through toString()/getString() consistently.
String expectedValue = entry.getValue().toString();
String actualValue = valueCursor.getString(index);
String valuesDontMatchError = "Actual value '" + actualValue
+ "' did not match the expected value '" + expectedValue + "'. "
+ error;
assertEquals(valuesDontMatchError,
expectedValue,
actualValue);
}
}
/**
 * Used as a convenience method to return a singleton instance of ContentValues to populate
 * our database or insert using our ContentProvider.
 *
 * @return ContentValues that can be inserted into our ContentProvider or weather.db
 */
static ContentValues createTestWeatherContentValues() {
ContentValues testWeatherValues = new ContentValues();
testWeatherValues.put(COLUMN_DATE, DATE_NORMALIZED);
testWeatherValues.put(COLUMN_DEGREES, 1.1);
testWeatherValues.put(COLUMN_HUMIDITY, 1.2);
testWeatherValues.put(COLUMN_PRESSURE, 1.3);
testWeatherValues.put(COLUMN_MAX_TEMP, 75);
testWeatherValues.put(COLUMN_MIN_TEMP, 65);
testWeatherValues.put(COLUMN_WIND_SPEED, 5.5);
testWeatherValues.put(COLUMN_WEATHER_ID, 321);
return testWeatherValues;
}
/**
 * Used as a convenience method to return a singleton instance of an array of ContentValues to
 * populate our database or insert using our ContentProvider's bulk insert method.
 * <p>
 * It is handy to have utility methods that produce test values because it makes it easy to
 * compare results from ContentProviders and databases to the values you expect to receive.
 * See {@link #validateCurrentRecord(String, Cursor, ContentValues)} and
 * {@link #validateThenCloseCursor(String, Cursor, ContentValues)} for more information on how
 * this verification is performed.
 *
 * @return Array of ContentValues that can be inserted into our ContentProvider or weather.db
 */
static ContentValues[] createBulkInsertTestWeatherValues() {
ContentValues[] bulkTestWeatherValues = new ContentValues[BULK_INSERT_RECORDS_TO_INSERT];
long testDate = TestUtilities.DATE_NORMALIZED;
long normalizedTestDate = SunshineDateUtils.normalizeDate(testDate);
for (int i = 0; i < BULK_INSERT_RECORDS_TO_INSERT; i++) {
// Each record is one day later than the previous one, with slightly
// varying weather values so rows are distinguishable.
normalizedTestDate += SunshineDateUtils.DAY_IN_MILLIS;
ContentValues weatherValues = new ContentValues();
weatherValues.put(COLUMN_DATE, normalizedTestDate);
weatherValues.put(COLUMN_DEGREES, 1.1);
weatherValues.put(COLUMN_HUMIDITY, 1.2 + 0.01 * (float) i);
weatherValues.put(COLUMN_PRESSURE, 1.3 - 0.01 * (float) i);
weatherValues.put(COLUMN_MAX_TEMP, 75 + i);
weatherValues.put(COLUMN_MIN_TEMP, 65 - i);
weatherValues.put(COLUMN_WIND_SPEED, 5.5 + 0.2 * (float) i);
weatherValues.put(COLUMN_WEATHER_ID, 321);
bulkTestWeatherValues[i] = weatherValues;
}
return bulkTestWeatherValues;
}
/**
 * Convenience wrapper around {@link TestContentObserver#getTestContentObserver()}.
 *
 * @return a fresh TestContentObserver backed by its own HandlerThread
 */
static TestContentObserver getTestContentObserver() {
return TestContentObserver.getTestContentObserver();
}
/**
 * Students: The functions we provide inside of TestWeatherProvider use TestContentObserver to
 * test
 * the ContentObserver callbacks using the PollingCheck class from the Android Compatibility
 * Test Suite tests.
 * <p>
 * NOTE: This only tests that the onChange function is called; it DOES NOT test that the
 * correct Uri is returned.
 */
static class TestContentObserver extends ContentObserver {
// Background thread whose Looper delivers the onChange callbacks.
final HandlerThread mHT;
// Set to true by onChange; polled by waitForNotificationOrFail.
boolean mContentChanged;
// Private: instances are created via getTestContentObserver() so the
// HandlerThread is always started before its Looper is used.
private TestContentObserver(HandlerThread ht) {
super(new Handler(ht.getLooper()));
mHT = ht;
}
/**
 * Factory method: starts a dedicated HandlerThread and returns an observer
 * listening on it.
 */
static TestContentObserver getTestContentObserver() {
HandlerThread ht = new HandlerThread("ContentObserverThread");
ht.start();
return new TestContentObserver(ht);
}
/**
 * Called when a content change occurs.
 * <p>
 * To ensure correct operation on older versions of the framework that did not provide a
 * Uri argument, applications should also implement this method whenever they implement
 * the {@link #onChange(boolean, Uri)} overload.
 *
 * @param selfChange True if this is a self-change notification.
 */
@Override
public void onChange(boolean selfChange) {
onChange(selfChange, null);
}
/**
 * Called when a content change occurs. Includes the changed content Uri when available.
 *
 * @param selfChange True if this is a self-change notification.
 * @param uri The Uri of the changed content, or null if unknown.
 */
@Override
public void onChange(boolean selfChange, Uri uri) {
mContentChanged = true;
}
/**
 * Note: The PollingCheck class is taken from the Android CTS (Compatibility Test Suite).
 * It's useful to look at the Android CTS source for ideas on how to test your Android
 * applications. The reason that PollingCheck works is that, by default, the JUnit testing
 * framework is not running on the main Android application thread.
 */
void waitForNotificationOrFail() {
// Poll for up to 5 seconds for onChange to have fired, then tear down
// the observer's HandlerThread.
new PollingCheck(5000) {
@Override
protected boolean check() {
return mContentChanged;
}
}.run();
mHT.quit();
}
}
/**
 * Reflectively searches {@code klass} for a public static final String-compatible
 * constant whose value equals {@code value}, returning the constant's name, or
 * null if none is found (or the field cannot be accessed).
 * <p>
 * NOTE(review): the check uses {@code type.isAssignableFrom(String.class)}, so
 * fields typed {@code Object} or {@code CharSequence} also match — confirm this
 * widening is intended.
 */
static String getConstantNameByStringValue(Class klass, String value) {
for (Field f : klass.getDeclaredFields()) {
int modifiers = f.getModifiers();
Class<?> type = f.getType();
boolean isPublicStaticFinalString = Modifier.isStatic(modifiers)
&& Modifier.isFinal(modifiers)
&& Modifier.isPublic(modifiers)
&& type.isAssignableFrom(String.class);
if (isPublicStaticFinalString) {
String fieldName = f.getName();
try {
String fieldValue = (String) klass.getDeclaredField(fieldName).get(null);
if (fieldValue.equals(value)) return fieldName;
} catch (IllegalAccessException e) {
return null;
} catch (NoSuchFieldException e) {
return null;
}
}
}
return null;
}
/**
 * Reads the value of the static String field {@code variableName} declared on
 * {@code clazz}, making it accessible if necessary.
 *
 * @throws NoSuchFieldException if the field does not exist on the class
 * @throws IllegalAccessException if the field cannot be read
 */
static String getStaticStringField(Class clazz, String variableName)
throws NoSuchFieldException, IllegalAccessException {
Field stringField = clazz.getDeclaredField(variableName);
stringField.setAccessible(true);
String value = (String) stringField.get(null);
return value;
}
/**
 * Reads the value of the static Integer field {@code variableName} declared on
 * {@code clazz}, making it accessible if necessary.
 *
 * @throws NoSuchFieldException if the field does not exist on the class
 * @throws IllegalAccessException if the field cannot be read
 */
static Integer getStaticIntegerField(Class clazz, String variableName)
throws NoSuchFieldException, IllegalAccessException {
Field intField = clazz.getDeclaredField(variableName);
intField.setAccessible(true);
Integer value = (Integer) intField.get(null);
return value;
}
/**
 * Converts a ClassNotFoundException into a student-friendly message naming the
 * missing simple class (nested classes are shown with '.' instead of '$').
 */
static String studentReadableClassNotFound(ClassNotFoundException e) {
String message = e.getMessage();
int indexBeforeSimpleClassName = message.lastIndexOf('.');
String simpleClassNameThatIsMissing = message.substring(indexBeforeSimpleClassName + 1);
simpleClassNameThatIsMissing = simpleClassNameThatIsMissing.replaceAll("\\$", ".");
String fullClassNotFoundReadableMessage = "Couldn't find the class "
+ simpleClassNameThatIsMissing
+ ".\nPlease make sure you've created that class and followed the TODOs.";
return fullClassNotFoundReadableMessage;
}
/**
 * Converts a NoSuchFieldException into a student-friendly message naming the
 * missing field and its class; falls back to the raw message if the exception
 * text does not match the expected VM format.
 */
static String studentReadableNoSuchField(NoSuchFieldException e) {
String message = e.getMessage();
// Parses the VM's "No field <name> in class L<path>/<Class>;" message format.
Pattern p = Pattern.compile("No field (\\w*) in class L.*/(\\w*\\$?\\w*);");
Matcher m = p.matcher(message);
if (m.find()) {
String missingFieldName = m.group(1);
String classForField = m.group(2).replaceAll("\\$", ".");
String fieldNotFoundReadableMessage = "Couldn't find "
+ missingFieldName + " in class " + classForField + "."
+ "\nPlease make sure you've declared that field and followed the TODOs.";
return fieldNotFoundReadableMessage;
} else {
return e.getMessage();
}
}
}
|
|
/*
* Copyright 2003-2014 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.psiutils;
import com.intellij.codeInspection.dataFlow.ControlFlowAnalyzer;
import com.intellij.codeInspection.dataFlow.MethodContract;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.psi.util.PsiTreeUtil;
import gnu.trove.THashSet;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.function.Predicate;
public class SideEffectChecker {
// Constructors of these well-known JDK classes are treated as side-effect
// free when deciding whether a `new` expression may be removed.
private static final Set<String> ourSideEffectFreeClasses = new THashSet<>(Arrays.asList(
Object.class.getName(),
Short.class.getName(),
Character.class.getName(),
Byte.class.getName(),
Integer.class.getName(),
Long.class.getName(),
Float.class.getName(),
Double.class.getName(),
String.class.getName(),
StringBuffer.class.getName(),
Boolean.class.getName(),
ArrayList.class.getName(),
Date.class.getName(),
HashMap.class.getName(),
HashSet.class.getName(),
Hashtable.class.getName(),
LinkedHashMap.class.getName(),
LinkedHashSet.class.getName(),
LinkedList.class.getName(),
Stack.class.getName(),
TreeMap.class.getName(),
TreeSet.class.getName(),
Vector.class.getName(),
WeakHashMap.class.getName()));
// Utility class: no instances.
private SideEffectChecker() {
}
/**
 * Returns true if evaluating {@code exp} may have an observable side effect
 * (assignment, impure call, increment/decrement, non-local jump, etc.).
 */
public static boolean mayHaveSideEffects(@NotNull PsiExpression exp) {
final SideEffectsVisitor visitor = new SideEffectsVisitor(null, exp);
exp.accept(visitor);
return visitor.mayHaveSideEffects();
}
/**
 * Like {@link #mayHaveSideEffects(PsiExpression)}, but elements accepted by
 * {@code shouldIgnoreElement} are not counted as side effects.
 */
public static boolean mayHaveSideEffects(@NotNull PsiElement element, Predicate<PsiElement> shouldIgnoreElement) {
final SideEffectsVisitor visitor = new SideEffectsVisitor(null, element, shouldIgnoreElement);
element.accept(visitor);
return visitor.mayHaveSideEffects();
}
/**
 * Returns true if {@code element} may have side effects and collects every
 * side-effecting element into {@code sideEffects} (the whole tree is walked,
 * not just until the first hit).
 */
public static boolean checkSideEffects(@NotNull PsiExpression element, @NotNull List<PsiElement> sideEffects) {
final SideEffectsVisitor visitor = new SideEffectsVisitor(sideEffects, element);
element.accept(visitor);
return visitor.mayHaveSideEffects();
}
/**
 * Collects and returns the sub-expressions of {@code element} that may have
 * side effects (non-expression elements found by the visitor are dropped).
 */
public static List<PsiExpression> extractSideEffectExpressions(@NotNull PsiExpression element) {
List<PsiElement> list = new ArrayList<>();
element.accept(new SideEffectsVisitor(list, element));
return StreamEx.of(list).select(PsiExpression.class).toList();
}
private static class SideEffectsVisitor extends JavaRecursiveElementWalkingVisitor {
// Collected side-effecting elements; null means "stop at the first hit".
private final @Nullable List<PsiElement> mySideEffects;
// Root of the analyzed subtree; used to decide whether break/continue
// jumps stay inside the analyzed code.
private final @NotNull PsiElement myStartElement;
// Elements accepted by this predicate are not counted as side effects.
private final @NotNull Predicate<PsiElement> myIgnorePredicate;
boolean found;
SideEffectsVisitor(@Nullable List<PsiElement> sideEffects, @NotNull PsiElement startElement) {
this(sideEffects, startElement, call -> false);
}
SideEffectsVisitor(@Nullable List<PsiElement> sideEffects, @NotNull PsiElement startElement, @NotNull Predicate<PsiElement> predicate) {
myStartElement = startElement;
myIgnorePredicate = predicate;
mySideEffects = sideEffects;
}
// Records a side effect; returns true when the caller should not recurse
// further into the element. When no list is collected, walking stops
// entirely at the first hit.
private boolean addSideEffect(PsiElement element) {
if (myIgnorePredicate.test(element)) return false;
found = true;
if(mySideEffects != null) {
mySideEffects.add(element);
} else {
stopWalking();
}
return true;
}
@Override
public void visitAssignmentExpression(@NotNull PsiAssignmentExpression expression) {
if (addSideEffect(expression)) return;
super.visitAssignmentExpression(expression);
}
@Override
public void visitMethodCallExpression(@NotNull PsiMethodCallExpression expression) {
final PsiMethod method = expression.resolveMethod();
if (!isPure(method)) {
if (addSideEffect(expression)) return;
}
super.visitMethodCallExpression(expression);
}
// A method is pure if it is a simple getter, or is inferred pure by the
// control-flow analyzer and is not likely to throw (e.g. assert/check).
protected boolean isPure(PsiMethod method) {
if (method == null) return false;
if (PropertyUtil.isSimpleGetter(method)) return true;
return ControlFlowAnalyzer.isPure(method) && !mayHaveExceptionalSideEffect(method);
}
@Override
public void visitNewExpression(@NotNull PsiNewExpression expression) {
if(!isSideEffectFreeConstructor(expression)) {
if (addSideEffect(expression)) return;
}
super.visitNewExpression(expression);
}
@Override
public void visitUnaryExpression(@NotNull PsiUnaryExpression expression) {
final IElementType tokenType = expression.getOperationTokenType();
// Only ++ and -- mutate their operand; other unary operators are pure.
if (tokenType.equals(JavaTokenType.PLUSPLUS) || tokenType.equals(JavaTokenType.MINUSMINUS)) {
if (addSideEffect(expression)) return;
}
super.visitUnaryExpression(expression);
}
@Override
public void visitVariable(PsiVariable variable) {
// A declaration introduces a name into the enclosing scope, which counts
// as an effect when extracting/removing the analyzed code.
if (addSideEffect(variable)) return;
super.visitVariable(variable);
}
@Override
public void visitBreakStatement(PsiBreakStatement statement) {
// A break that exits a statement inside the analyzed subtree is local;
// only jumps leaving the subtree are side effects.
PsiStatement exitedStatement = statement.findExitedStatement();
if (exitedStatement != null && PsiTreeUtil.isAncestor(myStartElement, exitedStatement, true)) return;
if (addSideEffect(statement)) return;
super.visitBreakStatement(statement);
}
@Override
public void visitClass(PsiClass aClass) {
// local or anonymous class declaration is not side effect per se (unless it's instantiated)
}
@Override
public void visitContinueStatement(PsiContinueStatement statement) {
// Same reasoning as visitBreakStatement, for continued loops.
PsiStatement exitedStatement = statement.findContinuedStatement();
if (exitedStatement != null && PsiTreeUtil.isAncestor(myStartElement, exitedStatement, false)) return;
if (addSideEffect(statement)) return;
super.visitContinueStatement(statement);
}
@Override
public void visitReturnStatement(PsiReturnStatement statement) {
if (addSideEffect(statement)) return;
super.visitReturnStatement(statement);
}
@Override
public void visitThrowStatement(PsiThrowStatement statement) {
if (addSideEffect(statement)) return;
super.visitThrowStatement(statement);
}
@Override
public void visitLambdaExpression(PsiLambdaExpression expression) {
// lambda is not side effect per se (unless it's called)
}
public boolean mayHaveSideEffects() {
return found;
}
}
/**
 * Returns true if given method function is likely to throw an exception (e.g. "assertEquals"). In some cases this means that
 * the method call should be preserved in source code even if it's pure (i.e. does not change the program state).
 *
 * @param method a method to check
 * @return true if the method has exceptional side effect
 */
public static boolean mayHaveExceptionalSideEffect(PsiMethod method) {
if (method.getName().startsWith("assert") || method.getName().startsWith("check")) {
return true;
}
// Contracts that (unconditionally of bound checks) promise THROW_EXCEPTION
// mark the method as having an exceptional side effect.
return ControlFlowAnalyzer.getMethodCallContracts(method, null).stream()
.filter(mc -> mc.getConditions().stream().noneMatch(cv -> cv.isBoundCheckingCondition()))
.anyMatch(mc -> mc.getReturnValue() == MethodContract.ValueConstraint.THROW_EXCEPTION);
}
/**
 * Returns true if the constructor invoked by {@code newExpression} is known
 * to be side-effect free: either the class is in the whitelist above, or it
 * is a Throwable subclass declared in java.lang/java.io.
 */
private static boolean isSideEffectFreeConstructor(@NotNull PsiNewExpression newExpression) {
PsiJavaCodeReferenceElement classReference = newExpression.getClassReference();
PsiClass aClass = classReference == null ? null : (PsiClass)classReference.resolve();
String qualifiedName = aClass == null ? null : aClass.getQualifiedName();
if (qualifiedName == null) return false;
if (ourSideEffectFreeClasses.contains(qualifiedName)) return true;
PsiFile file = aClass.getContainingFile();
PsiDirectory directory = file.getContainingDirectory();
PsiPackage classPackage = directory == null ? null : JavaDirectoryService.getInstance().getPackage(directory);
String packageName = classPackage == null ? null : classPackage.getQualifiedName();
// all Throwable descendants from java.lang are side effects free
if (CommonClassNames.DEFAULT_PACKAGE.equals(packageName) || "java.io".equals(packageName)) {
PsiClass throwableClass = JavaPsiFacade.getInstance(aClass.getProject()).findClass("java.lang.Throwable", aClass.getResolveScope());
if (throwableClass != null && com.intellij.psi.util.InheritanceUtil.isInheritorOrSelf(aClass, throwableClass, true)) {
return true;
}
}
return false;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.pdf;
// Java...
import java.util.List;
/**
* class representing a PDF Smooth Shading object.
*
* PDF Functions represent parameterized mathematical formulas and sampled representations with
* arbitrary resolution. Functions are used in two areas: device-dependent
* rasterization information for halftoning and transfer
* functions, and color specification for smooth shading (a PDF 1.3 feature).
*
* All PDF Functions have a shadingType (0,2,3, or 4), a Domain, and a Range.
*/
public class PDFShading extends PDFObject {
    // Guts common to all shading types

    /** The name of the Shading e.g. "Shading1" */
    protected String shadingName = null;

    /** Required: The Type of shading (1,2,3,4,5,6,7) */
    protected int shadingType = 3; // Default

    /** A ColorSpace representing the colorspace. "DeviceRGB" is an example. */
    protected PDFDeviceColorSpace colorSpace = null;

    /**
     * The background color. Since shading is opaque,
     * this is very rarely used.
     */
    protected List background = null;

    /** Optional: A List specifying the clipping rectangle */
    protected List bBox = null;

    /**
     * Optional: A flag whether or not to filter the shading function
     * to prevent aliasing artifacts. Default is false.
     */
    protected boolean antiAlias = false;

    /**
     * Optional for Type 1: Array of four numbers, xmin, xmax, ymin, ymax.
     * Default is [0 1 0 1]
     * Optional for Type 2: An array of two numbers between which the blend
     * varies between start and end points. Default is 0, 1.
     * Optional for Type 3: An array of two numbers between which the blend
     * varies between start and end points. Default is 0, 1.
     */
    protected List domain = null;

    /** Optional for Type 1: A transformation matrix */
    protected List matrix = null;

    /**
     * Required for Type 1, 2, and 3:
     * The object of the color mapping function (usually type 2 or 3).
     * Optional for Type 4,5,6, and 7: When it's nearly the same thing.
     */
    protected PDFFunction function = null;

    /**
     * Required for Type 2: An Array of four numbers specifying
     * the starting and ending coordinate pairs
     * Required for Type 3: An Array of six numbers [x0,y0,r0,x1,y1,r1]
     * specifying the centers and radii of
     * the starting and ending circles.
     */
    protected List coords = null;

    /**
     * Required for Type 2+3: An Array of two boolean values specifying
     * whether to extend the start and end colors past the start
     * and end points, respectively.
     * Default is false, false.
     */
    protected List extend = null;

    /**
     * Required for Type 4,5,6, and 7: Specifies the number of bits used
     * to represent each vertex coordinate.
     * Allowed to be 1,2,4,8,12,16,24, or 32.
     */
    protected int bitsPerCoordinate = 0;

    /**
     * Required for Type 4,5,6, and 7: Specifies the number of bits used
     * to represent the edge flag for each vertex.
     * Allowed to be 2,4,or 8, while the Edge flag itself is allowed to
     * be 0,1 or 2.
     */
    protected int bitsPerFlag = 0;

    /**
     * Required for Type 4,5,6, and 7: List of Doubles which specifies
     * how to decode coordinate and color component values.
     * Each type has a differing number of decode array members, so check
     * the spec.
     * Page 303 in PDF Spec 1.3
     */
    protected List decode = null;

    /**
     * Required for Type 4,5,6, and 7: Specifies the number of bits used
     * to represent each color coordinate.
     * Allowed to be 1,2,4,8,12, or 16
     */
    protected int bitsPerComponent = 0;

    /**
     * Required for Type 5: The number of vertices in each "row" of
     * the lattice; it must be greater than or equal to 2.
     */
    protected int verticesPerRow = 0;

    /**
     * Constructor for type function based shading
     *
     * @param theShadingType The type of shading object, which should be 1 for function
     * based shading.
     * @param theColorSpace The colorspace is 'DeviceRGB' or something similar.
     * @param theBackground An array of color components appropriate to the
     * colorspace key specifying a single color value.
     * This key is used by the f operator but ignored by the sh operator.
     * @param theBBox List of double's representing a rectangle
     * in the coordinate space that is current at the
     * time of shading is imaged. Temporary clipping
     * boundary.
     * @param theAntiAlias Whether or not to anti-alias.
     * @param theDomain Optional vector of Doubles specifying the domain.
     * @param theMatrix List of Doubles specifying the matrix.
     * If it's a pattern, then the matrix maps it to pattern space.
     * If it's a shading, then it maps it to current user space.
     * It's optional, the default is the identity matrix
     * @param theFunction The PDF Function that maps an (x,y) location to a color
     */
    public PDFShading( // CSOK: ParameterNumber
            int theShadingType, PDFDeviceColorSpace theColorSpace,
            List theBackground, List theBBox,
            boolean theAntiAlias, List theDomain,
            List theMatrix, PDFFunction theFunction) {
        super();
        this.shadingType = theShadingType; // 1
        this.colorSpace = theColorSpace;
        this.background = theBackground;
        this.bBox = theBBox;
        this.antiAlias = theAntiAlias;
        this.domain = theDomain;
        this.matrix = theMatrix;
        this.function = theFunction;
    }

    /**
     * Constructor for Type 2 and 3
     *
     * @param theShadingType 2 or 3 for axial or radial shading
     * @param theColorSpace "DeviceRGB" or similar.
     * @param theBackground An array of color components appropriate to the
     * colorspace key specifying a single color value.
     * This key is used by the f operator but ignored by the sh operator.
     * @param theBBox List of double's representing a rectangle
     * in the coordinate space that is current at the
     * time of shading is imaged. Temporary clipping
     * boundary.
     * @param theAntiAlias Default is false
     * @param theCoords List of four (type 2) or 6 (type 3) Double
     * @param theDomain List of Doubles specifying the domain
     * @param theFunction the Stitching (PDFfunction type 3) function,
     * even if it's stitching a single function
     * @param theExtend List of Booleans of whether to extend the start
     * and end colors past the start and end points
     * The default is [false, false]
     */
    public PDFShading( // CSOK: ParameterNumber
            int theShadingType, PDFDeviceColorSpace theColorSpace,
            List theBackground, List theBBox,
            boolean theAntiAlias, List theCoords,
            List theDomain, PDFFunction theFunction,
            List theExtend) {
        super();
        this.shadingType = theShadingType; // 2 or 3
        this.colorSpace = theColorSpace;
        this.background = theBackground;
        this.bBox = theBBox;
        this.antiAlias = theAntiAlias;
        this.coords = theCoords;
        this.domain = theDomain;
        this.function = theFunction;
        this.extend = theExtend;
    }

    /**
     * Constructor for Type 4,6, or 7
     *
     * @param theShadingType 4, 6, or 7 depending on whether it's
     * Free-form gouraud-shaded triangle meshes, coons patch meshes,
     * or tensor product patch meshes, respectively.
     * @param theColorSpace "DeviceRGB" or similar.
     * @param theBackground An array of color components appropriate to the
     * colorspace key specifying a single color value.
     * This key is used by the f operator but ignored by the sh operator.
     * @param theBBox List of double's representing a rectangle
     * in the coordinate space that is current at the
     * time of shading is imaged. Temporary clipping
     * boundary.
     * @param theAntiAlias Default is false
     * @param theBitsPerCoordinate 1,2,4,8,12,16,24 or 32.
     * @param theBitsPerComponent 1,2,4,8,12, and 16
     * @param theBitsPerFlag 2,4,8.
     * @param theDecode List of Doubles see PDF 1.3 spec pages 303 to 312.
     * @param theFunction the PDFFunction
     */
    public PDFShading( // CSOK: ParameterNumber
            int theShadingType, PDFDeviceColorSpace theColorSpace,
            List theBackground, List theBBox,
            boolean theAntiAlias, int theBitsPerCoordinate,
            int theBitsPerComponent, int theBitsPerFlag,
            List theDecode, PDFFunction theFunction) {
        super();
        this.shadingType = theShadingType; // 4,6 or 7
        this.colorSpace = theColorSpace;
        this.background = theBackground;
        this.bBox = theBBox;
        this.antiAlias = theAntiAlias;
        this.bitsPerCoordinate = theBitsPerCoordinate;
        this.bitsPerComponent = theBitsPerComponent;
        this.bitsPerFlag = theBitsPerFlag;
        this.decode = theDecode;
        this.function = theFunction;
    }

    /**
     * Constructor for type 5
     *
     * @param theShadingType 5 for lattice-Form Gouraud shaded-triangle mesh
     * @param theColorSpace "DeviceRGB" or similar.
     * @param theBackground An array of color components appropriate to the
     * colorspace key specifying a single color value.
     * This key is used by the f operator but ignored by the sh operator.
     * @param theBBox List of double's representing a rectangle
     * in the coordinate space that is current at the
     * time of shading is imaged. Temporary clipping
     * boundary.
     * @param theAntiAlias Default is false
     * @param theBitsPerCoordinate 1,2,4,8,12,16, 24, or 32
     * @param theBitsPerComponent 1,2,4,8,12,24,32
     * @param theDecode List of Doubles. See page 305 in PDF 1.3 spec.
     * @param theVerticesPerRow number of vertices in each "row" of the lattice.
     * @param theFunction The PDFFunction that's mapped on to this shape
     */
    public PDFShading( // CSOK: ParameterNumber
            int theShadingType, PDFDeviceColorSpace theColorSpace,
            List theBackground, List theBBox,
            boolean theAntiAlias, int theBitsPerCoordinate,
            int theBitsPerComponent, List theDecode,
            int theVerticesPerRow, PDFFunction theFunction) {
        super();
        this.shadingType = theShadingType; // 5
        this.colorSpace = theColorSpace;
        this.background = theBackground;
        this.bBox = theBBox;
        this.antiAlias = theAntiAlias;
        this.bitsPerCoordinate = theBitsPerCoordinate;
        this.bitsPerComponent = theBitsPerComponent;
        this.decode = theDecode;
        this.verticesPerRow = theVerticesPerRow;
        this.function = theFunction;
    }

    /**
     * Get the name of this shading.
     *
     * @return the name of the shading
     */
    public String getName() {
        return (this.shadingName);
    }

    /**
     * Sets the name of the shading
     * @param name the name of the shading pattern. Can be anything
     * without spaces. "Shading1" or "Sh1" are good examples.
     */
    public void setName(String name) {
        if (name.indexOf(" ") >= 0) {
            throw new IllegalArgumentException(
                    "Shading name must not contain any spaces");
        }
        this.shadingName = name;
    }

    /**
     * Represent as PDF. Whatever the shadingType is, the correct
     * representation spits out. The sets of required and optional
     * attributes are different for each type, but if a required
     * attribute's object was constructed as null, then no error
     * is raised. Instead, the malformed PDF that was requested
     * by the construction is dutifully output.
     * This policy should be reviewed.
     *
     * @return the PDF string.
     */
    public String toPDFString() {
        StringBuffer p = new StringBuffer(128);
        p.append(getObjectID()
                 + "<< \n/ShadingType " + this.shadingType + " \n");
        if (this.colorSpace != null) {
            p.append("/ColorSpace /"
                     + this.colorSpace.getName() + " \n");
        }
        if (this.background != null) {
            appendNumberArray(p, "Background", this.background);
        }
        if (this.bBox != null) {
            // I've never seen an example, so I guess this is right.
            appendNumberArray(p, "BBox", this.bBox);
        }
        if (this.antiAlias) {
            p.append("/AntiAlias " + this.antiAlias + " \n");
        }
        // Here's where we differentiate based on what type it is.
        if (this.shadingType == 1) { // function based shading
            appendDomain(p);
            if (this.matrix != null) {
                appendNumberArray(p, "Matrix", this.matrix);
            }
            appendFunction(p);
        } else if ((this.shadingType == 2)
                   || (this.shadingType == 3)) {
            // 2 is axial shading (linear gradient)
            // 3 is radial shading (circular gradient)
            if (this.coords != null) {
                appendNumberArray(p, "Coords", this.coords);
            }
            appendDomain(p);
            if (this.extend != null) {
                p.append("/Extend [ ");
                int vectorSize = this.extend.size();
                for (int tempInt = 0; tempInt < vectorSize; tempInt++) {
                    p.append(((Boolean)this.extend.get(tempInt)) + " ");
                }
                p.append("] \n");
            } else {
                // NOTE(review): the PDF spec default for /Extend is [false false];
                // this code has always emitted [true true] when unset, which is kept
                // here to preserve existing rendering behavior.
                p.append("/Extend [ true true ] \n");
            }
            appendFunction(p);
        } else if ((this.shadingType == 4) || (this.shadingType == 6)
                   || (this.shadingType == 7)) {
            // 4: Free-form Gouraud-shaded triangle meshes
            // 6: coons patch meshes
            // 7: tensor product patch meshes (which no one ever uses)
            appendBitsEntries(p);
            if (this.bitsPerFlag > 0) {
                p.append("/BitsPerFlag " + this.bitsPerFlag + " \n");
            } else {
                p.append("/BitsPerFlag 2 \n");
            }
            if (this.decode != null) {
                // BUG FIX: decode holds Doubles (see field and constructor docs); the
                // old code cast each entry to Boolean, throwing ClassCastException.
                appendNumberArray(p, "Decode", this.decode);
            }
            appendFunction(p);
        } else if (this.shadingType == 5) {
            // Lattice-form Gouraud-shaded triangle mesh
            appendBitsEntries(p);
            if (this.decode != null) {
                // BUG FIX: decode entries are Doubles, not Booleans (as above).
                appendNumberArray(p, "Decode", this.decode);
            }
            appendFunction(p);
            if (this.verticesPerRow > 0) {
                p.append("/VerticesPerRow " + this.verticesPerRow + " \n");
            } else {
                p.append("/VerticesPerRow 2 \n");
            }
        }
        p.append(">> \nendobj\n");
        return (p.toString());
    }

    /**
     * Appends a dictionary entry holding a PDF array of numbers,
     * e.g. "/Coords [ 0 0 1 1 ] \n".
     *
     * @param p the buffer being built
     * @param key the dictionary key, without the leading slash
     * @param values a List of Double values
     */
    private static void appendNumberArray(StringBuffer p, String key, List values) {
        p.append("/" + key + " [ ");
        int vectorSize = values.size();
        for (int tempInt = 0; tempInt < vectorSize; tempInt++) {
            p.append(PDFNumber.doubleOut((Double)values.get(tempInt)) + " ");
        }
        p.append("] \n");
    }

    /** Appends the /Domain entry, defaulting to [ 0 1 ] when unset. */
    private void appendDomain(StringBuffer p) {
        if (this.domain != null) {
            appendNumberArray(p, "Domain", this.domain);
        } else {
            p.append("/Domain [ 0 1 ] \n");
        }
    }

    /** Appends the /Function entry referencing the attached function, if any. */
    private void appendFunction(StringBuffer p) {
        if (this.function != null) {
            p.append("/Function ");
            p.append(this.function.referencePDF() + " \n");
        }
    }

    /** Appends /BitsPerCoordinate and /BitsPerComponent with fallback values. */
    private void appendBitsEntries(StringBuffer p) {
        if (this.bitsPerCoordinate > 0) {
            p.append("/BitsPerCoordinate " + this.bitsPerCoordinate
                     + " \n");
        } else {
            p.append("/BitsPerCoordinate 1 \n");
        }
        if (this.bitsPerComponent > 0) {
            p.append("/BitsPerComponent " + this.bitsPerComponent
                     + " \n");
        } else {
            p.append("/BitsPerComponent 1 \n");
        }
    }

    /** Compares two possibly-null objects via equals(). */
    private static boolean equalOrBothNull(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /** {@inheritDoc} */
    protected boolean contentEquals(PDFObject obj) {
        if (obj == null) {
            return false;
        }
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof PDFShading)) {
            return false;
        }
        PDFShading shad = (PDFShading)obj;
        // Note: shadingName is deliberately excluded — content equality is used
        // to detect duplicate objects regardless of assigned name.
        return shadingType == shad.shadingType
                && antiAlias == shad.antiAlias
                && bitsPerCoordinate == shad.bitsPerCoordinate
                && bitsPerFlag == shad.bitsPerFlag
                && bitsPerComponent == shad.bitsPerComponent
                && verticesPerRow == shad.verticesPerRow
                && equalOrBothNull(colorSpace, shad.colorSpace)
                && equalOrBothNull(background, shad.background)
                && equalOrBothNull(bBox, shad.bBox)
                && equalOrBothNull(domain, shad.domain)
                && equalOrBothNull(matrix, shad.matrix)
                && equalOrBothNull(coords, shad.coords)
                && equalOrBothNull(extend, shad.extend)
                && equalOrBothNull(decode, shad.decode)
                && equalOrBothNull(function, shad.function);
    }
}
|
|
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.services.backend.builder.service;
import java.util.Collection;
import java.util.Map;
import java.util.function.Consumer;
import org.guvnor.common.services.project.builder.model.BuildResults;
import org.guvnor.common.services.project.builder.model.IncrementalBuildResults;
import org.guvnor.common.services.project.model.Module;
import org.guvnor.common.services.project.service.DeploymentMode;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.services.backend.builder.ala.BuildPipelineInvoker;
import org.kie.workbench.common.services.backend.builder.ala.LocalBinaryConfig;
import org.kie.workbench.common.services.backend.builder.ala.LocalBuildConfig;
import org.kie.workbench.common.services.backend.builder.core.DeploymentVerifier;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.uberfire.backend.vfs.Path;
import org.uberfire.workbench.events.ResourceChange;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
/**
 * Unit tests for BuildServiceHelper: each build variant must delegate to the
 * {@link BuildPipelineInvoker} with the expected request and surface the
 * results produced by the pipeline.
 */
@RunWith(MockitoJUnitRunner.class)
public class BuildServiceHelperTest {

    @Mock
    private BuildPipelineInvoker pipelineInvoker;

    @Mock
    private DeploymentVerifier deploymentVerifier;

    private BuildServiceHelper serviceHelper;

    @Mock
    private Module module;

    @Mock
    private LocalBinaryConfig localBinaryConfig;

    @Mock
    private BuildResults buildResults;

    @Mock
    private IncrementalBuildResults incrementalBuildResults;

    @Mock
    private Map<Path, Collection<ResourceChange>> resourceChanges;

    @Mock
    private Path resource;

    private BuildPipelineInvoker.LocalBuildRequest expectedRequest;

    @Before
    public void setUp() {
        serviceHelper = new BuildServiceHelper(pipelineInvoker, deploymentVerifier);
    }

    @Test
    public void testLocalBuild() {
        prepareLocalFullBuild();
        when(localBinaryConfig.getBuildResults()).thenReturn(buildResults);

        BuildResults actual = serviceHelper.localBuild(module);

        assertEquals(buildResults, actual);
        verify(pipelineInvoker, times(1))
                .invokeLocalBuildPipeLine(eq(expectedRequest), any(Consumer.class));
    }

    @Test
    public void testLocalBuildWithConsumer() {
        prepareLocalFullBuild();

        // The config handed to the consumer must be the one the pipeline produced.
        serviceHelper.localBuild(module,
                                 (LocalBinaryConfig produced) -> assertEquals(localBinaryConfig, produced));

        verify(pipelineInvoker, times(1))
                .invokeLocalBuildPipeLine(eq(expectedRequest), any(Consumer.class));
    }

    private void prepareLocalFullBuild() {
        expectedRequest = BuildPipelineInvoker.LocalBuildRequest.newFullBuildRequest(module);
        preparePipelineInvocation(expectedRequest);
    }

    @Test
    public void testLocalBuildAndDeployForced() {
        prepareBuildAndDeploy(module, LocalBuildConfig.DeploymentType.FORCED, false);
        BuildResults actual = serviceHelper.localBuildAndDeploy(module, DeploymentMode.FORCED, false);
        verifyBuildAndDeploy(actual);
    }

    @Test
    public void testLocalBuildAndDeployValidated() {
        prepareBuildAndDeploy(module, LocalBuildConfig.DeploymentType.VALIDATED, false);
        BuildResults actual = serviceHelper.localBuildAndDeploy(module, DeploymentMode.VALIDATED, false);
        verifyBuildAndDeploy(actual);
    }

    private void prepareBuildAndDeploy(Module module,
                                       LocalBuildConfig.DeploymentType deploymentType,
                                       boolean suppressHandlers) {
        expectedRequest = BuildPipelineInvoker.LocalBuildRequest
                .newFullBuildAndDeployRequest(module, deploymentType, suppressHandlers);
        preparePipelineInvocation(expectedRequest);
        when(localBinaryConfig.getBuildResults()).thenReturn(buildResults);
    }

    private void verifyBuildAndDeploy(BuildResults result) {
        assertEquals(buildResults, result);
        verify(pipelineInvoker, times(1))
                .invokeLocalBuildPipeLine(eq(expectedRequest), any(Consumer.class));
    }

    @Test
    public void testLocalBuildWithAddResource() {
        testLocalBuildWithResource(module, LocalBuildConfig.BuildType.INCREMENTAL_ADD_RESOURCE, resource);
    }

    @Test
    public void testLocalBuildWithDeleteResource() {
        testLocalBuildWithResource(module, LocalBuildConfig.BuildType.INCREMENTAL_DELETE_RESOURCE, resource);
    }

    @Test
    public void testLocalBuildWithUpdateResource() {
        testLocalBuildWithResource(module, LocalBuildConfig.BuildType.INCREMENTAL_UPDATE_RESOURCE, resource);
    }

    private void testLocalBuildWithResource(Module module,
                                            LocalBuildConfig.BuildType buildType,
                                            Path resource) {
        BuildPipelineInvoker.LocalBuildRequest buildRequest =
                BuildPipelineInvoker.LocalBuildRequest.newIncrementalBuildRequest(module, buildType, resource);
        preparePipelineInvocation(buildRequest);
        when(localBinaryConfig.getIncrementalBuildResults()).thenReturn(incrementalBuildResults);

        IncrementalBuildResults actual = serviceHelper.localBuild(module, buildType, resource);

        assertEquals(incrementalBuildResults, actual);
        verify(pipelineInvoker, times(1))
                .invokeLocalBuildPipeLine(eq(buildRequest), any(Consumer.class));
    }

    @Test
    public void testLocalBuildWithResourceChanges() {
        BuildPipelineInvoker.LocalBuildRequest buildRequest =
                BuildPipelineInvoker.LocalBuildRequest.newIncrementalBuildRequest(module, resourceChanges);
        preparePipelineInvocation(buildRequest);
        when(localBinaryConfig.getIncrementalBuildResults()).thenReturn(incrementalBuildResults);

        IncrementalBuildResults actual = serviceHelper.localBuild(module, resourceChanges);

        assertEquals(incrementalBuildResults, actual);
        verify(pipelineInvoker, times(1))
                .invokeLocalBuildPipeLine(eq(buildRequest), any(Consumer.class));
    }

    /** Stubs the pipeline so it hands the configured LocalBinaryConfig to the consumer. */
    private void preparePipelineInvocation(BuildPipelineInvoker.LocalBuildRequest buildRequest) {
        doAnswer(invocation -> {
            Consumer consumer = (Consumer) invocation.getArguments()[1];
            consumer.accept(localBinaryConfig);
            return null;
        }).when(pipelineInvoker).invokeLocalBuildPipeLine(eq(buildRequest), any(Consumer.class));
    }
}
|
|
/*
* Copyright (c) 2014 Oculus Info Inc.
* http://www.oculusinfo.com/
*
* Released under the MIT License.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oculusinfo.binning.visualization;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.InputStream;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.BorderFactory;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import org.json.JSONObject;
import com.oculusinfo.binning.TileIndex;
import com.oculusinfo.binning.TilePyramid;
import com.oculusinfo.binning.impl.AOITilePyramid;
import com.oculusinfo.binning.impl.WebMercatorTilePyramid;
import com.oculusinfo.binning.io.PyramidIO;
import com.oculusinfo.binning.metadata.PyramidMetaData;
import com.oculusinfo.binning.util.AvroJSONConverter;
public class JsonTileVisualizer extends JFrame {
// Serialization id required by the JFrame hierarchy.
private static final long serialVersionUID = 1L;
// BUG FIX: the logger was created for BinVisualizer.class (copy-paste from that
// class); this class should log under its own name.
private static final Logger LOGGER = Logger.getLogger(JsonTileVisualizer.class.getName());
// Property name fired by PyramidIOSelector panels when their PyramidIO changes.
static final String PYRAMID_IO = "pyramidIo";
/**
 * Entry point: opens the tile visualizer window.
 *
 * @param args command-line arguments (unused)
 */
public static void main (String[] args) {
new JsonTileVisualizer().setVisible(true);
}
/** Pyramid I/O back-ends selectable in the UI. */
private static enum IOEnum {
File,
HBase,
SQLite,
ZipStream
}
/** Tile pyramid projections the visualizer can display. */
private static enum PyramidEnum {
Geographic,
AreaOfInterest
}
// Most recently fetched tile as raw JSON (null until a tile is shown).
private JSONObject _tile;
// Text area displaying the tile's JSON representation.
private JTextArea _tileVis;
// Source of tile data, produced by the current I/O selector.
private PyramidIO _pyramidIO;
// Tiling scheme of the current pyramid; presumably derived from metadata — TODO confirm.
private TilePyramid _pyramid;
// Id of the pyramid currently being browsed.
private String _pyramidId;
// Layout manager for the chooser panel.
private GroupLayout _layout;
// Right-hand panel holding all tile-selection controls.
private JPanel _tileChooser;
// Shared file chooser reused by file-based I/O selectors.
private JFileChooser _fileChooser;
// Drop-down choosing the I/O back-end.
private JComboBox<IOEnum> _ioField;
// Container hosting whichever selector panel matches the chosen I/O type.
private JPanel _ioSelectorContainer;
// Current selector (null until an I/O type is chosen).
private PyramidIOSelector _ioSelector;
// Displays the pyramid type (disabled; not user-editable).
private JComboBox<PyramidEnum> _pyramidField;
// Description label for the pyramid type.
private JLabel _pyramidDesc;
// Text field for the pyramid id.
private JTextField _idField;
// Zoom level / tile coordinate selectors.
private JComboBox<Integer> _levelField;
private JComboBox<Integer> _xField;
private JComboBox<Integer> _yField;
// Button that triggers fetching and showing the tile.
private JButton _show;
// Whether to show bin values as text.
private JCheckBox _showText;
/**
 * Builds the frame: tile JSON display on the left, tile-selection
 * controls on the right, separated by a split pane.
 */
public JsonTileVisualizer () {
    setupMenus();
    setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    setLocation(200, 50);
    setSize(1200, 1000);

    // No tile loaded yet; all pyramid state starts empty.
    _tile = null;
    _tileVis = new JTextArea();
    _pyramidIO = null;
    _pyramid = null;
    _pyramidId = null;
    _fileChooser = new JFileChooser();
    createTileChooser();

    JSplitPane splitPane = new JSplitPane();
    splitPane.setResizeWeight(0.8);
    splitPane.setLeftComponent(new JScrollPane(_tileVis));
    splitPane.setRightComponent(_tileChooser);

    getContentPane().setLayout(new BorderLayout());
    getContentPane().add(splitPane, BorderLayout.CENTER);
}
/** Installs the menu bar with a single File &gt; Exit item. */
private void setupMenus () {
    JMenuItem exitItem = new JMenuItem("Exit");
    exitItem.setMnemonic('x');
    exitItem.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed (ActionEvent event) {
            System.exit(0);
        }
    });

    JMenu fileMenu = new JMenu("File");
    fileMenu.setMnemonic('f');
    fileMenu.add(exitItem);

    JMenuBar menuBar = new JMenuBar();
    menuBar.add(fileMenu);
    setJMenuBar(menuBar);
}
/**
 * Builds the right-hand tile chooser panel: I/O type selection, pyramid
 * type/id fields, tile coordinate combo boxes, and the "Show tile" button,
 * arranged with a GroupLayout of label/field rows.
 */
private void createTileChooser () {
// I/O type drop-down; changing it swaps the selector panel below (IOFieldUpdate).
JLabel ioLabel = new JLabel("I/O type:");
ioLabel.setHorizontalAlignment(SwingConstants.RIGHT);
_ioField = new JComboBox<>(IOEnum.values());
_ioField.addActionListener(new IOFieldUpdate());
// Container hosting whichever PyramidIOSelector matches the chosen I/O type.
_ioSelectorContainer = new JPanel();
_ioSelectorContainer.setMaximumSize(new Dimension(100, 75));
_ioSelectorContainer.setLayout(new BorderLayout());
_ioSelectorContainer.setBorder(BorderFactory.createLineBorder(Color.BLACK, 1));
_ioSelector = null;
// Pyramid type display; disabled because the type is derived, not user-chosen.
JLabel pyramidLabel = new JLabel("Pyramid type:");
pyramidLabel.setHorizontalAlignment(SwingConstants.RIGHT);
_pyramidField = new JComboBox<>(PyramidEnum.values());
_pyramidField.setEnabled(false);
_pyramidDesc = new JLabel();
_pyramidDesc.setHorizontalAlignment(SwingConstants.RIGHT);
// Pyramid id text field; document edits trigger IDUpdate.
JLabel idLabel = new JLabel("Pyramid id:");
idLabel.setHorizontalAlignment(SwingConstants.RIGHT);
_idField = new JTextField();
_idField.getDocument().addDocumentListener(new IDUpdate());
// Zoom level selector; changes trigger LevelUpdate (which refreshes x/y choices).
JLabel levelLabel = new JLabel("Zoom level:");
levelLabel.setHorizontalAlignment(SwingConstants.RIGHT);
_levelField = new JComboBox<>();
_levelField.addActionListener(new LevelUpdate());
// Tile coordinate selectors.
JLabel xLabel = new JLabel("Tile x coordinate:");
xLabel.setHorizontalAlignment(SwingConstants.RIGHT);
_xField = new JComboBox<>();
JLabel yLabel = new JLabel("Tile y coordinate:");
yLabel.setHorizontalAlignment(SwingConstants.RIGHT);
_yField = new JComboBox<>();
// Action button and display option.
_show = new JButton("Show tile");
_show.addActionListener(new ShowTile());
_showText = new JCheckBox("Bin values");
_showText.setHorizontalAlignment(SwingConstants.RIGHT);
// Lay out label/field pairs in rows; extraArea soaks up leftover vertical space.
JPanel chooser = new JPanel();
_layout = new GroupLayout(chooser);
chooser.setLayout(_layout);
int pref = GroupLayout.PREFERRED_SIZE;
int max = Short.MAX_VALUE;
JLabel extraArea = new JLabel();
_layout.setHorizontalGroup(
_layout.createParallelGroup()
.addGroup(_layout.createSequentialGroup().addComponent(ioLabel, 0, pref, max).addComponent(_ioField))
.addGroup(_layout.createSequentialGroup().addGap(25).addComponent(_ioSelectorContainer, 0, pref, max))
.addGroup(_layout.createSequentialGroup().addComponent(pyramidLabel, 0, pref, max).addComponent(_pyramidField))
.addGroup(_layout.createSequentialGroup().addComponent(_pyramidDesc, 0, pref, max))
.addGroup(_layout.createSequentialGroup().addComponent(idLabel, 0, pref, max).addComponent(_idField))
.addGroup(_layout.createSequentialGroup().addComponent(levelLabel, 0, pref, max).addComponent(_levelField))
.addGroup(_layout.createSequentialGroup().addComponent(xLabel, 0, pref, max).addComponent(_xField))
.addGroup(_layout.createSequentialGroup().addComponent(yLabel, 0, pref, max).addComponent(_yField))
.addComponent(_show, Alignment.TRAILING)
.addComponent(_showText, Alignment.TRAILING)
.addComponent(extraArea)
);
_layout.setVerticalGroup(
_layout.createSequentialGroup()
.addGroup(_layout.createParallelGroup().addComponent(ioLabel).addComponent(_ioField))
.addComponent(_ioSelectorContainer, 0, pref, 100)
.addGroup(_layout.createParallelGroup().addComponent(pyramidLabel).addComponent(_pyramidField))
.addGroup(_layout.createParallelGroup().addComponent(_pyramidDesc))
.addGroup(_layout.createParallelGroup().addComponent(idLabel).addComponent(_idField))
.addGroup(_layout.createParallelGroup().addComponent(levelLabel).addComponent(_levelField))
.addGroup(_layout.createParallelGroup().addComponent(xLabel).addComponent(_xField))
.addGroup(_layout.createParallelGroup().addComponent(yLabel).addComponent(_yField))
.addComponent(_show)
.addComponent(_showText)
.addComponent(extraArea, GroupLayout.PREFERRED_SIZE, GroupLayout.PREFERRED_SIZE, Short.MAX_VALUE)
);
// Keep all input widgets the same size for a tidy column.
_layout.linkSize(_ioField, _pyramidField, _idField, _levelField, _xField, _yField);
_tileChooser = chooser;
// Select defaults last so the listeners fire against fully-built widgets.
_ioField.setSelectedIndex(0);
_pyramidField.setSelectedIndex(0);
}
/**
 * Swaps the pyramid I/O selector panel to match the chosen I/O type.
 *
 * If the requested selector type is already showing, nothing changes (the
 * existing selector and its property-change listener are kept). Otherwise the
 * container is cleared, a new selector is installed, and a listener is attached
 * that refreshes the pyramid I/O and available levels when the selector's
 * PYRAMID_IO property changes.
 *
 * BUG FIX: the original dereferenced {@code _ioSelector.getPanel()}
 * unconditionally after the switch, which threw a NullPointerException when the
 * default branch cleared the selector to null.
 *
 * @param type the selected I/O backend type
 */
private void setIOType (IOEnum type) {
    switch (type) {
        case File:
            if (null != _ioSelector) {
                if (_ioSelector instanceof FileSystemPyramidIOSelector) {
                    return; // already showing; keep selector and listener
                }
                _ioSelectorContainer.removeAll();
            }
            _ioSelector = new FileSystemPyramidIOSelector(_fileChooser);
            _ioSelectorContainer.add(_ioSelector.getPanel(), BorderLayout.CENTER);
            break;
        case HBase:
            if (null != _ioSelector) {
                if (_ioSelector instanceof HBasePyramidIOSelector) {
                    return;
                }
                _ioSelectorContainer.removeAll();
            }
            _ioSelector = new HBasePyramidIOSelector();
            _ioSelectorContainer.add(_ioSelector.getPanel(), BorderLayout.CENTER);
            break;
        case SQLite:
            if (null != _ioSelector) {
                if (_ioSelector instanceof SQLitePyramidIOSelector) {
                    return;
                }
                _ioSelectorContainer.removeAll();
            }
            _ioSelector = new SQLitePyramidIOSelector();
            _ioSelectorContainer.add(_ioSelector.getPanel(), BorderLayout.CENTER);
            break;
        case ZipStream:
            if (null != _ioSelector) {
                if (_ioSelector instanceof ZipFilePyramidIOSelector) {
                    return;
                }
                _ioSelectorContainer.removeAll();
            }
            _ioSelector = new ZipFilePyramidIOSelector(_fileChooser);
            _ioSelectorContainer.add(_ioSelector.getPanel(), BorderLayout.CENTER);
            break;
        default:
            // Unknown type: clear any existing selector entirely.
            if (null != _ioSelector) {
                _ioSelectorContainer.removeAll();
                _ioSelector = null;
            }
    }
    // The default branch may have left no selector; only attach a listener when
    // a fresh selector was actually installed (early returns above skip this).
    if (null != _ioSelector) {
        _ioSelector.getPanel().addPropertyChangeListener(new PropertyChangeListener() {
            @Override
            public void propertyChange (PropertyChangeEvent event) {
                if (PYRAMID_IO.equals(event.getPropertyName())) {
                    _pyramidIO = _ioSelector.getPyramidIO();
                    updateAvailableLevels();
                }
            }
        });
    }
    _tileChooser.validate();
}
/**
 * Records a new pyramid id and refreshes dependent UI state.
 * No-op when the id is unchanged (null-safe comparison).
 *
 * @param newId the new pyramid id (may be null)
 */
private void setPyramidId (String newId) {
    if (objectsEqual(newId, _pyramidId)) {
        return;
    }
    _pyramidId = newId;
    // Propagate the change to the level list and pyramid-type display.
    updateAvailableLevels();
    updatePyramidType();
}
/**
 * Repopulates the zoom-level combo box from the current pyramid's metadata.
 *
 * On any failure (missing io/id, metadata read error) the level/x/y combo
 * boxes are simply cleared.
 *
 * BUG FIX: the original called {@code removeAll()} on the combo boxes, which
 * removes child {@code Component}s rather than combo-box items (the correct
 * call, {@code removeAllItems()}, was already used on other paths), and it
 * called {@code setSelectedIndex(0)} even when the metadata reported no valid
 * zoom levels, which throws. The log call also dropped the exception.
 */
private void updateAvailableLevels () {
    if (null != _pyramidIO && null != _pyramidId && !_pyramidId.isEmpty()) {
        try {
            String rawMetaData = _pyramidIO.readMetaData(_pyramidId);
            PyramidMetaData metaData = new PyramidMetaData(rawMetaData);
            // removeAllItems clears combo-box entries; removeAll only removes
            // child Components and would leave stale items behind.
            _levelField.removeAllItems();
            _xField.removeAllItems();
            _yField.removeAllItems();
            List<Integer> levels = metaData.getValidZoomLevels();
            for (Integer level: levels) {
                _levelField.addItem(level);
            }
            // Guard: selecting index 0 on an empty combo box throws.
            if (!levels.isEmpty()) {
                _levelField.setSelectedIndex(0);
            }
            return;
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, "Error getting level metadata for "
                       + _pyramidId, e);
        }
    }
    _levelField.removeAllItems();
    _xField.removeAllItems();
    _yField.removeAllItems();
}
/**
 * Reads the current pyramid's metadata to determine its tile-pyramid type and
 * reflects that in the pyramid-type combo box and description label.
 *
 * A metadata read failure is logged and treated the same as "no pyramid":
 * the selection is cleared. An unrecognized pyramid subtype leaves the
 * display untouched.
 */
private void updatePyramidType () {
    _pyramid = null;
    try {
        PyramidMetaData metaData =
            new PyramidMetaData(_pyramidIO.readMetaData(_pyramidId));
        _pyramid = metaData.getTilePyramid();
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Error getting level metadata for "
                   + _pyramidId);
    }
    if (_pyramid instanceof WebMercatorTilePyramid) {
        _pyramidField.setSelectedItem(PyramidEnum.Geographic);
        _pyramidDesc.setText("");
    } else if (_pyramid instanceof AOITilePyramid) {
        _pyramidField.setSelectedItem(PyramidEnum.AreaOfInterest);
        // Level-0 tile bounds cover the whole pyramid's area of interest.
        Rectangle2D bounds = _pyramid.getTileBounds(new TileIndex(0, 0, 0));
        _pyramidDesc.setText(String.format("bounds: [%.4f, %.4f] to [%.4f, %.4f]",
                                           bounds.getMinX(), bounds.getMinY(),
                                           bounds.getMaxX(), bounds.getMaxY()));
    } else if (null == _pyramid) {
        _pyramidField.setSelectedIndex(-1);
        _pyramidDesc.setText("");
    }
}
/**
 * Fills the x/y tile-coordinate combo boxes for the currently selected zoom
 * level; a level-n pyramid is 2^n tiles across in each axis. Clears both
 * boxes when no level is selected.
 */
private void updateAvailableCoordinates () {
    Integer level = (Integer) _levelField.getSelectedItem();
    _xField.removeAllItems();
    _yField.removeAllItems();
    if (null == level) {
        return;
    }
    int tilesAcross = 1 << level;
    for (int coord = 0; coord < tilesAcross; ++coord) {
        _xField.addItem(coord);
        _yField.addItem(coord);
    }
}
/**
 * Fetches the currently selected tile and renders its JSON form into the
 * tile-visualization text area; clears the area on any failure or when the
 * tile cannot be converted.
 *
 * BUG FIX: the original never closed the tile {@code InputStream}; this
 * version uses try-with-resources.
 */
private void showCurrentTile () {
    if (null == _pyramidId) return;
    if (null == _pyramidIO) return;
    Integer level = (Integer) _levelField.getSelectedItem();
    Integer x = (Integer) _xField.getSelectedItem();
    Integer y = (Integer) _yField.getSelectedItem();
    if (null == level || null == x || null == y) return;
    TileIndex index = new TileIndex(level, x, y);
    try (InputStream tileStream = _pyramidIO.getTileStream(_pyramidId, null, index)) {
        _tile = AvroJSONConverter.convert(tileStream);
        if (null == _tile) {
            _tileVis.setText("");
        } else {
            // Pretty-print with a 2-space indent.
            _tileVis.setText(_tile.toString(2));
        }
    } catch (Exception e) {
        // Best-effort display: any fetch/convert failure just blanks the view.
        _tileVis.setText("");
    }
}
/** Listener on the I/O type combo box: swaps the I/O selector panel on change. */
private class IOFieldUpdate implements ActionListener {
@Override
public void actionPerformed (ActionEvent event) {
setIOType((IOEnum) _ioField.getSelectedItem());
}
}
/**
 * Listener on the pyramid-id text field: every kind of document edit pushes
 * the current text into the chooser via {@link #setPyramidId}.
 */
private class IDUpdate implements DocumentListener {
    // All three document events funnel into the same state update.
    private void propagate () {
        setPyramidId(_idField.getText());
    }
    @Override
    public void changedUpdate (DocumentEvent event) {
        propagate();
    }
    @Override
    public void insertUpdate (DocumentEvent event) {
        propagate();
    }
    @Override
    public void removeUpdate (DocumentEvent event) {
        propagate();
    }
}
/** Listener on the zoom-level combo box: refreshes the x/y coordinate lists. */
private class LevelUpdate implements ActionListener {
@Override
public void actionPerformed (ActionEvent event) {
updateAvailableCoordinates();
}
}
/** Listener on the "show" control: fetches and displays the selected tile. */
private class ShowTile implements ActionListener {
@Override
public void actionPerformed (ActionEvent e) {
showCurrentTile();
}
}
/** Null-safe equality: true iff both are null or {@code a.equals(b)}. */
private static boolean objectsEqual (Object a, Object b) {
    return (null == a) ? (null == b) : a.equals(b);
}
}
|
|
package org.scribble.cli;
import java.io.File;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.scribble.cli.CommandLine.ArgFlag;
// String[] -> Map<CommandLine.Arg, String[]> -- Map array values are the arguments associated to each CommandLine.Arg
public class CommandLineArgParser
{
	// Unique flags: each may appear at most once on the command line.
	public static final String JUNIT_FLAG = "-junit"; // For internal use (JUnit test harness)
	public static final String VERBOSE_FLAG = "-V";
	public static final String IMPORT_PATH_FLAG = "-ip";
	public static final String API_OUTPUT_DIR_FLAG = "-d";
	public static final String STATECHAN_SUBTYPES_FLAG = "-subtypes";
	public static final String OLD_WF_FLAG = "-oldwf";
	public static final String NO_LIVENESS_FLAG = "-nolive";
	public static final String LTSCONVERT_MIN_FLAG = "-minlts";
	public static final String FAIR_FLAG = "-fair";
	public static final String NO_LOCAL_CHOICE_SUBJECT_CHECK = "-nolocalchoicecheck";
	public static final String NO_ACCEPT_CORRELATION_CHECK = "-nocorrelation";
	public static final String DOT_FLAG = "-dot";
	public static final String AUT_FLAG = "-aut";
	public static final String NO_VALIDATION_FLAG = "-novalid";
	public static final String NO_MODULE_NAME_CHECK_FLAG = "-nomodnamecheck";
	public static final String INLINE_MAIN_MOD_FLAG = "-inline";
	public static final String F17_FLAG = "-f17";

	// Non-unique flags: may be repeated; their argument lists are concatenated.
	public static final String PROJECT_FLAG = "-project";
	public static final String EFSM_FLAG = "-fsm";
	public static final String VALIDATION_EFSM_FLAG = "-vfsm";
	public static final String UNFAIR_EFSM_FLAG = "-ufsm";
	public static final String EFSM_PNG_FLAG = "-fsmpng";
	public static final String VALIDATION_EFSM_PNG_FLAG = "-vfsmpng";
	public static final String UNFAIR_EFSM_PNG_FLAG = "-ufsmpng";
	public static final String SGRAPH_FLAG = "-model";
	public static final String UNFAIR_SGRAPH_FLAG = "-umodel";
	public static final String SGRAPH_PNG_FLAG = "-modelpng";
	public static final String UNFAIR_SGRAPH_PNG_FLAG = "-umodelpng";
	public static final String API_GEN_FLAG = "-api";
	public static final String SESSION_API_GEN_FLAG = "-sessapi";
	public static final String STATECHAN_API_GEN_FLAG = "-chanapi";

	// Flag-string -> ArgFlag lookup tables.
	// BUG FIX: the original populated these static maps from *instance*
	// initialiser blocks, so they were empty until the first parser instance was
	// constructed and were redundantly re-populated (with concurrent-mutation
	// risk) on every subsequent construction. Static initialisers run exactly
	// once, at class load, before any use.
	private static final Map<String, CommandLine.ArgFlag> UNIQUE_FLAGS = new HashMap<>();
	static
	{
		UNIQUE_FLAGS.put(JUNIT_FLAG, CommandLine.ArgFlag.JUNIT);
		UNIQUE_FLAGS.put(VERBOSE_FLAG, CommandLine.ArgFlag.VERBOSE);
		UNIQUE_FLAGS.put(IMPORT_PATH_FLAG, CommandLine.ArgFlag.IMPORT_PATH);
		UNIQUE_FLAGS.put(API_OUTPUT_DIR_FLAG, CommandLine.ArgFlag.API_OUTPUT);
		UNIQUE_FLAGS.put(STATECHAN_SUBTYPES_FLAG, CommandLine.ArgFlag.SCHAN_API_SUBTYPES);
		UNIQUE_FLAGS.put(OLD_WF_FLAG, CommandLine.ArgFlag.OLD_WF);
		UNIQUE_FLAGS.put(LTSCONVERT_MIN_FLAG, CommandLine.ArgFlag.LTSCONVERT_MIN);
		UNIQUE_FLAGS.put(FAIR_FLAG, CommandLine.ArgFlag.FAIR);
		UNIQUE_FLAGS.put(NO_LOCAL_CHOICE_SUBJECT_CHECK, CommandLine.ArgFlag.NO_LOCAL_CHOICE_SUBJECT_CHECK);
		UNIQUE_FLAGS.put(NO_ACCEPT_CORRELATION_CHECK, CommandLine.ArgFlag.NO_ACCEPT_CORRELATION_CHECK);
		UNIQUE_FLAGS.put(DOT_FLAG, CommandLine.ArgFlag.DOT);
		UNIQUE_FLAGS.put(AUT_FLAG, CommandLine.ArgFlag.AUT);
		UNIQUE_FLAGS.put(NO_VALIDATION_FLAG, CommandLine.ArgFlag.NO_VALIDATION);
		UNIQUE_FLAGS.put(NO_MODULE_NAME_CHECK_FLAG, CommandLine.ArgFlag.NO_MODULE_NAME_CHECK);
		UNIQUE_FLAGS.put(INLINE_MAIN_MOD_FLAG, CommandLine.ArgFlag.INLINE_MAIN_MOD);
		UNIQUE_FLAGS.put(F17_FLAG, CommandLine.ArgFlag.F17);
		// NOTE(review): NO_LIVENESS_FLAG ("-nolive") has a case in parseFlag but is
		// not registered here, so parseArgs never recognises it as a flag and it
		// falls through to main-module parsing instead -- TODO register it against
		// the matching CommandLine.ArgFlag constant once confirmed to exist.
	}

	private static final Map<String, CommandLine.ArgFlag> NON_UNIQUE_FLAGS = new HashMap<>();
	static
	{
		NON_UNIQUE_FLAGS.put(PROJECT_FLAG, CommandLine.ArgFlag.PROJECT);
		NON_UNIQUE_FLAGS.put(EFSM_FLAG, CommandLine.ArgFlag.EFSM);
		NON_UNIQUE_FLAGS.put(VALIDATION_EFSM_FLAG, CommandLine.ArgFlag.VALIDATION_EFSM);
		NON_UNIQUE_FLAGS.put(UNFAIR_EFSM_FLAG, CommandLine.ArgFlag.UNFAIR_EFSM);
		NON_UNIQUE_FLAGS.put(EFSM_PNG_FLAG, CommandLine.ArgFlag.EFSM_PNG);
		NON_UNIQUE_FLAGS.put(VALIDATION_EFSM_PNG_FLAG, CommandLine.ArgFlag.VALIDATION_EFSM_PNG);
		NON_UNIQUE_FLAGS.put(UNFAIR_EFSM_PNG_FLAG, CommandLine.ArgFlag.UNFAIR_EFSM_PNG);
		NON_UNIQUE_FLAGS.put(SGRAPH_FLAG, CommandLine.ArgFlag.SGRAPH);
		NON_UNIQUE_FLAGS.put(UNFAIR_SGRAPH_FLAG, CommandLine.ArgFlag.UNFAIR_SGRAPH);
		NON_UNIQUE_FLAGS.put(SGRAPH_PNG_FLAG, CommandLine.ArgFlag.SGRAPH_PNG);
		NON_UNIQUE_FLAGS.put(UNFAIR_SGRAPH_PNG_FLAG, CommandLine.ArgFlag.UNFAIR_SGRAPH_PNG);
		NON_UNIQUE_FLAGS.put(API_GEN_FLAG, CommandLine.ArgFlag.API_GEN);
		NON_UNIQUE_FLAGS.put(SESSION_API_GEN_FLAG, CommandLine.ArgFlag.SESS_API_GEN);
		NON_UNIQUE_FLAGS.put(STATECHAN_API_GEN_FLAG, CommandLine.ArgFlag.SCHAN_API_GEN);
	}

	// Union of the two tables; static initialisers run in textual order, so both
	// source maps are fully populated by the time this runs.
	private static final Map<String, CommandLine.ArgFlag> FLAGS = new HashMap<>();
	static
	{
		FLAGS.putAll(UNIQUE_FLAGS);
		FLAGS.putAll(NON_UNIQUE_FLAGS);
	}

	private final String[] args;
	// Result of parsing: ArgFlag -> its (possibly concatenated) argument list.
	private final Map<CommandLine.ArgFlag, String[]> parsed = new HashMap<>();

	/**
	 * Parses the given command line eagerly; the result is available via
	 * {@link #getArgs()}.
	 *
	 * @throws CommandLineException on any unknown flag, missing flag argument,
	 *         duplicate unique flag, or bad/duplicate main-module argument
	 */
	public CommandLineArgParser(String[] args) throws CommandLineException
	{
		this.args = args;
		parseArgs();
	}

	/** Returns the parsed arguments (live map, keyed by ArgFlag). */
	public Map<CommandLine.ArgFlag, String[]> getArgs()
	{
		return this.parsed;
	}

	// Walks the raw argument array; each recognised flag consumes its own
	// arguments (parseFlag returns the index of the last one consumed), and the
	// single non-flag token is taken as the main module.
	private void parseArgs() throws CommandLineException
	{
		for (int i = 0; i < this.args.length; i++)
		{
			String arg = this.args[i];
			if (FLAGS.containsKey(arg))
			{
				i = this.parseFlag(i);
			}
			else
			{
				if (isMainModuleParsed())
				{
					if (arg.startsWith("-"))
					{
						throw new CommandLineException("Unknown flag or bad main module arg: " + arg);
					}
					// May actually be the second bad argument -- we didn't validate the value of the main arg
					throw new CommandLineException("Bad/multiple main module arg: " + arg);
				}
				parseMain(i);
			}
		}
	}

	// A main module may come either as a plain argument or inline via -inline.
	private boolean isMainModuleParsed()
	{
		return this.parsed.containsKey(CommandLine.ArgFlag.MAIN_MOD) || this.parsed.containsKey(CommandLine.ArgFlag.INLINE_MAIN_MOD);
	}

	// Pre: i is the index of the current flag to parse
	// Post: i is the index of the last argument parsed -- parseArgs does the index increment to the next current flag
	// Currently allows repeat flag decls: next overrides previous
	private int parseFlag(int i) throws CommandLineException
	{
		String flag = this.args[i];
		switch (flag)
		{
			// Unique flags
			case IMPORT_PATH_FLAG:
			{
				return parseImportPath(i);
			}
			case INLINE_MAIN_MOD_FLAG:
			{
				if (isMainModuleParsed())
				{
					throw new CommandLineException("Multiple main modules given.");
				}
				return parseInlineMainModule(i);
			}
			case F17_FLAG:
			{
				return parseF17(i);
			}
			case JUNIT_FLAG:
			case VERBOSE_FLAG:
			case STATECHAN_SUBTYPES_FLAG:
			case OLD_WF_FLAG:
			case NO_LIVENESS_FLAG:
			case LTSCONVERT_MIN_FLAG:
			case FAIR_FLAG:
			case NO_LOCAL_CHOICE_SUBJECT_CHECK:
			case NO_ACCEPT_CORRELATION_CHECK:
			case NO_VALIDATION_FLAG:
			case NO_MODULE_NAME_CHECK_FLAG:
			{
				// Simple boolean flags: record presence with no arguments.
				checkAndAddNoArgUniqueFlag(flag, new String[0]);
				return i;
			}
			case API_OUTPUT_DIR_FLAG:
			{
				return parseOutput(i);
			}
			case DOT_FLAG:
			{
				// -dot and -aut are mutually exclusive output formats.
				if (this.parsed.containsKey(UNIQUE_FLAGS.get(AUT_FLAG)))
				{
					throw new CommandLineException("Incompatible flags: " + DOT_FLAG + " and " + AUT_FLAG);
				}
				checkAndAddNoArgUniqueFlag(flag, new String[0]);
				return i;
			}
			case AUT_FLAG:
			{
				if (this.parsed.containsKey(UNIQUE_FLAGS.get(DOT_FLAG)))
				{
					throw new CommandLineException("Incompatible flags: " + DOT_FLAG + " and " + AUT_FLAG);
				}
				checkAndAddNoArgUniqueFlag(flag, new String[0]);
				return i;
			}
			// Non-unique flags
			case PROJECT_FLAG:
			{
				return parseProject(i);
			}
			case EFSM_FLAG:
			case VALIDATION_EFSM_FLAG:
			case UNFAIR_EFSM_FLAG:
			case API_GEN_FLAG:
			case STATECHAN_API_GEN_FLAG:
			{
				return parseProtoAndRoleArgs(flag, i);
			}
			case EFSM_PNG_FLAG:
			case VALIDATION_EFSM_PNG_FLAG:
			case UNFAIR_EFSM_PNG_FLAG:
			{
				return parseProtoRoleAndFileArgs(flag, i);
			}
			case SGRAPH_FLAG:
			case UNFAIR_SGRAPH_FLAG:
			case SESSION_API_GEN_FLAG:
			{
				return parseProtoArg(flag, i);
			}
			case SGRAPH_PNG_FLAG:
			case UNFAIR_SGRAPH_PNG_FLAG:
			{
				return parseProtoAndFileArgs(flag, i);
			}
			default:
			{
				// Unreachable for flags registered in FLAGS; guards future additions.
				throw new RuntimeException("[TODO] Unknown flag: " + flag);
			}
		}
	}

	// Records a unique flag (with the given args), rejecting duplicates.
	// Note: despite the name, this is also used for unique flags that do carry
	// an argument (e.g. -ip, -inline, -f17).
	private void checkAndAddNoArgUniqueFlag(String flag, String[] args) throws CommandLineException
	{
		ArgFlag argFlag = UNIQUE_FLAGS.get(flag);
		if (this.parsed.containsKey(argFlag))
		{
			throw new CommandLineException("Duplicate flag: " + flag);
		}
		this.parsed.put(argFlag, args);
	}

	// -d <dir>: API output directory (repeat decls override; see parseFlag note).
	private int parseOutput(int i) throws CommandLineException
	{
		if ((i + 1) >= this.args.length)
		{
			throw new CommandLineException("Missing directory argument");
		}
		String dir = this.args[++i];
		this.parsed.put(UNIQUE_FLAGS.get(API_OUTPUT_DIR_FLAG), new String[] { dir } );
		return i;
	}

	// Records the (single) main module argument after syntactic validation.
	private void parseMain(int i) throws CommandLineException
	{
		String main = args[i];
		if (!validateModuleArg(main))
		{
			throw new CommandLineException("Bad module arg: " + main);
		}
		this.parsed.put(CommandLine.ArgFlag.MAIN_MOD, new String[] { main } );
	}

	// -ip <path>: module import path; every element must be an existing directory.
	private int parseImportPath(int i) throws CommandLineException
	{
		if ((i + 1) >= this.args.length)
		{
			throw new CommandLineException("Missing path argument");
		}
		String path = this.args[++i];
		if (!validatePaths(path))
		{
			throw new CommandLineException("Scribble module import path '"+ path +"' is not valid\r\n");
		}
		checkAndAddNoArgUniqueFlag(IMPORT_PATH_FLAG, new String[] { path });
		return i;
	}

	// -inline <module>: the main module given literally on the command line.
	private int parseInlineMainModule(int i) throws CommandLineException
	{
		if ((i + 1) >= this.args.length)
		{
			throw new CommandLineException("Missing module definition");
		}
		String inline = this.args[++i];
		checkAndAddNoArgUniqueFlag(INLINE_MAIN_MOD_FLAG, new String[] { inline });
		return i;
	}

	// -f17 <proto>: simple global protocol name.
	private int parseF17(int i) throws CommandLineException
	{
		if ((i + 1) >= this.args.length)
		{
			throw new CommandLineException("Missing simple global protocol name argument");
		}
		String proto = this.args[++i];
		checkAndAddNoArgUniqueFlag(F17_FLAG, new String[] { proto });
		return i;
	}

	// -project <proto> <role>. Similar to parseProtoAndRoleArgs.
	private int parseProject(int i) throws CommandLineException
	{
		if ((i + 2) >= this.args.length)
		{
			throw new CommandLineException("Missing protocol/role arguments");
		}
		String proto = this.args[++i];
		String role = this.args[++i];
		/*if (!validateProtocolName(proto)) // TODO
		{
			throw new RuntimeException("Protocol name '"+ proto +"' is not valid\r\n");
		}*/
		concatArgs(NON_UNIQUE_FLAGS.get(PROJECT_FLAG), proto, role);
		return i;
	}

	// <flag> <proto> <role>
	private int parseProtoAndRoleArgs(String f, int i) throws CommandLineException
	{
		ArgFlag flag = NON_UNIQUE_FLAGS.get(f);
		if ((i + 2) >= this.args.length)
		{
			throw new CommandLineException("Missing protocol/role arguments");
		}
		String proto = this.args[++i];
		String role = this.args[++i];
		concatArgs(flag, proto, role);
		return i;
	}

	// <flag> <proto> <role> <png-file>
	private int parseProtoRoleAndFileArgs(String f, int i) throws CommandLineException
	{
		ArgFlag flag = NON_UNIQUE_FLAGS.get(f);
		if ((i + 3) >= this.args.length)
		{
			throw new CommandLineException("Missing protocol/role/file arguments");
		}
		String proto = this.args[++i];
		String role = this.args[++i];
		String png = this.args[++i];
		concatArgs(flag, proto, role, png);
		return i;
	}

	// <flag> <proto>
	private int parseProtoArg(String f, int i) throws CommandLineException
	{
		ArgFlag flag = NON_UNIQUE_FLAGS.get(f);
		if ((i + 1) >= this.args.length)
		{
			throw new CommandLineException("Missing protocol argument");
		}
		String proto = this.args[++i];
		concatArgs(flag, proto);
		return i;
	}

	// <flag> <proto> <png-file>
	private int parseProtoAndFileArgs(String f, int i) throws CommandLineException
	{
		ArgFlag flag = NON_UNIQUE_FLAGS.get(f);
		if ((i + 2) >= this.args.length)
		{
			throw new CommandLineException("Missing protocol/file arguments");
		}
		String proto = this.args[++i];
		String png = this.args[++i];
		concatArgs(flag, proto, png);
		return i;
	}

	// Appends toAdd onto any previously recorded arguments for a non-unique flag.
	private void concatArgs(ArgFlag flag, String... toAdd)
	{
		String[] args = this.parsed.get(flag);
		if (args == null)
		{
			args = Arrays.copyOf(toAdd, toAdd.length);
		}
		else
		{
			String[] tmp = new String[args.length + toAdd.length];
			System.arraycopy(args, 0, tmp, 0, args.length);
			System.arraycopy(toAdd, 0, tmp, args.length, toAdd.length);
			args = tmp;
		}
		this.parsed.put(flag, args);
	}

	// Used to guard subsequent file open attempt?
	// Accepts letters, digits, '.', '-', '_', ':', and path separators.
	private static boolean validateModuleArg(String arg)
	{
		return arg.chars().noneMatch((i) ->
				!Character.isLetterOrDigit(i) && i != '.' && i != File.separatorChar && i != ':' && i != '-' && i != '_'
				&& i != '/'); // Hack? (cygwin)
	}

	// True iff every path-separator-delimited element is an existing directory.
	private static boolean validatePaths(String paths)
	{
		for (String path : paths.split(File.pathSeparator))
		{
			if (!new File(path).isDirectory())
			{
				return false;
			}
		}
		return true;
	}
}
|
|
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.common;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import org.drools.FactException;
import org.drools.FactHandle;
import org.drools.RuleBase;
import org.drools.RuleBaseConfiguration.AssertBehaviour;
import org.drools.RuntimeDroolsException;
import org.drools.WorkingMemory;
import org.drools.WorkingMemoryEntryPoint;
import org.drools.base.ClassObjectType;
import org.drools.core.util.Iterator;
import org.drools.core.util.ObjectHashSet;
import org.drools.core.util.ObjectHashSet.ObjectEntry;
import org.drools.impl.StatefulKnowledgeSessionImpl.ObjectStoreWrapper;
import org.drools.reteoo.EntryPointNode;
import org.drools.reteoo.ObjectTypeConf;
import org.drools.reteoo.ObjectTypeNode;
import org.drools.reteoo.ObjectTypeNode.ObjectTypeNodeMemory;
import org.drools.reteoo.Rete;
import org.drools.rule.EntryPoint;
import org.drools.rule.Rule;
import org.drools.spi.Activation;
import org.drools.spi.FactHandleFactory;
import org.drools.spi.ObjectType;
import org.drools.spi.PropagationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A named working-memory entry point: facts are inserted/updated/retracted
 * through it and propagated into the Rete network via its EntryPointNode.
 * Mutating operations are guarded by {@code lock} plus the rule base's read
 * lock (see insert/update below).
 */
public class NamedEntryPoint
implements
InternalWorkingMemoryEntryPoint,
WorkingMemoryEntryPoint,
PropertyChangeListener {
protected static transient Logger logger = LoggerFactory.getLogger(NamedEntryPoint.class);
// Reflection signature used when registering this object as a PropertyChangeListener on dynamic facts.
protected static final Class<?>[] ADD_REMOVE_PROPERTY_CHANGE_LISTENER_ARG_TYPES = new Class[]{PropertyChangeListener.class};
/** The arguments used when adding/removing a property change listener. */
protected final Object[] addRemovePropertyChangeListenerArgs = new Object[]{this};
// Truth-maintenance system; accessed both directly and via
// getTruthMaintenanceSystem() -- presumably lazily initialised there (accessor
// not visible in this file section; TODO confirm).
private TruthMaintenanceSystem tms;
// Stores all fact handles known to this entry point.
protected ObjectStore objectStore;
protected transient InternalRuleBase ruleBase;
// Identity of this entry point within the rule base.
protected EntryPoint entryPoint;
protected EntryPointNode entryPointNode;
private ObjectTypeConfigurationRegistry typeConfReg;
private final AbstractWorkingMemory wm;
private FactHandleFactory handleFactory;
// Guards all fact mutations on this entry point (may be shared; see constructors).
protected final ReentrantLock lock;
// Facts registered for property-change tracking, if any.
protected Set<InternalFactHandle> dynamicFacts = null;
/**
 * Creates a named entry point guarded by its own (new) lock.
 */
public NamedEntryPoint(EntryPoint entryPoint,
EntryPointNode entryPointNode,
AbstractWorkingMemory wm) {
this( entryPoint,
entryPointNode,
wm,
new ReentrantLock() );
}
/**
 * Creates a named entry point sharing the given lock (lets multiple entry
 * points be serialised against one another by the caller).
 */
public NamedEntryPoint(EntryPoint entryPoint,
EntryPointNode entryPointNode,
AbstractWorkingMemory wm,
ReentrantLock lock) {
this.entryPoint = entryPoint;
this.entryPointNode = entryPointNode;
this.wm = wm;
// Cached downcast of the session's rule base; assigned before it is used below.
this.ruleBase = (InternalRuleBase) this.wm.getRuleBase();
this.lock = lock;
this.typeConfReg = new ObjectTypeConfigurationRegistry( this.ruleBase );
this.handleFactory = this.wm.getFactHandleFactory();
this.objectStore = new SingleThreadedObjectStore( this.ruleBase.getConfiguration(),
this.lock );
}
/** Clears all fact handles held by this entry point's object store. */
public void reset() {
this.objectStore.clear();
}
/** Returns the store of fact handles owned by this entry point. */
public ObjectStore getObjectStore() {
return this.objectStore;
}
/** Returns the Rete entry-point node facts are propagated through. */
public EntryPointNode getEntryPointNode() {
return this.entryPointNode;
}
/**
 * Inserts a fact as a plain (non-dynamic, non-logical) stated insertion.
 *
 * @param object the fact to assert; a null object yields a null handle
 * @return the fact handle, or null when {@code object} is null
 * @see WorkingMemory
 */
public FactHandle insert(final Object object) throws FactException {
return insert( object, /* Not-Dynamic */
null,
false,
false,
null,
null );
}
/**
 * Inserts a fact, optionally as dynamic (a property-change listener is then
 * attached so bean mutations re-propagate automatically).
 *
 * @param dynamic true to attach property-change support to the fact
 */
public FactHandle insert(final Object object,
final boolean dynamic) throws FactException {
return insert( object,
null,
dynamic,
false,
null,
null );
}
/**
 * Core insertion routine behind all the public insert variants.
 *
 * Handles the sequential-mode fast path, truth-maintenance (logical vs.
 * stated insertions and their EqualityKey bookkeeping), duplicate detection,
 * and dynamic (property-change) facts. Statement order is significant: the
 * entry-point lock and rule-base read lock bracket the store lookup and TMS
 * work, and start/endOperation bracket the whole call.
 *
 * @param object     the fact to assert; null is a no-op returning null
 * @param tmsValue   value associated with a logical insertion in the TMS
 * @param dynamic    attach property-change support when true
 * @param logical    true for a logical (justified) insertion
 * @param rule       the rule performing the insertion, or null for user/API calls
 * @param activation the activation performing the insertion, or null
 * @return the fact handle, or null (null object, or logical insert of a
 *         previously stated equal object)
 */
protected FactHandle insert(final Object object,
final Object tmsValue,
final boolean dynamic,
boolean logical,
final Rule rule,
final Activation activation) throws FactException {
if ( object == null ) {
// you cannot assert a null object
return null;
}
try {
this.wm.startOperation();
ObjectTypeConf typeConf = this.typeConfReg.getObjectTypeConf( this.entryPoint,
object );
// Logical insertions require TMS; switch it on for this type if needed.
if ( logical && !typeConf.isTMSEnabled()) {
enableTMS(object, typeConf);
}
InternalFactHandle handle = null;
// Sequential mode: no duplicate/TMS checks, just create and propagate.
if ( this.wm.isSequential() ) {
handle = createHandle( object,
typeConf );
insert( handle,
object,
rule,
activation,
typeConf );
return handle;
}
// NOTE: created before the store lookup, so `handle` is still null here;
// the TMS stated-update path mutates this context's handle later.
final PropagationContext propagationContext = new PropagationContextImpl( this.wm.getNextPropagationIdCounter(),
PropagationContext.INSERTION,
rule,
(activation == null) ? null : activation.getTuple(),
handle,
this.wm.agenda.getActiveActivations(),
this.wm.agenda.getDormantActivations(),
entryPoint );
try {
this.lock.lock();
this.ruleBase.readLock();
// check if the object already exists in the WM
handle = this.objectStore.getHandleForObject( object );
if ( typeConf.isTMSEnabled() ) {
TruthMaintenanceSystem tms = getTruthMaintenanceSystem();
if ( handle != null ) {
// Identical object already asserted: reconcile its TMS status only.
insertWhenHandleExists( object, tmsValue, logical, rule, activation, typeConf, handle, tms, propagationContext );
return handle;
}
// get the key for other "equal" objects, returns null if none exist
EqualityKey key = tms.get( object );
if ( logical ) {
if ( key != null && key.getStatus() == EqualityKey.STATED ) {
// You cannot logically insert a previously stated equality equal object, so return null
return null;
}
if ( key == null ) {
handle = createHandle( object,
typeConf ); // we know the handle is null
key = new EqualityKey( handle );
handle.setEqualityKey( key );
tms.put( key );
key.setStatus( EqualityKey.JUSTIFIED ); // new Key, so we know it's JUSTIFIED
} else {
handle = key.getFactHandle();
}
// Any logical propagations are handled via the TMS.addLogicalDependency
// NOTE(review): dereferences `activation` without a null check -- a
// logical insert with a null activation would NPE here; TODO confirm
// callers always pass an activation on the logical path.
tms.addLogicalDependency( handle,
object,
tmsValue,
activation,
activation.getPropagationContext(),
rule,
typeConf );
return key.getFactHandle();
} else { // !logical
if ( key == null ) {
handle = createHandle( object,
typeConf ); // we know the handle is null
key = new EqualityKey( handle );
handle.setEqualityKey( key );
tms.put( key );
} else if ( key.getStatus() == EqualityKey.JUSTIFIED ) {
// Its previous justified, so switch to stated
key.setStatus( EqualityKey.STATED ); // must be done before the justifiedHandle retract
// remove logical dependencies
final InternalFactHandle justifiedHandle = key.getFactHandle();
((PropagationContextImpl)propagationContext).setFactHandle( justifiedHandle ); // necessary to stop recursive retractions
TruthMaintenanceSystemHelper.clearLogicalDependencies( justifiedHandle, propagationContext );
// now update existing handle to new value
return update( justifiedHandle, true, object, Long.MAX_VALUE, activation );
} else { // STATED
handle = createHandle( object,
typeConf ); // we know the handle is null
handle.setEqualityKey( key );
key.addFactHandle( handle );
}
key.setStatus( EqualityKey.STATED ); // KEY is always stated
}
} else {
// TMS not enabled for this object type
if ( handle != null ) {
// Already asserted: idempotent, return the existing handle.
return handle;
}
handle = createHandle( object,
typeConf );
}
// if the dynamic parameter is true or if the user declared the fact type with the meta tag:
// @propertyChangeSupport
if ( dynamic || typeConf.isDynamic() ) {
addPropertyChangeListener( handle, dynamic );
}
insert( handle,
object,
rule,
activation,
typeConf );
} finally {
this.ruleBase.readUnlock();
this.lock.unlock();
}
return handle;
} finally {
this.wm.endOperation();
}
}
/**
 * Reconciles the TMS status of a fact that is already asserted (an identical
 * object was found in the store): a stated fact is left untouched; a
 * justified fact is either promoted to stated (non-logical re-insert) or
 * given an additional logical dependency (logical re-insert).
 */
private void insertWhenHandleExists(final Object object,
final Object tmsValue,
boolean logical,
final Rule rule,
final Activation activation,
ObjectTypeConf typeConf,
InternalFactHandle handle,
TruthMaintenanceSystem tms,
final PropagationContext propagationContext ) {
EqualityKey key;
// Object is already asserted, so check and possibly correct its
// status and then return the handle
key = handle.getEqualityKey();
if ( key.getStatus() == EqualityKey.STATED ) {
// nothing to do: you cannot justify an already-stated object
return;
}
if ( !logical ) {
// this object was previously justified, so we have to override it to stated
key.setStatus( EqualityKey.STATED );
TruthMaintenanceSystemHelper.removeLogicalDependencies( handle, propagationContext );
} else {
// this object is already justified, so just add a new logical dependency
// NOTE(review): dereferences `activation` without a null check -- TODO
// confirm the logical path always supplies an activation.
tms.addLogicalDependency( handle,
object,
tmsValue,
activation,
activation.getPropagationContext(),
rule,
typeConf );
}
}
/**
 * Propagates an already-created fact handle into the Rete network.
 *
 * Order is significant: queued rule-base/WM actions are flushed first, the
 * assertion is propagated, its action queue evaluated, listeners fired, and
 * queued WM actions flushed again before (for external calls, rule == null)
 * staged activations are released.
 */
public void insert(final InternalFactHandle handle,
final Object object,
final Rule rule,
final Activation activation,
ObjectTypeConf typeConf) {
this.ruleBase.executeQueuedActions();
this.wm.executeQueuedActions();
if ( activation != null ) {
// release resources so that they can be GC'ed
activation.getPropagationContext().releaseResources();
}
final PropagationContext propagationContext = new PropagationContextImpl( this.wm.getNextPropagationIdCounter(),
PropagationContext.INSERTION,
rule,
(activation == null) ? null : activation.getTuple(),
handle,
this.wm.agenda.getActiveActivations(),
this.wm.agenda.getDormantActivations(),
entryPoint );
this.entryPointNode.assertObject( handle,
propagationContext,
typeConf,
this.wm );
propagationContext.evaluateActionQueue( this.wm );
this.wm.workingMemoryEventSupport.fireObjectInserted( propagationContext,
handle,
object,
this.wm );
this.wm.executeQueuedActions();
if ( rule == null ) {
// This is not needed for internal WM actions as the firing rule will unstage
this.wm.getAgenda().unstageActivations();
}
}
/**
 * Updates a fact with a new object value; all property bits are considered
 * modified (mask = Long.MAX_VALUE) and no activation context is given.
 */
public void update(final org.kie.runtime.rule.FactHandle factHandle,
final Object object) throws FactException {
InternalFactHandle handle = (InternalFactHandle) factHandle;
update( handle,
false,
object,
Long.MAX_VALUE,
null );
}
/**
 * Updates a fact with a modification mask describing which properties
 * changed, in the context of the given activation.
 */
public void update(final org.kie.runtime.rule.FactHandle factHandle,
final Object object,
final long mask,
final Activation activation) throws FactException {
InternalFactHandle handle = (InternalFactHandle) factHandle;
update( handle,
false,
object,
mask,
activation );
}
/**
 * Core update routine: re-propagates a fact under a new object value.
 *
 * Reconnects disconnected handles, validates the handle belongs to this entry
 * point, rehashes the object store when the object identity or the assert
 * behaviour requires it, reconciles TMS equality keys (updates are always
 * STATED), then propagates the modification. Lock order (entry-point lock,
 * then rule-base read lock) and start/endOperation bracketing mirror insert.
 *
 * @param updateLogical when true, a JUSTIFIED status on an unchanged key is
 *        preserved instead of having its logical dependencies removed
 * @param mask bitmask of modified properties (Long.MAX_VALUE = all)
 * @return the (possibly replaced) fact handle the caller should use
 */
public InternalFactHandle update(InternalFactHandle handle,
final boolean updateLogical,
final Object object,
final long mask,
final Activation activation) throws FactException {
try {
this.lock.lock();
this.ruleBase.readLock();
this.wm.startOperation();
this.ruleBase.executeQueuedActions();
// the handle might have been disconnected, so reconnect if it has
if ( handle.isDisconnected() ) {
handle = this.objectStore.reconnect( handle );
}
final Object originalObject = handle.getObject();
if ( handle.getEntryPoint() != this ) {
throw new IllegalArgumentException( "Invalid Entry Point. You updated the FactHandle on entry point '" + handle.getEntryPoint().getEntryPointId() + "' instead of '" + getEntryPointId() + "'" );
}
final ObjectTypeConf typeConf = this.typeConfReg.getObjectTypeConf( this.entryPoint,
object );
// only needed if we maintain tms, but either way we must get it before we do the update
int status = -1;
if ( typeConf.isTMSEnabled() ) {
status = handle.getEqualityKey().getStatus();
}
if ( handle.getId() == -1 || object == null || (handle.isEvent() && ((EventFactHandle) handle).isExpired()) ) {
// the handle is invalid, most likely already retracted, so return and we cannot assert a null object
return handle;
}
if ( activation != null ) {
// release resources so that they can be GC'ed
activation.getPropagationContext().releaseResources();
}
// Rehash unless this is the identical object under IDENTITY behaviour.
if ( originalObject != object || !AssertBehaviour.IDENTITY.equals( this.ruleBase.getConfiguration().getAssertBehaviour() ) ) {
this.objectStore.removeHandle( handle );
// set anyway, so that it updates the hashCodes
handle.setObject( object );
this.objectStore.addHandle( handle,
object );
}
this.handleFactory.increaseFactHandleRecency( handle );
Rule rule = activation == null ? null : activation.getRule();
final PropagationContext propagationContext = new PropagationContextImpl( this.wm.getNextPropagationIdCounter(),
PropagationContext.MODIFICATION,
rule,
(activation == null) ? null : activation.getTuple(),
handle,
this.wm.agenda.getActiveActivations(),
this.wm.agenda.getDormantActivations(),
entryPoint,
mask );
if ( typeConf.isTMSEnabled() ) {
// NOTE(review): reads the `tms` field directly, while other paths use the
// getTruthMaintenanceSystem() accessor -- possible NPE if the TMS has not
// been initialised yet for this entry point; TODO confirm.
EqualityKey newKey = tms.get( object );
EqualityKey oldKey = handle.getEqualityKey();
if ( newKey == null ) {
if ( oldKey.getStatus() == EqualityKey.JUSTIFIED ) {
// new target key is JUSTFIED, updates are always STATED
TruthMaintenanceSystemHelper.removeLogicalDependencies( oldKey.getFactHandle(), propagationContext );
}
oldKey.removeFactHandle( handle );
// If the equality key is now empty, then remove it
if ( oldKey.isEmpty() ) {
getTruthMaintenanceSystem().remove( oldKey );
}
newKey = new EqualityKey( handle,
EqualityKey.STATED ); // updates are always stated
handle.setEqualityKey( newKey );
getTruthMaintenanceSystem().put( newKey );
} else if ( newKey != oldKey ) {
oldKey.removeFactHandle( handle );
// If the equality key is now empty, then remove it
if ( oldKey.isEmpty() ) {
getTruthMaintenanceSystem().remove( oldKey );
}
if ( newKey.getStatus() == EqualityKey.JUSTIFIED ) {
// new target key is JUSTITIED, updates are always STATED
TruthMaintenanceSystemHelper.removeLogicalDependencies( newKey.getFactHandle(), propagationContext );
newKey.setStatus( EqualityKey.STATED );
}
// the caller needs the new handle
handle = newKey.getFactHandle();
} else if ( !updateLogical && oldKey.getStatus() == EqualityKey.JUSTIFIED ) {
// new target key is JUSTIFIED, updates are always STATED
TruthMaintenanceSystemHelper.removeLogicalDependencies( oldKey.getFactHandle(), propagationContext );
}
}
this.entryPointNode.modifyObject( handle,
propagationContext,
typeConf,
this.wm );
propagationContext.evaluateActionQueue( this.wm );
this.wm.workingMemoryEventSupport.fireObjectUpdated( propagationContext,
handle,
originalObject,
object,
this.wm );
this.wm.executeQueuedActions();
if ( rule == null ) {
// This is not needed for internal WM actions as the firing rule will unstage
this.wm.getAgenda().unstageActivations();
}
} finally {
this.wm.endOperation();
this.ruleBase.readUnlock();
this.lock.unlock();
}
return handle;
}
public void retract(final org.kie.runtime.rule.FactHandle handle) throws FactException {
delete( (org.drools.FactHandle) handle,
null,
null );
}
public void delete(final org.kie.runtime.rule.FactHandle handle) throws FactException {
delete( (org.drools.FactHandle) handle,
null,
null );
}
    /**
     * Removes a fact from this entry point and propagates the retraction
     * through the network.
     *
     * Under the session lock and rule-base read lock it: reconnects a
     * disconnected handle, validates the handle belongs to this entry point,
     * detaches any property-change listener, propagates a DELETION context,
     * cleans up truth-maintenance state when TMS is enabled for the type,
     * fires retraction events, and finally destroys the handle.
     *
     * @param factHandle handle of the fact to remove; must not be null
     * @param rule       rule that caused the retraction, or null for external calls
     * @param activation activation that caused the retraction, or null
     * @throws FactException if the retraction fails
     * @throws IllegalArgumentException if the handle is null or belongs to a
     *                                  different entry point
     */
    public void delete(final org.drools.FactHandle factHandle,
                       final Rule rule,
                       final Activation activation) throws FactException {
        if ( factHandle == null ) {
            throw new IllegalArgumentException( "FactHandle cannot be null " );
        }
        try {
            // lock ordering: session lock, then rule-base read lock, then operation counter
            this.lock.lock();
            this.ruleBase.readLock();
            this.wm.startOperation();
            this.ruleBase.executeQueuedActions();
            InternalFactHandle handle = (InternalFactHandle) factHandle;
            if ( handle.getId() == -1 ) {
                // can't retract an already retracted handle
                return;
            }
            // the handle might have been disconnected, so reconnect if it has
            if ( handle.isDisconnected() ) {
                handle = this.objectStore.reconnect( handle );
            }
            if ( handle.getEntryPoint() != this ) {
                throw new IllegalArgumentException( "Invalid Entry Point. You updated the FactHandle on entry point '" + handle.getEntryPoint().getEntryPointId() + "' instead of '" + getEntryPointId() + "'" );
            }
            final Object object = handle.getObject();
            final ObjectTypeConf typeConf = this.typeConfReg.getObjectTypeConf( this.entryPoint,
                                                                                object );
            // stop listening to JavaBean property changes before the fact goes away
            if( typeConf.isSupportsPropertyChangeListeners() ) {
                removePropertyChangeListener( handle, true );
            }
            if ( activation != null ) {
                // release resources so that they can be GC'ed
                activation.getPropagationContext().releaseResources();
            }
            final PropagationContext propagationContext = new PropagationContextImpl( this.wm.getNextPropagationIdCounter(),
                                                                                      PropagationContext.DELETION,
                                                                                      rule,
                                                                                      (activation == null) ? null : activation.getTuple(),
                                                                                      handle,
                                                                                      this.wm.agenda.getActiveActivations(),
                                                                                      this.wm.agenda.getDormantActivations(),
                                                                                      this.entryPoint );
            // propagate the retraction through the network before TMS cleanup
            this.entryPointNode.retractObject( handle,
                                               propagationContext,
                                               typeConf,
                                               this.wm );
            if ( typeConf.isTMSEnabled() ) {
                TruthMaintenanceSystem tms = getTruthMaintenanceSystem();
                // TMS.removeLogicalDependency also cleans up Handles from the EqualityKey
                // This can happen on the logical retraction of the last FH, where it's cleaned up in the TMS and also in the main network.
                // However when the user retracts the FH to a logical set of insertions, then we need to clean up the TMS here.
                // Update the equality key, which maintains a list of stated FactHandles
                final EqualityKey key = handle.getEqualityKey();
                // Its justified so attempt to remove any logical dependencies for the handle
                if ( key.getStatus() == EqualityKey.JUSTIFIED ) {
                    TruthMaintenanceSystemHelper.removeLogicalDependencies( handle, propagationContext );
                }
                key.removeFactHandle( handle );
                handle.setEqualityKey( null );
                // If the equality key is now empty, then remove it
                if ( key.isEmpty() ) {
                    tms.remove( key );
                }
            }
            propagationContext.evaluateActionQueue( this.wm );
            this.wm.workingMemoryEventSupport.fireObjectRetracted( propagationContext,
                                                                   handle,
                                                                   object,
                                                                   this.wm );
            this.wm.executeQueuedActions();
            // only now is it safe to drop the handle from the store and recycle it
            this.objectStore.removeHandle( handle );
            this.handleFactory.destroyFactHandle( handle );
            if ( rule == null ) {
                // This is not needed for internal WM actions as the firing rule will unstage
                this.wm.getAgenda().unstageActivations();
            }
        } finally {
            // unlock in reverse acquisition order
            this.wm.endOperation();
            this.ruleBase.readUnlock();
            this.lock.unlock();
        }
    }
protected void addPropertyChangeListener(final InternalFactHandle handle, final boolean dynamicFlag ) {
Object object = handle.getObject();
try {
final Method method = object.getClass().getMethod( "addPropertyChangeListener",
NamedEntryPoint.ADD_REMOVE_PROPERTY_CHANGE_LISTENER_ARG_TYPES );
method.invoke( object,
this.addRemovePropertyChangeListenerArgs );
if( dynamicFlag ) {
if( dynamicFacts == null ) {
dynamicFacts = new HashSet<InternalFactHandle>();
}
dynamicFacts.add( handle );
}
} catch ( final NoSuchMethodException e ) {
logger.error( "Warning: Method addPropertyChangeListener not found" + " on the class " + object.getClass() + " so Drools will be unable to process JavaBean" + " PropertyChangeEvents on the asserted Object" );
} catch ( final IllegalArgumentException e ) {
logger.error( "Warning: The addPropertyChangeListener method" + " on the class " + object.getClass() + " does not take" + " a simple PropertyChangeListener argument" + " so Drools will be unable to process JavaBean"
+ " PropertyChangeEvents on the asserted Object" );
} catch ( final IllegalAccessException e ) {
logger.error( "Warning: The addPropertyChangeListener method" + " on the class " + object.getClass() + " is not public" + " so Drools will be unable to process JavaBean" + " PropertyChangeEvents on the asserted Object" );
} catch ( final InvocationTargetException e ) {
logger.error( "Warning: The addPropertyChangeListener method" + " on the class " + object.getClass() + " threw an InvocationTargetException" + " so Drools will be unable to process JavaBean"
+ " PropertyChangeEvents on the asserted Object: " + e.getMessage() );
} catch ( final SecurityException e ) {
logger.error( "Warning: The SecurityManager controlling the class " + object.getClass() + " did not allow the lookup of a" + " addPropertyChangeListener method" + " so Drools will be unable to process JavaBean"
+ " PropertyChangeEvents on the asserted Object: " + e.getMessage() );
}
}
protected void removePropertyChangeListener(final FactHandle handle, final boolean removeFromSet ) {
Object object = null;
try {
object = ((InternalFactHandle) handle).getObject();
if ( dynamicFacts != null && removeFromSet ) {
dynamicFacts.remove( object );
}
if ( object != null ) {
final Method mehod = object.getClass().getMethod( "removePropertyChangeListener",
NamedEntryPoint.ADD_REMOVE_PROPERTY_CHANGE_LISTENER_ARG_TYPES );
mehod.invoke( object,
this.addRemovePropertyChangeListenerArgs );
}
} catch ( final NoSuchMethodException e ) {
// The removePropertyChangeListener method on the class
// was not found so Drools will be unable to
// stop processing JavaBean PropertyChangeEvents
// on the retracted Object
} catch ( final IllegalArgumentException e ) {
throw new RuntimeDroolsException( "Warning: The removePropertyChangeListener method on the class " + object.getClass() + " does not take a simple PropertyChangeListener argument so Drools will be unable to stop processing JavaBean"
+ " PropertyChangeEvents on the retracted Object" );
} catch ( final IllegalAccessException e ) {
throw new RuntimeDroolsException( "Warning: The removePropertyChangeListener method on the class " + object.getClass() + " is not public so Drools will be unable to stop processing JavaBean PropertyChangeEvents on the retracted Object" );
} catch ( final InvocationTargetException e ) {
throw new RuntimeDroolsException( "Warning: The removePropertyChangeL istener method on the class " + object.getClass() + " threw an InvocationTargetException so Drools will be unable to stop processing JavaBean"
+ " PropertyChangeEvents on the retracted Object: " + e.getMessage() );
} catch ( final SecurityException e ) {
throw new RuntimeDroolsException( "Warning: The SecurityManager controlling the class " + object.getClass() + " did not allow the lookup of a removePropertyChangeListener method so Drools will be unable to stop processing JavaBean"
+ " PropertyChangeEvents on the retracted Object: " + e.getMessage() );
}
}
public WorkingMemoryEntryPoint getWorkingMemoryEntryPoint(String name) {
return this.wm.getWorkingMemoryEntryPoint( name );
}
public ObjectTypeConfigurationRegistry getObjectTypeConfigurationRegistry() {
return this.typeConfReg;
}
public RuleBase getRuleBase() {
return this.ruleBase;
}
public FactHandle getFactHandle(Object object) {
return this.objectStore.getHandleForObject( object );
}
public EntryPoint getEntryPoint() {
return this.entryPoint;
}
public InternalWorkingMemory getInternalWorkingMemory() {
return this.wm;
}
public FactHandle getFactHandleByIdentity(final Object object) {
return this.objectStore.getHandleForObjectIdentity( object );
}
public Object getObject(org.kie.runtime.rule.FactHandle factHandle) {
return this.objectStore.getObjectForHandle(factHandle);
}
@SuppressWarnings("unchecked")
public <T extends org.kie.runtime.rule.FactHandle> Collection<T> getFactHandles() {
return new ObjectStoreWrapper( this.objectStore,
null,
ObjectStoreWrapper.FACT_HANDLE );
}
@SuppressWarnings("unchecked")
public <T extends org.kie.runtime.rule.FactHandle> Collection<T> getFactHandles(org.kie.runtime.ObjectFilter filter) {
return new ObjectStoreWrapper( this.objectStore,
filter,
ObjectStoreWrapper.FACT_HANDLE );
}
@SuppressWarnings("unchecked")
public Collection<Object> getObjects() {
return new ObjectStoreWrapper( this.objectStore,
null,
ObjectStoreWrapper.OBJECT );
}
@SuppressWarnings("unchecked")
public Collection<Object> getObjects(org.kie.runtime.ObjectFilter filter) {
return new ObjectStoreWrapper( this.objectStore,
filter,
ObjectStoreWrapper.OBJECT );
}
public String getEntryPointId() {
return this.entryPoint.getEntryPointId();
}
public long getFactCount() {
return this.objectStore.size();
}
private InternalFactHandle createHandle(final Object object,
ObjectTypeConf typeConf) {
InternalFactHandle handle;
handle = this.handleFactory.newFactHandle( object,
typeConf,
this.wm,
this );
this.objectStore.addHandle( handle,
object );
return handle;
}
    /**
     * TMS will be automatically enabled when the first logical insert happens.
     *
     * We will take all the already asserted objects of the same type and initialize
     * the equality map.
     *
     * @param object the logically inserted object.
     * @param conf the type's configuration.
     */
    private void enableTMS(Object object, ObjectTypeConf conf) {
        // locate the object type node for this class in the default entry point network
        final Rete source = this.ruleBase.getRete();
        final ClassObjectType cot = new ClassObjectType( object.getClass() );
        final Map<ObjectType, ObjectTypeNode> map = source.getObjectTypeNodes( EntryPoint.DEFAULT );
        final ObjectTypeNode node = map.get( cot );
        // the node's memory holds every handle of this type currently asserted
        final ObjectHashSet memory = ((ObjectTypeNodeMemory) this.wm.getNodeMemory( node )).memory;
        // All objects of this type that are already there were certainly stated,
        // since this method call happens at the first logical insert, for any given type.
        org.drools.core.util.Iterator it = memory.iterator();
        for ( Object obj = it.next(); obj != null; obj = it.next() ) {
            org.drools.core.util.ObjectHashSet.ObjectEntry holder = (org.drools.core.util.ObjectHashSet.ObjectEntry) obj;
            InternalFactHandle handle = (InternalFactHandle) holder.getValue();
            if ( handle != null) {
                // give each pre-existing handle a STATED equality key in the TMS
                EqualityKey key = new EqualityKey( handle );
                handle.setEqualityKey( key );
                key.setStatus(EqualityKey.STATED);
                getTruthMaintenanceSystem().put(key);
            }
        }
        // Enable TMS for this type.
        conf.enableTMS();
    }
public void propertyChange(final PropertyChangeEvent event) {
final Object object = event.getSource();
try {
FactHandle handle = getFactHandle( object );
if ( handle == null ) {
throw new FactException( "Update error: handle not found for object: " + object + ". Is it in the working memory?" );
}
update( handle,
object );
} catch ( final FactException e ) {
throw new RuntimeDroolsException( e.getMessage() );
}
}
    /**
     * Releases this entry point's hooks into user beans: removes the
     * PropertyChangeListener registrations made for dynamic facts (old API
     * per-instance flag) and for types configured with property change support.
     */
    public void dispose() {
        if( dynamicFacts != null ) {
            // first we check for facts that were inserted into the working memory
            // using the old API and setting a per instance dynamic flag and remove the
            // session from the listeners list in the bean
            for( InternalFactHandle handle : dynamicFacts ) {
                removePropertyChangeListener( handle, false );
            }
            dynamicFacts = null;
        }
        for( ObjectTypeConf conf : this.typeConfReg.values() ) {
            // then, we check if any of the object types were configured using the
            // @propertyChangeSupport annotation, and clean them up
            if( conf.isDynamic() && conf.isSupportsPropertyChangeListeners() ) {
                // it is enough to iterate the facts on the concrete object type nodes
                // only, as the facts will always be in their concrete object type nodes
                // even if they were also asserted into higher level OTNs as well
                ObjectTypeNode otn = conf.getConcreteObjectTypeNode();
                final ObjectHashSet memory = ((ObjectTypeNodeMemory) this.getInternalWorkingMemory().getNodeMemory( otn )).memory;
                Iterator it = memory.iterator();
                for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) {
                    InternalFactHandle handle = (InternalFactHandle) entry.getValue();
                    removePropertyChangeListener( handle, false );
                }
            }
        }
    }
public void enQueueWorkingMemoryAction(WorkingMemoryAction action) {
wm.queueWorkingMemoryAction( action );
}
public TruthMaintenanceSystem getTruthMaintenanceSystem() {
if (tms == null) {
tms = new TruthMaintenanceSystem(wm, this);
}
return tms;
}
}
|
|
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.yangutils.datamodel;
import org.onosproject.yangutils.datamodel.exceptions.DataModelException;
import com.google.common.base.Strings;
import static org.onosproject.yangutils.datamodel.ResolvableStatus.INTRA_FILE_RESOLVED;
import static org.onosproject.yangutils.datamodel.ResolvableStatus.RESOLVED;
import static org.onosproject.yangutils.datamodel.YangDataTypes.BITS;
import static org.onosproject.yangutils.datamodel.YangDataTypes.BOOLEAN;
import static org.onosproject.yangutils.datamodel.YangDataTypes.DERIVED;
import static org.onosproject.yangutils.datamodel.YangDataTypes.EMPTY;
import static org.onosproject.yangutils.datamodel.YangDataTypes.ENUMERATION;
import static org.onosproject.yangutils.datamodel.YangDataTypes.IDENTITYREF;
import static org.onosproject.yangutils.datamodel.YangDataTypes.LEAFREF;
import static org.onosproject.yangutils.datamodel.YangDataTypes.STRING;
import static org.onosproject.yangutils.datamodel.YangDataTypes.UNION;
import static org.onosproject.yangutils.utils.RestrictionResolver.isOfRangeRestrictedType;
import static org.onosproject.yangutils.utils.RestrictionResolver.processLengthRestriction;
import static org.onosproject.yangutils.utils.RestrictionResolver.processRangeRestriction;
/**
* Represents the derived information.
*
* @param <T> extended information.
*/
public class YangDerivedInfo<T> implements LocationInfo {
/**
* YANG typedef reference.
*/
private YangTypeDef referredTypeDef;
/**
* Resolved additional information about data type after linking, example
* restriction info, named values, etc. The extra information is based
* on the data type. Based on the data type, the extended info can vary.
*/
private T resolvedExtendedInfo;
/**
* Line number of pattern restriction in YANG file.
*/
private int lineNumber;
/**
* Position of pattern restriction in line.
*/
private int charPositionInLine;
/**
* Effective built-in type, requried in case type of typedef is again a
* derived type. This information is to be added during linking.
*/
private YangDataTypes effectiveBuiltInType;
/**
* Length restriction string to temporary store the length restriction when the type
* is derived.
*/
private String lengthRestrictionString;
/**
* Range restriction string to temporary store the range restriction when the type
* is derived.
*/
private String rangeRestrictionString;
/**
* Pattern restriction string to temporary store the pattern restriction when the type
* is derived.
*/
private YangPatternRestriction patternRestriction;
/**
* Returns the referred typedef reference.
*
* @return referred typedef reference
*/
public YangTypeDef getReferredTypeDef() {
return referredTypeDef;
}
/**
* Sets the referred typedef reference.
*
* @param referredTypeDef referred typedef reference
*/
public void setReferredTypeDef(YangTypeDef referredTypeDef) {
this.referredTypeDef = referredTypeDef;
}
/**
* Returns resolved extended information after successful linking.
*
* @return resolved extended information
*/
public T getResolvedExtendedInfo() {
return resolvedExtendedInfo;
}
/**
* Sets resolved extended information after successful linking.
*
* @param resolvedExtendedInfo resolved extended information
*/
public void setResolvedExtendedInfo(T resolvedExtendedInfo) {
this.resolvedExtendedInfo = resolvedExtendedInfo;
}
@Override
public int getLineNumber() {
return lineNumber;
}
@Override
public int getCharPosition() {
return charPositionInLine;
}
@Override
public void setLineNumber(int lineNumber) {
this.lineNumber = lineNumber;
}
@Override
public void setCharPosition(int charPositionInLine) {
this.charPositionInLine = charPositionInLine;
}
/**
* Returns the length restriction string.
*
* @return the length restriction string
*/
public String getLengthRestrictionString() {
return lengthRestrictionString;
}
/**
* Sets the length restriction string.
*
* @param lengthRestrictionString the length restriction string
*/
public void setLengthRestrictionString(String lengthRestrictionString) {
this.lengthRestrictionString = lengthRestrictionString;
}
/**
* Returns the range restriction string.
*
* @return the range restriction string
*/
public String getRangeRestrictionString() {
return rangeRestrictionString;
}
/**
* Sets the range restriction string.
*
* @param rangeRestrictionString the range restriction string
*/
public void setRangeRestrictionString(String rangeRestrictionString) {
this.rangeRestrictionString = rangeRestrictionString;
}
/**
* Returns the pattern restriction.
*
* @return the pattern restriction
*/
public YangPatternRestriction getPatternRestriction() {
return patternRestriction;
}
/**
* Sets the pattern restriction.
*
* @param patternRestriction the pattern restriction
*/
public void setPatternRestriction(YangPatternRestriction patternRestriction) {
this.patternRestriction = patternRestriction;
}
/**
* Returns effective built-in type.
*
* @return effective built-in type
*/
public YangDataTypes getEffectiveBuiltInType() {
return effectiveBuiltInType;
}
/**
* Sets effective built-in type.
*
* @param effectiveBuiltInType effective built-in type
*/
public void setEffectiveBuiltInType(YangDataTypes effectiveBuiltInType) {
this.effectiveBuiltInType = effectiveBuiltInType;
}
/**
* Resolves the type derived info, by obtaining the effective built-in type
* and resolving the restrictions.
*
* @return resolution status
* @throws DataModelException a violation in data mode rule
*/
public ResolvableStatus resolve() throws DataModelException {
YangType<?> baseType = getReferredTypeDef().getTypeDefBaseType();
/*
* Checks the data type of the referred typedef, if it's derived,
* obtain effective built-in type and restrictions from it's derived
* info, otherwise take from the base type of type itself.
*/
if (baseType.getDataType() == DERIVED) {
/*
* Check whether the referred typedef is resolved.
*/
if (baseType.getResolvableStatus() != INTRA_FILE_RESOLVED && baseType.getResolvableStatus() != RESOLVED) {
throw new DataModelException("Linker Error: Referred typedef is not resolved.");
}
/*
* Check if the referred typedef is intra file resolved, if yes sets
* current status also to intra file resolved .
*/
if (getReferredTypeDef().getTypeDefBaseType().getResolvableStatus() == INTRA_FILE_RESOLVED) {
return INTRA_FILE_RESOLVED;
}
setEffectiveBuiltInType(((YangDerivedInfo<?>) baseType.getDataTypeExtendedInfo())
.getEffectiveBuiltInType());
YangDerivedInfo refDerivedInfo = ((YangDerivedInfo<?>) baseType.getDataTypeExtendedInfo());
/*
* Check whether the effective built-in type can have range
* restrictions, if yes call resolution of range.
*/
if (isOfRangeRestrictedType(getEffectiveBuiltInType())) {
if (refDerivedInfo.getResolvedExtendedInfo() == null) {
resolveRangeRestriction(null);
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
} else {
if (!(refDerivedInfo.getResolvedExtendedInfo() instanceof YangRangeRestriction)) {
throw new DataModelException("Linker error: Referred typedef restriction info is of invalid " +
"type.");
}
resolveRangeRestriction((YangRangeRestriction) refDerivedInfo.getResolvedExtendedInfo());
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
}
/*
* If the effective built-in type is of type string calls
* for string resolution.
*/
} else if (getEffectiveBuiltInType() == STRING) {
if (refDerivedInfo.getResolvedExtendedInfo() == null) {
resolveStringRestriction(null);
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
} else {
if (!(refDerivedInfo.getResolvedExtendedInfo() instanceof YangStringRestriction)) {
throw new DataModelException("Linker error: Referred typedef restriction info is of invalid " +
"type.");
}
resolveStringRestriction((YangStringRestriction) refDerivedInfo.getResolvedExtendedInfo());
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
}
}
} else {
setEffectiveBuiltInType((baseType.getDataType()));
/*
* Check whether the effective built-in type can have range
* restrictions, if yes call resolution of range.
*/
if (isOfRangeRestrictedType(getEffectiveBuiltInType())) {
if (baseType.getDataTypeExtendedInfo() == null) {
resolveRangeRestriction(null);
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
} else {
if (!(baseType.getDataTypeExtendedInfo() instanceof YangRangeRestriction)) {
throw new DataModelException("Linker error: Referred typedef restriction info is of invalid " +
"type.");
}
resolveRangeRestriction((YangRangeRestriction) baseType.getDataTypeExtendedInfo());
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
}
/*
* If the effective built-in type is of type string calls
* for string resolution.
*/
} else if (getEffectiveBuiltInType() == STRING) {
if (baseType.getDataTypeExtendedInfo() == null) {
resolveStringRestriction(null);
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
} else {
if (!(baseType.getDataTypeExtendedInfo() instanceof YangStringRestriction)) {
throw new DataModelException("Linker error: Referred typedef restriction info is of invalid " +
"type.");
}
resolveStringRestriction((YangStringRestriction) baseType.getDataTypeExtendedInfo());
/*
* Return the resolution status as resolved, if it's not
* resolve range/string restriction will throw exception
* in previous function.
*/
return RESOLVED;
}
}
}
/*
* Check if the data type is the one which can't be restricted, in
* this case check whether no self restrictions should be present.
*/
if (isOfValidNonRestrictedType(getEffectiveBuiltInType())) {
if (Strings.isNullOrEmpty(getLengthRestrictionString())
&& Strings.isNullOrEmpty(getRangeRestrictionString())
&& getPatternRestriction() == null) {
return RESOLVED;
} else {
throw new DataModelException("YANG file error: Restrictions can't be applied to a given type");
}
}
// Throw exception for unsupported types
throw new DataModelException("Linker error: Unable to process the derived type.");
}
/**
* Resolves the string restrictions.
*
* @param refStringRestriction referred string restriction of typedef
* @throws DataModelException a violation in data model rule
*/
private void resolveStringRestriction(YangStringRestriction refStringRestriction) throws DataModelException {
YangStringRestriction curStringRestriction = null;
YangRangeRestriction refRangeRestriction = null;
YangPatternRestriction refPatternRestriction = null;
/*
* Check that range restriction should be null when built-in type is
* string.
*/
if (!(Strings.isNullOrEmpty(getRangeRestrictionString()))) {
DataModelException dataModelException = new DataModelException("YANG file error: Range restriction " +
"should't be present for string data type.");
dataModelException.setLine(lineNumber);
dataModelException.setCharPosition(charPositionInLine);
throw dataModelException;
}
/*
* If referred restriction and self restriction both are null, no
* resolution is required.
*/
if (refStringRestriction == null && Strings.isNullOrEmpty(getLengthRestrictionString())
&& getPatternRestriction() == null) {
return;
}
/*
* If referred string restriction is not null, take value of length
* and pattern restriction and assign.
*/
if (refStringRestriction != null) {
refRangeRestriction = refStringRestriction.getLengthRestriction();
refPatternRestriction = refStringRestriction.getPatternRestriction();
}
YangRangeRestriction lengthRestriction = resolveLengthRestriction(refRangeRestriction);
YangPatternRestriction patternRestriction = resolvePatternRestriction(refPatternRestriction);
/*
* Check if either of length or pattern restriction is present, if yes
* create string restriction and assign value.
*/
if (lengthRestriction != null || patternRestriction != null) {
curStringRestriction = new YangStringRestriction();
curStringRestriction.setLengthRestriction(lengthRestriction);
curStringRestriction.setPatternRestriction(patternRestriction);
}
setResolvedExtendedInfo((T) curStringRestriction);
}
/**
* Resolves pattern restriction.
*
* @param refPatternRestriction referred pattern restriction of typedef
* @return resolved pattern restriction
*/
private YangPatternRestriction resolvePatternRestriction(YangPatternRestriction refPatternRestriction) {
/*
* If referred restriction and self restriction both are null, no
* resolution is required.
*/
if (refPatternRestriction == null && getPatternRestriction() == null) {
return null;
}
/*
* If self restriction is null, and referred restriction is present
* shallow copy the referred to self.
*/
if (getPatternRestriction() == null) {
return refPatternRestriction;
}
/*
* If referred restriction is null, and self restriction is present
* carry out self resolution.
*/
if (refPatternRestriction == null) {
return getPatternRestriction();
}
/*
* Get patterns of referred type and add it to current pattern
* restrictions.
*/
for (String pattern : refPatternRestriction.getPatternList()) {
getPatternRestriction().addPattern(pattern);
}
return getPatternRestriction();
}
/**
* Resolves the length restrictions.
*
* @param refLengthRestriction referred length restriction of typedef
* @return resolved length restriction
* @throws DataModelException a violation in data model rule
*/
private YangRangeRestriction resolveLengthRestriction(YangRangeRestriction refLengthRestriction) throws
DataModelException {
/*
* If referred restriction and self restriction both are null, no
* resolution is required.
*/
if (refLengthRestriction == null && Strings.isNullOrEmpty(getLengthRestrictionString())) {
return null;
}
/*
* If self restriction is null, and referred restriction is present
* shallow copy the referred to self.
*/
if (Strings.isNullOrEmpty(getLengthRestrictionString())) {
return refLengthRestriction;
}
/*
* If referred restriction is null, and self restriction is present
* carry out self resolution.
*/
if (refLengthRestriction == null) {
YangRangeRestriction curLengthRestriction = processLengthRestriction(null, lineNumber,
charPositionInLine, false, getLengthRestrictionString());
return curLengthRestriction;
}
/*
* Carry out self resolution based with obtained effective built-in
* type and MIN/MAX values as per the referred typedef's values.
*/
YangRangeRestriction curLengthRestriction = processLengthRestriction(refLengthRestriction, lineNumber,
charPositionInLine, true, getLengthRestrictionString());
// Resolve the range with referred typedef's restriction.
resolveLengthAndRangeRestriction(refLengthRestriction, curLengthRestriction);
return curLengthRestriction;
}
/**
* Resolves the length/range self and referred restriction, to check whether
* the all the range interval in self restriction is stricter than the
* referred typedef's restriction.
*
* @param refRestriction referred restriction
* @param curRestriction self restriction
*/
private void resolveLengthAndRangeRestriction(YangRangeRestriction refRestriction,
YangRangeRestriction curRestriction) throws DataModelException {
for (Object curInterval : curRestriction.getAscendingRangeIntervals()) {
if (!(curInterval instanceof YangRangeInterval)) {
throw new DataModelException("Linker error: Current range intervals not processed correctly.");
}
try {
refRestriction.isValidInterval((YangRangeInterval) curInterval);
} catch (DataModelException e) {
DataModelException dataModelException = new DataModelException(e);
dataModelException.setLine(lineNumber);
dataModelException.setCharPosition(charPositionInLine);
throw dataModelException;
}
}
}
/**
* Resolves the range restrictions.
*
* @param refRangeRestriction referred range restriction of typedef
* @throws DataModelException a violation in data model rule
*/
/**
 * Resolves the effective range restriction of this type by combining the
 * restriction declared on the type itself with the restriction inherited
 * from the referred typedef, and records the result via
 * {@code setResolvedExtendedInfo}.
 *
 * Resolution proceeds through guard clauses:
 * neither present -> nothing to do; only referred present -> shallow copy;
 * only self present -> self resolution; both present -> self resolution
 * bounded by the referred typedef's MIN/MAX, then merged.
 *
 * @param refRangeRestriction referred range restriction of typedef
 * @throws DataModelException a violation in data model rule
 */
private void resolveRangeRestriction(YangRangeRestriction refRangeRestriction) throws DataModelException {
    /*
     * Length/pattern restrictions are only meaningful for string-like
     * types; they must not be present when the built-in type is a
     * range (int/uint/decimal) type.
     */
    if (!(Strings.isNullOrEmpty(getLengthRestrictionString())) || getPatternRestriction() != null) {
        // Fixed typo in the user-facing error message: "should't" -> "shouldn't".
        DataModelException dataModelException = new DataModelException("YANG file error: Length/Pattern " +
                "restriction shouldn't be present for int/uint/decimal data type.");
        dataModelException.setLine(lineNumber);
        dataModelException.setCharPosition(charPositionInLine);
        throw dataModelException;
    }
    /*
     * If referred restriction and self restriction both are null, no
     * resolution is required.
     */
    if (refRangeRestriction == null && Strings.isNullOrEmpty(getRangeRestrictionString())) {
        return;
    }
    /*
     * If self restriction is null, and referred restriction is present
     * shallow copy the referred to self.
     */
    if (Strings.isNullOrEmpty(getRangeRestrictionString())) {
        setResolvedExtendedInfo((T) refRangeRestriction);
        return;
    }
    /*
     * If referred restriction is null, and self restriction is present
     * carry out self resolution.
     */
    if (refRangeRestriction == null) {
        YangRangeRestriction curRangeRestriction = processRangeRestriction(null, lineNumber,
                charPositionInLine, false, getRangeRestrictionString(), getEffectiveBuiltInType());
        setResolvedExtendedInfo((T) curRangeRestriction);
        return;
    }
    /*
     * Carry out self resolution based with obtained effective built-in
     * type and MIN/MAX values as per the referred typedef's values.
     */
    YangRangeRestriction curRangeRestriction = processRangeRestriction(refRangeRestriction, lineNumber,
            charPositionInLine, true, getRangeRestrictionString(), getEffectiveBuiltInType());
    // Resolve the range with referred typedef's restriction.
    resolveLengthAndRangeRestriction(refRangeRestriction, curRangeRestriction);
    setResolvedExtendedInfo((T) curRangeRestriction);
}
/**
* Returns whether the data type is of non restricted type.
*
* @param dataType data type to be checked
* @return true, if data type can't be restricted, false otherwise
*/
/**
 * Reports whether the given built-in data type is one that cannot carry
 * range/length/pattern restrictions.
 *
 * @param dataType data type to be checked
 * @return true, if data type can't be restricted, false otherwise
 */
private boolean isOfValidNonRestrictedType(YangDataTypes dataType) {
    switch (dataType) {
        case BOOLEAN:
        case ENUMERATION:
        case BITS:
        case EMPTY:
        case UNION:
        case IDENTITYREF:
        case LEAFREF:
            return true;
        default:
            return false;
    }
}
}
|
|
package br.unicamp.ic.sed.mobilemedia.main.impl;
import javax.microedition.midlet.MIDlet;
import javax.microedition.midlet.MIDletStateChangeException;
import br.unicamp.ic.sed.mobilemedia.main.spec.prov.IMobileResources;
import br.unicamp.ic.sed.mobilemedia.mobilephonemgr.spec.prov.IMobilePhone;
import br.unicamp.ic.sed.mobilemedia.mobilephonemgr.spec.req.IAlbum;
//Following are pre-processor statements to include the required
//classes for device specific features. They must be commented out
//if they aren't used, otherwise it will throw exceptions trying to
//load classes that aren't available for a given platform.
/*
*
*
* This is the main Midlet class for the core J2ME application
* It contains all the basic functionality that should be executable
* in any standard J2ME device that supports MIDP 1.0 or higher.
* Any additional J2ME features for this application that are dependent
* upon a particular device (ie. optional or proprietary library) are
* de-coupled from the core application so they can be conditionally included
* depending on the target platform
*
* This Application provides a basic Photo Album interface that allows a user to view
* images on their mobile device.
* */
public class MainUIMidlet extends MIDlet implements IMobileResources {
    // --- Components ---
    // Each IManager below is an architectural component of the MobileMedia
    // product line; they are created and wired together in startApp().
    br.unicamp.ic.sed.mobilemedia.album.spec.prov.IManager album;
    br.unicamp.ic.sed.mobilemedia.filesystemmgr.spec.prov.IManager filesystem;
    br.unicamp.ic.sed.mobilemedia.photo.spec.prov.IManager photo;
    br.unicamp.ic.sed.mobilemedia.mobilephonemgr.spec.prov.IManager mobilePhone;
    br.unicamp.ic.sed.mobilemedia.exceptionhandler.spec.prov.IManager exceptionHandler;
    // --- Connectors ---
    // Each connector mediates the provided/required interfaces exchanged
    // between one pair of components (naming: <componentA>_<componentB>).
    br.unicamp.ic.sed.mobilemedia.album_mobilephonemgr.impl.IManager album_mobilePhone;
    // br.unicamp.ic.sed.mobilemedia.filesystemmgr_mobilephonemgr.impl.IManager filesystem_mobilePhone;
    br.unicamp.ic.sed.mobilemedia.photo_mobilephonemgr.impl.IManager photo_mobilePhone;
    br.unicamp.ic.sed.mobilemedia.mainuimidlet_album.impl.IManager mainMidlet_album;
    br.unicamp.ic.sed.mobilemedia.mainuimidlet_photo.impl.IManager mainMidlet_photo;
    br.unicamp.ic.sed.mobilemedia.mainuimidlet_mobilephonemgr.impl.IManager mainMidlet_mobilePhone;
    br.unicamp.ic.sed.mobilemedia.filesystemmgr_album.impl.IManager filesystemmgr_album;
    br.unicamp.ic.sed.mobilemedia.filesystemmgr_photo.impl.IManager filesystemmgr_photo;
    br.unicamp.ic.sed.mobilemedia.photo_album.impl.IManager photo_album;
    br.unicamp.ic.sed.mobilemedia.mainuimidlet_exceptionhandler.impl.IManager mainMidlet_exceptionHandler;
    br.unicamp.ic.sed.mobilemedia.album_exceptionhandler.impl.IManager album_exceptionhandler;
    br.unicamp.ic.sed.mobilemedia.photo_exceptionhandler.impl.IManager photo_exceptionhandler;

    public MainUIMidlet() {
        //do nothing
    }

    /**
     * Start the MIDlet by creating new model and controller classes, and
     * initialize them as necessary.
     *
     * Wiring happens in three phases: (1) instantiate all component and
     * connector managers via their ComponentFactory, (2) bind every
     * component's required interfaces to the matching connector's provided
     * interfaces, (3) bind the connectors themselves to the components
     * (and to this MIDlet where IMobileResources is required). The order of
     * the bindings below is significant: a connector must exist before any
     * component asks it for a provided interface.
     */
    public void startApp() throws MIDletStateChangeException {
        System.out.println("Starting MobileMediaOO - v1");
        // Phase 1: create all imanagers (components and connectors).
        filesystem = br.unicamp.ic.sed.mobilemedia.filesystemmgr.impl.ComponentFactory.createInstance();
        album_mobilePhone = br.unicamp.ic.sed.mobilemedia.album_mobilephonemgr.impl.ComponentFactory.createInstance();
        mobilePhone = br.unicamp.ic.sed.mobilemedia.mobilephonemgr.impl.ComponentFactory.createInstance();
        album = br.unicamp.ic.sed.mobilemedia.album.impl.ComponentFactory.createInstance();
        exceptionHandler = br.unicamp.ic.sed.mobilemedia.exceptionhandler.impl.ComponentFactory.createInstance();
        // filesystem_mobilePhone = br.unicamp.ic.sed.mobilemedia.filesystemmgr_mobilephonemgr.impl.ComponentFactory.createInstance();
        mainMidlet_album = br.unicamp.ic.sed.mobilemedia.mainuimidlet_album.impl.ComponentFactory.createInstance();
        photo = br.unicamp.ic.sed.mobilemedia.photo.impl.ComponentFactory.createInstance();
        photo_mobilePhone = br.unicamp.ic.sed.mobilemedia.photo_mobilephonemgr.impl.ComponentFactory.createInstance();
        mainMidlet_photo = br.unicamp.ic.sed.mobilemedia.mainuimidlet_photo.impl.ComponentFactory.createInstance();
        mainMidlet_mobilePhone = br.unicamp.ic.sed.mobilemedia.mainuimidlet_mobilephonemgr.impl.ComponentFactory.createInstance();
        filesystemmgr_album = br.unicamp.ic.sed.mobilemedia.filesystemmgr_album.impl.ComponentFactory.createInstance();
        filesystemmgr_photo = br.unicamp.ic.sed.mobilemedia.filesystemmgr_photo.impl.ComponentFactory.createInstance();
        photo_album = br.unicamp.ic.sed.mobilemedia.photo_album.impl.ComponentFactory.createInstance();
        mainMidlet_exceptionHandler = br.unicamp.ic.sed.mobilemedia.mainuimidlet_exceptionhandler.impl.ComponentFactory.createInstance();
        album_exceptionhandler = br.unicamp.ic.sed.mobilemedia.album_exceptionhandler.impl.ComponentFactory.createInstance();
        photo_exceptionhandler = br.unicamp.ic.sed.mobilemedia.photo_exceptionhandler.impl.ComponentFactory.createInstance();
        /*********************************************************************************************/
        // Phase 2: setting required interfaces on the components.
        // component mobilePhoneMgr
        // mobilePhone.setRequiredInterface("IFilesystem", filesystem_mobilePhone.getProvidedInterface("IFilesystem"));
        mobilePhone.setRequiredInterface("IPhoto", photo_mobilePhone.getProvidedInterface("IPhoto"));
        mobilePhone.setRequiredInterface("IMobileResources", mainMidlet_mobilePhone.getProvidedInterface("IMobileResources"));
        br.unicamp.ic.sed.mobilemedia.mobilephonemgr.spec.req.IAlbum ialbum = (IAlbum) album_mobilePhone.getProvidedInterface("IAlbum");
        mobilePhone.setRequiredInterface("IAlbum", ialbum );
        // component album
        // album.setRequiredInterface("IMobilePhone", album_mobilePhone.getProvidedInterface("IMobilePhone"));
        album.setRequiredInterface("IMobileResources", mainMidlet_album.getProvidedInterface("IMobileResources") );
        album.setRequiredInterface("IPhoto", photo_album.getProvidedInterface("IPhoto"));
        album.setRequiredInterface("IFilesystem",filesystemmgr_album.getProvidedInterface("IFilesystem"));
        album.setRequiredInterface("IExceptionHandler", album_exceptionhandler.getProvidedInterface("IExceptionHandler"));
        System.out.println("step 2");
        // component photo
        photo.setRequiredInterface("IMobilePhone", photo_mobilePhone.getProvidedInterface("IMobilePhone"));
        photo.setRequiredInterface("IMobileResources", mainMidlet_photo.getProvidedInterface("IMobileResources") );
        photo.setRequiredInterface("IFilesystem", filesystemmgr_photo.getProvidedInterface("IFilesystem"));
        photo.setRequiredInterface("IExceptionHandler", photo_exceptionhandler.getProvidedInterface("IExceptionHandler"));
        System.out.println("step 3");
        // Phase 3: wire the connectors back to the components (and to this
        // MIDlet, which itself provides IMobileResources).
        mainMidlet_photo.setRequiredInterface("IMobileResources", this );
        mainMidlet_mobilePhone.setRequiredInterface("IMobileResources", this);
        filesystemmgr_album.setRequiredInterface("IFilesystem", filesystem.getProvidedInterface("IFilesystem"));
        filesystemmgr_photo.setRequiredInterface("IFilesystem", filesystem.getProvidedInterface("IFilesystem"));
        photo_album.setRequiredInterface("IPhoto", photo.getProvidedInterface("IPhoto"));
        // filesystem_mobilePhone.setRequiredInterface("IFilesystem", filesystem.getProvidedInterface("IFilesystem"));
        System.out.println("step 4");
        album_mobilePhone.setRequiredInterface("IAlbum", album.getProvidedInterface("IAlbum"));
        album_mobilePhone.setRequiredInterface("IMobilePhone", mobilePhone.getProvidedInterface("IMobilePhone"));
        photo_mobilePhone.setRequiredInterface("IPhoto", photo.getProvidedInterface("IPhoto"));
        photo_mobilePhone.setRequiredInterface("IMobilePhone", mobilePhone.getProvidedInterface("IMobilePhone"));
        mainMidlet_album.setRequiredInterface("IMobileResources", this );
        mainMidlet_exceptionHandler.setRequiredInterface("IMobileResources", this );
        exceptionHandler.setRequiredInterface("IMobileResources", mainMidlet_exceptionHandler.getProvidedInterface("IMobileResources"));
        album_exceptionhandler.setRequiredInterface("IExceptionHandler", exceptionHandler.getProvidedInterface("IExceptionHandler") );
        photo_exceptionhandler.setRequiredInterface("IExceptionHandler", exceptionHandler.getProvidedInterface("IExceptionHandler") );
        System.out.println("step 5");
        // The //#ifdef markers below are antenna/preprocessor directives used
        // by the product-line build; do not remove or reformat them.
        //#ifdef includeSmsFeature
        /* [NC] Added in scenario 06 */
        int cont = 0;
        System.out.println("Setting up SMS feature");
        br.unicamp.ic.sed.mobilemedia.sms.spec.prov.IManager sms = br.unicamp.ic.sed.mobilemedia.sms.impl.ComponentFactory.createInstance();
        System.out.println(cont++);
        //Sms-MobileResources
        br.unicamp.ic.sed.mobilemedia.mainuimidlet_sms.impl.IManager mobile_sms = br.unicamp.ic.sed.mobilemedia.mainuimidlet_sms.impl.ComponentFactory.createInstance();
        mobile_sms.setRequiredInterface("IMobileResources", this);
        sms.setRequiredInterface("IMobileResources", mobile_sms.getProvidedInterface("IMobileResources"));
        System.out.println(cont++);
        //Photo-Sms
        br.unicamp.ic.sed.mobilemedia.sms_photo.IManager photo_sms = br.unicamp.ic.sed.mobilemedia.sms_photo.ComponentFactory.createInstance();
        photo_sms.setRequiredInterface("IPhoto", photo.getProvidedInterface("IPhoto"));
        photo_sms.setRequiredInterface("ISms", sms.getProvidedInterface("ISms"));
        photo.setRequiredInterface("ISms", photo_sms.getProvidedInterface("ISms"));
        sms.setRequiredInterface("IPhoto", photo_sms.getProvidedInterface("IPhoto"));
        System.out.println(cont++);
        //sms-filesystem
        br.unicamp.ic.sed.mobilemedia.sms_filesystem.IManager sms_filesystem = br.unicamp.ic.sed.mobilemedia.sms_filesystem.ComponentFactory.createInstance();
        System.out.println(cont++);
        sms_filesystem.setRequiredInterface("IFilesystem",filesystem.getProvidedInterface("IFilesystem"));
        sms.setRequiredInterface("IFilesystem", sms_filesystem.getProvidedInterface("IFilesystem" ));
        System.out.println(cont++);
        //sms-album
        br.unicamp.ic.sed.mobilemedia.sms_album.IManager sms_album = br.unicamp.ic.sed.mobilemedia.sms_album.ComponentFactory.createInstance();
        System.out.println(cont++);
        sms_album.setRequiredInterface("IAlbum", album.getProvidedInterface("IAlbum"));
        sms.setRequiredInterface("IAlbum", sms_album.getProvidedInterface("IAlbum"));
        //sms-exceptionhandler
        br.unicamp.ic.sed.mobilemedia.sms_exceptionhandler.IManager sms_exceptionHandler = br.unicamp.ic.sed.mobilemedia.sms_exceptionhandler.ComponentFactory.createInstance();
        sms_exceptionHandler.setRequiredInterface("IExceptionHandler", exceptionHandler.getProvidedInterface("IExceptionHandler"));
        sms.setRequiredInterface("IExceptionHandler", sms_exceptionHandler.getProvidedInterface("IExceptionHandler"));
        System.out.println(cont++);
        System.out.println("SMS feature configured");
        //#endif
        // Hand control to the mobile-phone component, which drives the UI.
        IMobilePhone mobPhone = (IMobilePhone) mobilePhone.getProvidedInterface("IMobilePhone");
        System.out.println("mobPhone.startUp()");
        mobPhone.startUp();
    }

    /**
     * Pause the MIDlet.
     * This method does nothing at the moment.
     */
    public void pauseApp() {
        //do nothing
    }

    /**
     * Destroy the MIDlet. Notifies the application manager that this MIDlet
     * has entered the destroyed state.
     */
    public void destroyApp(boolean unconditional) {
        notifyDestroyed();
    }

    /**
     * Returns this MIDlet instance (IMobileResources provided interface).
     */
    public MIDlet getMainMIDlet() {
        return this;
    }
}
|
|
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi,
* Erik Ramfelt, Seiji Sogabe, Martin Eigenbrodt, Alan Harder
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import hudson.Extension;
import hudson.Util;
import hudson.diagnosis.OldDataMonitor;
import hudson.model.Descriptor.FormException;
import hudson.model.listeners.ItemListener;
import hudson.search.CollectionSearchIndex;
import hudson.search.SearchIndexBuilder;
import hudson.security.ACL;
import hudson.security.ACLContext;
import hudson.util.CaseInsensitiveComparator;
import hudson.util.DescribableList;
import hudson.util.FormValidation;
import hudson.util.HttpResponses;
import hudson.views.ListViewColumn;
import hudson.views.ViewJobFilter;
import java.io.IOException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.annotation.CheckForNull;
import javax.annotation.concurrent.GuardedBy;
import javax.servlet.ServletException;
import jenkins.model.Jenkins;
import jenkins.model.ParameterizedJobMixIn;
import net.sf.json.JSONObject;
import org.jenkinsci.Symbol;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.interceptor.RequirePOST;
/**
* Displays {@link Job}s in a flat list view.
*
* @author Kohsuke Kawaguchi
*/
public class ListView extends View implements DirectlyModifiableView {
    /**
     * List of job names. This is what gets serialized.
     * All reads and writes are guarded by {@code synchronized (this)}.
     */
    @GuardedBy("this")
    /*package*/ /*almost-final*/ SortedSet<String> jobNames = new TreeSet<>(CaseInsensitiveComparator.INSTANCE);

    // Additional filters applied on top of the name/regex selection.
    private DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> jobFilters;

    // Columns rendered by this view.
    private DescribableList<ListViewColumn, Descriptor<ListViewColumn>> columns;

    /**
     * Include regex string.
     */
    private String includeRegex;

    /**
     * Whether to recurse in ItemGroups
     */
    private boolean recurse;

    /**
     * Compiled include pattern from the includeRegex string.
     * Transient: rebuilt from includeRegex in readResolve()/setIncludeRegex().
     */
    private transient Pattern includePattern;

    /**
     * Filter by enabled/disabled status of jobs.
     * Null for no filter, true for enabled-only, false for disabled-only.
     */
    private Boolean statusFilter;

    @DataBoundConstructor
    public ListView(String name) {
        super(name);
        initColumns();
        initJobFilters();
    }

    public ListView(String name, ViewGroup owner) {
        this(name);
        this.owner = owner;
    }

    /**
     * Sets the columns of this view.
     */
    @DataBoundSetter
    public void setColumns(List<ListViewColumn> columns) throws IOException {
        this.columns.replaceBy(columns);
    }

    /**
     * Deserialization hook: recompiles the include pattern (reporting a bad
     * persisted regex to OldDataMonitor instead of failing the load) and
     * lazily initializes fields that may be absent in old data.
     */
    private Object readResolve() {
        if(includeRegex!=null) {
            try {
                includePattern = Pattern.compile(includeRegex);
            } catch (PatternSyntaxException x) {
                // Drop the broken regex and surface it via the old-data monitor.
                includeRegex = null;
                OldDataMonitor.report(this, Collections.singleton(x));
            }
        }
        synchronized(this) {
            if (jobNames == null) {
                jobNames = new TreeSet<>(CaseInsensitiveComparator.INSTANCE);
            }
        }
        initColumns();
        initJobFilters();
        return this;
    }

    /** Initializes {@link #columns} with the default column list if unset. */
    protected void initColumns() {
        if (columns == null)
            columns = new DescribableList<>(this,
                    ListViewColumn.createDefaultInitialColumnList(getClass())
            );
    }

    /** Initializes {@link #jobFilters} with an empty list if unset. */
    protected void initJobFilters() {
        if (jobFilters == null)
            jobFilters = new DescribableList<>(this);
    }

    /**
     * Used to determine if we want to display the Add button.
     */
    public boolean hasJobFilterExtensions() {
        return !ViewJobFilter.all().isEmpty();
    }

    public DescribableList<ViewJobFilter, Descriptor<ViewJobFilter>> getJobFilters() {
        return jobFilters;
    }

    @Override
    public DescribableList<ListViewColumn, Descriptor<ListViewColumn>> getColumns() {
        return columns;
    }

    /**
     * Returns a read-only view of all {@link Job}s in this view.
     *
     * <p>
     * This method returns a separate copy each time to avoid
     * concurrent modification issue.
     */
    @Override
    public List<TopLevelItem> getItems() {
        return getItems(this.recurse);
    }

    /**
     * Returns a read-only view of all {@link Job}s in this view.
     *
     *
     * <p>
     * This method returns a separate copy each time to avoid
     * concurrent modification issue.
     * @param recurse {@code false} not to recurse in ItemGroups
     * true to recurse in ItemGroups
     */
    private List<TopLevelItem> getItems(boolean recurse) {
        SortedSet<String> names;
        List<TopLevelItem> items = new ArrayList<>();
        // Snapshot jobNames under the lock, then work on the copy lock-free.
        synchronized (this) {
            names = new TreeSet<>(jobNames);
        }
        ItemGroup<? extends TopLevelItem> parent = getOwner().getItemGroup();
        List<TopLevelItem> parentItems = new ArrayList<>(parent.getItems());
        // Fold in items matched by the include regex (recursively if enabled).
        includeItems(parent, parentItems, names);
        Boolean statusFilter = this.statusFilter; // capture the value to isolate us from concurrent update
        Iterable<? extends TopLevelItem> candidates;
        if (recurse) {
            candidates = parent.getAllItems(TopLevelItem.class);
        } else {
            candidates = parent.getItems();
        }
        for (TopLevelItem item : candidates) {
            // Names are stored relative to the owning item group.
            if (!names.contains(item.getRelativeNameFrom(getOwner().getItemGroup()))) continue;
            // Add if no status filter or filter matches enabled/disabled status:
            // (isDisabled() ^ statusFilter) is true exactly when the item's
            // enabled/disabled state matches the requested filter.
            if(statusFilter == null || !(item instanceof ParameterizedJobMixIn.ParameterizedJob) // TODO or better to call the more generic Job.isBuildable?
                      || ((ParameterizedJobMixIn.ParameterizedJob)item).isDisabled() ^ statusFilter)
                items.add(item);
        }
        // check the filters
        Iterable<ViewJobFilter> jobFilters = getJobFilters();
        List<TopLevelItem> allItems = new ArrayList<>(parentItems);
        if (recurse) allItems = expand(allItems, new ArrayList<>());
        for (ViewJobFilter jobFilter: jobFilters) {
            items = jobFilter.filter(items, allItems, this);
        }
        // for sanity, trim off duplicates
        items = new ArrayList<>(new LinkedHashSet<>(items));
        return items;
    }

    /** Builds the search index covering all items visible in this view. */
    @Override
    public SearchIndexBuilder makeSearchIndex() {
        SearchIndexBuilder sib = new SearchIndexBuilder().addAllAnnotations(this);
        sib.add(new CollectionSearchIndex<TopLevelItem>() {// for jobs in the view
                protected TopLevelItem get(String key) { return getItem(key); }
                protected Collection<TopLevelItem> all() { return getItems(); }
                @Override
                protected String getName(TopLevelItem o) {
                    // return the name instead of the display for suggestion searching
                    return o.getName();
                }
            });
        // add the display name for each item in the search index
        addDisplayNamesToSearchIndex(sib, getItems(true));
        return sib;
    }

    /**
     * Recursively flattens nested item groups into {@code allItems}.
     * Note that containers themselves are also added to the result.
     */
    private List<TopLevelItem> expand(Collection<TopLevelItem> items, List<TopLevelItem> allItems) {
        for (TopLevelItem item : items) {
            if (item instanceof ItemGroup) {
                ItemGroup<? extends Item> ig = (ItemGroup<? extends Item>) item;
                expand(Util.filter(ig.getItems(), TopLevelItem.class), allItems);
            }
            allItems.add(item);
        }
        return allItems;
    }

    @Override
    public boolean contains(TopLevelItem item) {
        return getItems().contains(item);
    }

    /**
     * Adds to {@code names} the relative names of all items under
     * {@code parentItems} that match {@link #includePattern}, descending
     * into item groups when {@link #recurse} is set. No-op when no
     * include regex is configured.
     */
    private void includeItems(ItemGroup<? extends TopLevelItem> root, Collection<? extends Item> parentItems, SortedSet<String> names) {
        if (includePattern != null) {
            for (Item item : parentItems) {
                if (recurse && item instanceof ItemGroup) {
                    ItemGroup<?> ig = (ItemGroup<?>) item;
                    includeItems(root, ig.getItems(), names);
                }
                if (item instanceof TopLevelItem) {
                    String itemName = item.getRelativeNameFrom(root);
                    if (includePattern.matcher(itemName).matches()) {
                        names.add(itemName);
                    }
                }
            }
        }
    }

    /** Thread-safe check of whether the item is explicitly listed by name. */
    public synchronized boolean jobNamesContains(TopLevelItem item) {
        if (item == null) return false;
        return jobNames.contains(item.getRelativeNameFrom(getOwner().getItemGroup()));
    }

    /**
     * Adds the given item to this view.
     *
     * @since 1.389
     */
    @Override
    public void add(TopLevelItem item) throws IOException {
        synchronized (this) {
            jobNames.add(item.getRelativeNameFrom(getOwner().getItemGroup()));
        }
        // Persist outside the lock.
        save();
    }

    /**
     * Removes given item from this view.
     *
     * @since 1.566
     */
    @Override
    public boolean remove(TopLevelItem item) throws IOException {
        synchronized (this) {
            String name = item.getRelativeNameFrom(getOwner().getItemGroup());
            if (!jobNames.remove(name)) return false;
        }
        // Persist outside the lock; only reached when something was removed.
        save();
        return true;
    }

    public String getIncludeRegex() {
        return includeRegex;
    }

    public boolean isRecurse() {
        return recurse;
    }

    /**
     * @since 1.568
     */
    public void setRecurse(boolean recurse) {
        this.recurse = recurse;
    }

    /**
     * Filter by enabled/disabled status of jobs.
     * Null for no filter, true for enabled-only, false for disabled-only.
     */
    public Boolean getStatusFilter() {
        return statusFilter;
    }

    /**
     * Determines the initial state of the checkbox.
     *
     * @return true when the view is empty or already contains jobs specified by name.
     */
    @Restricted(NoExternalUse.class) // called from newJob_button-bar view
    @SuppressWarnings("unused") // called from newJob_button-bar view
    public boolean isAddToCurrentView() {
        synchronized(this) {
            return !jobNames.isEmpty() || // There are already items in this view specified by name
                    (jobFilters.isEmpty() && includePattern == null) // No other way to include items is used
                    ;
        }
    }

    /**
     * Decides whether a newly created item should also be added to this view.
     * UI submissions carry an explicit "addToCurrentView" flag; API
     * submissions (no JSON form) always add.
     */
    private boolean needToAddToCurrentView(StaplerRequest req) throws ServletException {
        String json = req.getParameter("json");
        if (json != null && json.length() > 0) {
            // Submitted via UI
            JSONObject form = req.getSubmittedForm();
            return form.has("addToCurrentView") && form.getBoolean("addToCurrentView");
        } else {
            // Submitted via API
            return true;
        }
    }

    /**
     * Delegates item creation to the owning item group and, if requested,
     * records the new item's name in this view.
     */
    @Override
    @RequirePOST
    public Item doCreateItem(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        ItemGroup<? extends TopLevelItem> ig = getOwner().getItemGroup();
        if (ig instanceof ModifiableItemGroup) {
            TopLevelItem item = ((ModifiableItemGroup<? extends TopLevelItem>)ig).doCreateItem(req, rsp);
            if (item!=null) {
                if (needToAddToCurrentView(req)) {
                    synchronized (this) {
                        jobNames.add(item.getRelativeNameFrom(getOwner().getItemGroup()));
                    }
                    // Persist the owner's configuration outside the lock.
                    owner.save();
                }
            }
            return item;
        }
        return null;
    }

    /** HTTP endpoint: adds a named item to this view (idempotent). */
    @Override
    @RequirePOST
    public HttpResponse doAddJobToView(@QueryParameter String name) throws IOException, ServletException {
        checkPermission(View.CONFIGURE);
        if(name==null)
            throw new Failure("Query parameter 'name' is required");
        TopLevelItem item = resolveName(name);
        if (item == null)
            throw new Failure("Query parameter 'name' does not correspond to a known item");
        if (contains(item)) return HttpResponses.ok();
        add(item);
        owner.save();
        return HttpResponses.ok();
    }

    /** HTTP endpoint: removes a named item from this view. */
    @Override
    @RequirePOST
    public HttpResponse doRemoveJobFromView(@QueryParameter String name) throws IOException, ServletException {
        checkPermission(View.CONFIGURE);
        if(name==null)
            throw new Failure("Query parameter 'name' is required");
        TopLevelItem item = resolveName(name);
        if (item==null)
            throw new Failure("Query parameter 'name' does not correspond to a known and readable item");
        if (remove(item))
            owner.save();
        return HttpResponses.ok();
    }

    /**
     * Resolves an item by name: first relative to the owning item group,
     * then by canonical full name against the whole Jenkins instance.
     */
    private @CheckForNull TopLevelItem resolveName(String name) {
        TopLevelItem item = getOwner().getItemGroup().getItem(name);
        if (item == null) {
            name = Items.getCanonicalName(getOwner().getItemGroup(), name);
            item = Jenkins.getInstance().getItemByFullName(name, TopLevelItem.class);
        }
        return item;
    }

    /**
     * Handles the configuration submission.
     *
     * Load view-specific properties here.
     */
    @Override
    protected void submit(StaplerRequest req) throws ServletException, FormException, IOException {
        JSONObject json = req.getSubmittedForm();
        synchronized (this) {
            recurse = json.optBoolean("recurse", true);
            jobNames.clear();
            Iterable<? extends TopLevelItem> items;
            if (recurse) {
                items = getOwner().getItemGroup().getAllItems(TopLevelItem.class);
            } else {
                items = getOwner().getItemGroup().getItems();
            }
            // An item is selected iff a request parameter with its relative name is present.
            for (TopLevelItem item : items) {
                String relativeNameFrom = item.getRelativeNameFrom(getOwner().getItemGroup());
                if(req.getParameter(relativeNameFrom)!=null) {
                    jobNames.add(relativeNameFrom);
                }
            }
        }
        setIncludeRegex(req.getParameter("useincluderegex") != null ? req.getParameter("includeRegex") : null);
        if (columns == null) {
            columns = new DescribableList<>(this);
        }
        columns.rebuildHetero(req, json, ListViewColumn.all(), "columns");
        if (jobFilters == null) {
            jobFilters = new DescribableList<>(this);
        }
        jobFilters.rebuildHetero(req, json, ViewJobFilter.all(), "jobFilters");
        String filter = Util.fixEmpty(req.getParameter("statusFilter"));
        statusFilter = filter != null ? "1".equals(filter) : null;
    }

    /**
     * Sets (and compiles) the include regex; clearing it when null/empty.
     * @since 1.526
     * @throws PatternSyntaxException if the regex does not compile
     */
    public void setIncludeRegex(String includeRegex) {
        this.includeRegex = Util.nullify(includeRegex);
        if (this.includeRegex == null)
            this.includePattern = null;
        else
            this.includePattern = Pattern.compile(includeRegex);
    }

    @Extension @Symbol("list")
    public static class DescriptorImpl extends ViewDescriptor {
        @Override
        public String getDisplayName() {
            return Messages.ListView_DisplayName();
        }

        /**
         * Checks if the include regular expression is valid.
         */
        public FormValidation doCheckIncludeRegex( @QueryParameter String value ) throws IOException, ServletException, InterruptedException {
            String v = Util.fixEmpty(value);
            if (v != null) {
                try {
                    Pattern.compile(v);
                } catch (PatternSyntaxException pse) {
                    return FormValidation.error(pse.getMessage());
                }
            }
            return FormValidation.ok();
        }
    }

    /**
     * @deprecated as of 1.391
     * Use {@link ListViewColumn#createDefaultInitialColumnList()}
     */
    @Deprecated
    public static List<ListViewColumn> getDefaultColumns() {
        return ListViewColumn.createDefaultInitialColumnList(ListView.class);
    }

    /**
     * Keeps the persisted job names of every ListView in the instance in
     * sync when items are renamed/moved or deleted.
     */
    @Restricted(NoExternalUse.class)
    @Extension
    public static final class Listener extends ItemListener {
        @Override
        public void onLocationChanged(final Item item, final String oldFullName, final String newFullName) {
            // Run with SYSTEM privileges so all views are visible.
            try (ACLContext acl = ACL.as(ACL.SYSTEM)) {
                locationChanged(oldFullName, newFullName);
            }
        }

        private void locationChanged(String oldFullName, String newFullName) {
            final Jenkins jenkins = Jenkins.getInstance();
            locationChanged(jenkins, oldFullName, newFullName);
            // Also visit view groups nested anywhere in the item tree (e.g. folders).
            for (Item g : jenkins.allItems()) {
                if (g instanceof ViewGroup) {
                    locationChanged((ViewGroup) g, oldFullName, newFullName);
                }
            }
        }

        private void locationChanged(ViewGroup vg, String oldFullName, String newFullName) {
            for (View v : vg.getViews()) {
                if (v instanceof ListView) {
                    renameViewItem(oldFullName, newFullName, vg, (ListView) v);
                }
                if (v instanceof ViewGroup) {
                    locationChanged((ViewGroup) v, oldFullName, newFullName);
                }
            }
        }

        /**
         * Rewrites the view's job names after a rename/move and saves the
         * view only if anything actually changed.
         */
        private void renameViewItem(String oldFullName, String newFullName, ViewGroup vg, ListView lv) {
            boolean needsSave;
            synchronized (lv) {
                Set<String> oldJobNames = new HashSet<>(lv.jobNames);
                lv.jobNames.clear();
                for (String oldName : oldJobNames) {
                    lv.jobNames.add(Items.computeRelativeNamesAfterRenaming(oldFullName, newFullName, oldName, vg.getItemGroup()));
                }
                needsSave = !oldJobNames.equals(lv.jobNames);
            }
            if (needsSave) { // do not hold ListView lock at the time
                try {
                    lv.save();
                } catch (IOException x) {
                    Logger.getLogger(ListView.class.getName()).log(Level.WARNING, null, x);
                }
            }
        }

        @Override
        public void onDeleted(final Item item) {
            // Run with SYSTEM privileges so all views are visible.
            try (ACLContext acl = ACL.as(ACL.SYSTEM)) {
                deleted(item);
            }
        }

        private void deleted(Item item) {
            final Jenkins jenkins = Jenkins.getInstance();
            deleted(jenkins, item);
            // Also visit view groups nested anywhere in the item tree (e.g. folders).
            for (Item g : jenkins.allItems()) {
                if (g instanceof ViewGroup) {
                    deleted((ViewGroup) g, item);
                }
            }
        }

        private void deleted(ViewGroup vg, Item item) {
            for (View v : vg.getViews()) {
                if (v instanceof ListView) {
                    deleteViewItem(item, vg, (ListView) v);
                }
                if (v instanceof ViewGroup) {
                    deleted((ViewGroup) v, item);
                }
            }
        }

        /**
         * Removes a deleted item from the view's name list and saves the
         * view only if it was actually listed. Save happens outside the lock.
         */
        private void deleteViewItem(Item item, ViewGroup vg, ListView lv) {
            boolean needsSave;
            synchronized (lv) {
                needsSave = lv.jobNames.remove(item.getRelativeNameFrom(vg.getItemGroup()));
            }
            if (needsSave) {
                try {
                    lv.save();
                } catch (IOException x) {
                    Logger.getLogger(ListView.class.getName()).log(Level.WARNING, null, x);
                }
            }
        }
    }
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.cli;
import com.facebook.presto.cli.ClientOptions.OutputFormat;
import com.facebook.presto.client.ClientSession;
import com.facebook.presto.sql.parser.StatementSplitter;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.Files;
import io.airlift.command.Command;
import io.airlift.command.HelpOption;
import io.airlift.log.Logging;
import io.airlift.log.LoggingConfiguration;
import jline.console.history.FileHistory;
import jline.console.history.MemoryHistory;
import org.fusesource.jansi.AnsiConsole;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import static com.facebook.presto.cli.Help.getHelpText;
import static com.facebook.presto.sql.parser.StatementSplitter.Statement;
import static com.facebook.presto.sql.parser.StatementSplitter.squeezeStatement;
import static com.google.common.io.ByteStreams.nullOutputStream;
import static io.airlift.log.Logging.Level;
import static java.lang.String.format;
import static jline.internal.Configuration.getUserHome;
@Command(name = "presto", description = "Presto interactive console")
public class Console
implements Runnable
{
private static final String PROMPT_NAME = "presto";
@Inject
public HelpOption helpOption;
@Inject
public ClientOptions clientOptions = new ClientOptions();
@Override
public void run()
{
    ClientSession session = clientOptions.toClientSession();
    // hasQuery: a query was passed via --execute; isFromFile: via --file.
    boolean hasQuery = !Strings.isNullOrEmpty(clientOptions.execute);
    boolean isFromFile = !Strings.isNullOrEmpty(clientOptions.file);
    // NOTE(review): this condition is true unless BOTH --execute and --file
    // are given (which is rejected below anyway), so the ANSI console is
    // installed for effectively every valid invocation. If the intent was
    // "interactive runs only", the condition would be !hasQuery && !isFromFile
    // — confirm before changing.
    if (!hasQuery || !isFromFile) {
        AnsiConsole.systemInstall();
    }
    initializeLogging(session.isDebug());
    String query = clientOptions.execute;
    if (isFromFile) {
        // --execute and --file are mutually exclusive.
        if (hasQuery) {
            throw new RuntimeException("both --execute and --file specified");
        }
        try {
            query = Files.toString(new File(clientOptions.file), Charsets.UTF_8);
            hasQuery = true;
        }
        catch (IOException e) {
            throw new RuntimeException(format("Error reading from file %s: %s", clientOptions.file, e.getMessage()));
        }
    }
    // Batch mode when a query was supplied; otherwise start the interactive REPL.
    try (QueryRunner queryRunner = QueryRunner.create(session)) {
        if (hasQuery) {
            executeCommand(queryRunner, query, clientOptions.outputFormat);
        }
        else {
            runConsole(queryRunner, session);
        }
    }
}
@SuppressWarnings("fallthrough")
private void runConsole(QueryRunner queryRunner, ClientSession session)
{
    // Interactive REPL: reads lines, accumulates multi-line statements in
    // `buffer`, executes each complete statement, and keeps history.
    try (TableNameCompleter tableNameCompleter = new TableNameCompleter(clientOptions.toClientSession(), queryRunner);
            LineReader reader = new LineReader(getHistory(), tableNameCompleter)) {
        tableNameCompleter.populateCache(session.getSchema());
        StringBuilder buffer = new StringBuilder();
        while (true) {
            // read a line of input from user
            String prompt = PROMPT_NAME + ":" + session.getSchema();
            if (buffer.length() > 0) {
                // Continuation prompt: same width as the primary prompt, ending in "-".
                prompt = Strings.repeat(" ", prompt.length() - 1) + "-";
            }
            String line = reader.readLine(prompt + "> ");
            // add buffer to history and clear on user interrupt (Ctrl-C)
            if (reader.interrupted()) {
                String partial = squeezeStatement(buffer.toString());
                if (!partial.isEmpty()) {
                    reader.getHistory().add(partial);
                }
                buffer = new StringBuilder();
                continue;
            }
            // exit on EOF (Ctrl-D)
            if (line == null) {
                return;
            }
            // check for special commands if this is the first line
            if (buffer.length() == 0) {
                String command = line.trim();
                if (command.endsWith(";")) {
                    command = command.substring(0, command.length() - 1).trim();
                }
                switch (command.toLowerCase()) {
                    case "exit":
                    case "quit":
                        return;
                    case "help":
                        System.out.println();
                        System.out.println(getHelpText());
                        continue;
                }
            }
            // not a command, add line to buffer
            buffer.append(line).append("\n");
            // execute any complete statements
            String sql = buffer.toString();
            StatementSplitter splitter = new StatementSplitter(sql, ImmutableSet.of(";", "\\G"));
            for (Statement split : splitter.getCompleteStatements()) {
                // "\G" terminator requests vertical (one column per line) output.
                OutputFormat outputFormat = OutputFormat.ALIGNED;
                if (split.terminator().equals("\\G")) {
                    outputFormat = OutputFormat.VERTICAL;
                }
                process(queryRunner, split.statement(), outputFormat, true);
                reader.getHistory().add(squeezeStatement(split.statement()) + split.terminator());
            }
            // replace buffer with trailing partial statement
            buffer = new StringBuilder();
            String partial = splitter.getPartialStatement();
            if (!partial.isEmpty()) {
                buffer.append(partial).append('\n');
            }
        }
    }
    catch (IOException e) {
        System.err.println("Readline error: " + e.getMessage());
    }
}
private static void executeCommand(QueryRunner queryRunner, String query, OutputFormat outputFormat)
{
StatementSplitter splitter = new StatementSplitter(query + ";");
for (Statement split : splitter.getCompleteStatements()) {
process(queryRunner, split.statement(), outputFormat, false);
}
}
private static void process(QueryRunner queryRunner, String sql, OutputFormat outputFormat, boolean interactive)
{
try (Query query = queryRunner.startQuery(sql)) {
query.renderOutput(System.out, outputFormat, interactive);
}
catch (Exception e) {
System.out.println("Error running command: " + e.getMessage());
if (queryRunner.getSession().isDebug()) {
e.printStackTrace();
}
}
}
private static MemoryHistory getHistory()
{
MemoryHistory history;
File historyFile = new File(getUserHome(), ".presto_history");
try {
history = new FileHistory(historyFile);
}
catch (IOException e) {
System.err.printf("WARNING: Failed to load history file (%s): %s. " +
"History will not be available during this session.%n",
historyFile, e.getMessage());
history = new MemoryHistory();
}
history.setAutoTrim(true);
return history;
}
private static void initializeLogging(boolean debug)
{
// unhook out and err while initializing logging or logger will print to them
PrintStream out = System.out;
PrintStream err = System.err;
try {
if (debug) {
Logging logging = Logging.initialize();
logging.configure(new LoggingConfiguration());
logging.setLevel("com.facebook.presto", Level.DEBUG);
}
else {
System.setOut(nullPrintStream());
System.setErr(nullPrintStream());
Logging logging = Logging.initialize();
logging.configure(new LoggingConfiguration());
logging.disableConsole();
}
}
catch (IOException e) {
throw Throwables.propagate(e);
}
finally {
System.setOut(out);
System.setErr(err);
}
}
private static PrintStream nullPrintStream()
{
return new PrintStream(nullOutputStream());
}
}
|
|
package com.example.aventador.protectalarm.process;
import android.app.Activity;
import com.comthings.gollum.api.gollumandroidlib.callback.GollumCallbackGetBoolean;
import com.example.aventador.protectalarm.callbacks.GollumCallbackGetConfiguration;
import com.example.aventador.protectalarm.process.Runners.GuardianThread;
import com.example.aventador.protectalarm.storage.Configuration;
import com.example.aventador.protectalarm.tools.Logger;
import com.example.aventador.protectalarm.tools.Recaller;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.example.aventador.protectalarm.process.FastProtectionAnalyser.AnalyzerStatus.DECREASE_DB;
import static com.example.aventador.protectalarm.process.FastProtectionAnalyser.AnalyzerStatus.INCREASE_DB;
import static com.example.aventador.protectalarm.tools.Recaller.FAST_PROTECTION_ANALYZER_TAG;
/**
* Created by Giangrasso on 26/10/2017.
*/
/**
* Visibility fixed at Package-private!
*
* FastProtectionAnalyser:
* it calculates the best value in decibel automatically
*
* Process:
* First of all we start at DECREASE_DB mode, as long as an attack is not detected we DECREASE "currentDbTolerance"
 * When an attack is detected, we reverse the process and increment "currentDbTolerance" in small steps: this is the INCREASE_DB mode.
 * Finally, when we no longer detect an attack, we can assume we have found the right dB tolerance.
*
* Example:
* We start at -10dB
* -10dB -> no Attack detected.
* ...
* -90dB -> We detect an attack at -90dB !
 * at this point we reverse the process
* -88dB -> We detect an attack
* -86dB -> Again
* -84dB -> No attack detected. -84dB is a good value!!
 * But -84dB is a borderline value, so to prevent a potential false alarm we step from -84dB to -82dB. /!\ THIS MARGIN IS ARBITRARY, but it does the job. /!\
*
*/
class FastProtectionAnalyser {
    private static final String TAG = "FastProtectionAnalyser";
    // Invoked once the optimal configuration has been found.
    private GollumCallbackGetConfiguration cbConfigFound;
    // Result holder: frequency plus the tuned dB tolerance and the fixed peak/margin settings.
    private Configuration configuration;
    private int nbSequences;
    private int nbScans;
    private int nbChannels;
    private Activity activity;
    private final int DB_STEP_DECREASE = -10; // coarse step while searching downwards (DECREASE_DB mode)
    private final int DB_STEP_INCREASE = 2; // fine step while backing off upwards (INCREASE_DB mode)
    private final int DB_TOLERANCE_MINIMUM = -120; // -120 dB, lower bound of the search range
    private final int DB_TOLERANCE_MAXIMUM = -10; // -10 dB, upper bound of the search range and starting value
    private final int PEAK_TOLERANCE = 40; // 40%
    private final int MARGIN_ERROR = 10; // 10%
    private GuardianThread guardianThread; // watchdog currently testing currentDbTolerance
    private int currentDbTolerance; // dB threshold currently being evaluated
    private int decibelStep; // signed step added to currentDbTolerance between rounds
    private AnalyzerStatus status; // current search direction (DECREASE_DB or INCREASE_DB)
    private AtomicBoolean run; // true while an analysis is in progress (guards against double start)
    private GollumCallbackGetBoolean cbForceStopDone = null; // null by default; set when a user-requested stop is pending (see forceStopProcess()). If cbForceStopDone is not null the analyzer is force stopped.
    /**
     * @param activity host activity, forwarded to each {@link GuardianThread}
     * @param frequency frequency stored in the resulting configuration (unit not visible here — presumably Hz; confirm against {@link Configuration})
     * @param nbScans number of scans per guardian round
     * @param nbSequences number of sequences per guardian round
     * @param nbChannels nb channels required, for Specan
     * @param cbConfigurationFound called when FastProtectionAnalyser has found the optimal configuration. {@link Configuration}
     */
    public FastProtectionAnalyser(Activity activity, int frequency, int nbScans, int nbSequences, int nbChannels, GollumCallbackGetConfiguration cbConfigurationFound) {
        this.activity = activity;
        this.nbScans = nbScans;
        this.nbSequences = nbSequences;
        this.nbChannels = nbChannels;
        this.cbConfigFound = cbConfigurationFound;
        run = new AtomicBoolean(false);
        this.configuration = new Configuration(frequency, DB_TOLERANCE_MINIMUM, PEAK_TOLERANCE, MARGIN_ERROR);
    }
    /**
     * Starts the analysis process in DECREASE_DB mode, from the highest
     * tolerance (DB_TOLERANCE_MAXIMUM) downwards.
     * A call while an analysis is already running is silently ignored.
     */
    public void start() {
        if (!run.compareAndSet(false, true)) {
            return; // one call of run() expected !!!!
        }
        currentDbTolerance = DB_TOLERANCE_MAXIMUM;
        status = DECREASE_DB;
        decibelStep = DB_STEP_DECREASE;
        cbForceStopDone = null;
        startGuardian();
    }
    /**
     * Creates and starts a GuardianThread that watches for attacks at the
     * current value of currentDbTolerance. The first callback fires when an
     * attack is detected, the second when the guardian finishes without one.
     */
    private void startGuardian() {
        Logger.d(TAG, "startGuardian()");
        Logger.d(TAG, "startGuardian(): currentDbTolerance: " + getCurrentDbTolerance());
        guardianThread = new GuardianThread(activity, getNbSequences(), getNbScans(), getNbChannels(), getCurrentDbTolerance(), PEAK_TOLERANCE, MARGIN_ERROR, new GollumCallbackGetBoolean() {
            @Override
            public void done(boolean b) {
                attackDetected();
            }
        }, new GollumCallbackGetBoolean() {
            @Override
            public void done(boolean b) {
                guardianIsDone();
            }
        });
        guardianThread.start(false);
    }
    /**
     * Indicates whether the analyzer shall be stopped (a stop callback is pending).
     * @return true if {@link #stop} has been called since the last {@link #start}
     */
    private boolean forceStopProcess() {
        return cbForceStopDone != null;
    }
    /**
     * Requests an asynchronous stop. The actual shutdown happens the next time
     * one of the guardian callbacks runs; this method only records the request.
     * Ignored when no analysis is running.
     *
     * @param cbForceStopDone called when the FastProtectionAnalyser process is stopped
     */
    public void stop(GollumCallbackGetBoolean cbForceStopDone) {
        Logger.d(TAG, "stop()");
        if (!run.get()) {
            return;
        }
        this.cbForceStopDone = cbForceStopDone;
    }
    /**
     * Called when the guardian thread has finished without detecting a
     * brute-force attack at the current tolerance.
     */
    private void guardianIsDone() {
        Logger.d(TAG, "guardian done");
        if (forceStopProcess()) {
            /*
            A user-requested stop is pending: shut the analyzer down and
            acknowledge via the stop callback.
            */
            Logger.d(TAG, "guardianIsDone : forceStopProcess");
            run.set(false); // the analyzer doesn't work.
            cbForceStopDone.done(true); // force stop is done.
            return;
        }
        if (status == INCREASE_DB) {
            /*
            No attack while backing off upwards: the search is over.
            Recall:
            * We start in DECREASE_DB mode: as long as no attack is detected we DECREASE "currentDbTolerance".
            * When an attack is detected we reverse the process, incrementing "currentDbTolerance" in small steps (INCREASE_DB mode).
            * Finally, when no attack is detected anymore, we can assume we found the right dB tolerance.
            * As it is a borderline value, DB_STEP_INCREASE is added once more as a safety margin against false alarms. /!\ IT'S ARBITRARY, but it does the job. /!\
            */
            Logger.d(TAG, "guardianIsDone: status == INCREASE_DB: End of process");
            configuration.setDbTolerance(getCurrentDbTolerance() + DB_STEP_INCREASE);
            Logger.d(TAG, "guardianIsDone: db tolerance: " + configuration.getDbTolerance());
            run.set(false);
            cbConfigFound.done(true, configuration);
            return;
        } else {
            /*
            Otherwise, we add decibelStep to currentDbTolerance
            regardless of the mode DECREASE/INCREASE_DB
            and we restart the guardian
            */
            currentDbTolerance += decibelStep;
            startGuardian();
        }
    }
    /**
     * Called when the guardian reports a brute-force attack at the current
     * tolerance (during calibration this is treated as a false positive).
     */
    private void attackDetected() {
        Logger.d(TAG, "attack detected");
        guardianThread.kill(); // we kill the guardian.
        if (forceStopProcess()) {
            /*
            A user-requested stop is pending:
            stop the analyzer, acknowledge via cbForceStopDone,
            and cancel all pending jobs associated to this process
            {@link com.example.aventador.protectalarm.tools.Recaller.FAST_PROTECTION_ANALYZER_TAG}
            */
            Logger.d(TAG, "attackDetected : forceStopProcess");
            run.set(false);
            cbForceStopDone.done(true);
            Recaller.getInstance().cancel(FAST_PROTECTION_ANALYZER_TAG);
            return;
        }
        if (status == DECREASE_DB) {
            /*
            An attack detected in DECREASE_DB mode means we overshot:
            toggle to INCREASE_DB mode to refine the dB tolerance value.
            As long as attacks keep being detected, currentDbTolerance
            will be INCREASED in DB_STEP_INCREASE steps.
            */
            Logger.d(TAG, "attackDetected: reverse process, decibelStep fixed to 2");
            decibelStep = DB_STEP_INCREASE;
            status = INCREASE_DB;
        }
        checkThreadLater(); // this routine check if guardianThread is dead.
    }
    /**
     * Uses the Recaller to give the guardian thread time to stop.
     * When the callback fires, checks whether the thread is dead; if not,
     * schedules itself again via Recaller.recallMe().
     */
    private void checkThreadLater() {
        if (forceStopProcess()) {
            cbForceStopDone.done(true);
            return;
        }
        Recaller.getInstance().recallMe(FAST_PROTECTION_ANALYZER_TAG, 1000L, new GollumCallbackGetBoolean() {
            @Override
            public void done(boolean b) {
                /*
                Re-invoked after the 1000 ms recall delay, repeatedly,
                for as long as the guardian thread has not died.
                */
                if (guardianThread.isAlive()) {
                    Logger.d(TAG, "guardianThread is always alive");
                    checkThreadLater(); // retry and check for the next time
                } else {
                    Logger.d(TAG, "guardianThread is dead");
                    /*
                    Otherwise, we add decibelStep to currentDbTolerance
                    regardless of the mode DECREASE/INCREASE_DB
                    and we restart the guardian
                    */
                    currentDbTolerance += decibelStep;
                    startGuardian();
                }
            }
        });
    }
    // dB threshold currently under evaluation.
    public int getCurrentDbTolerance() {
        return currentDbTolerance;
    }
    // Result configuration (dbTolerance is only final once cbConfigFound has fired).
    public Configuration getConfiguration() {
        return configuration;
    }
    public int getNbSequences() {
        return nbSequences;
    }
    public int getNbScans() {
        return nbScans;
    }
    public int getNbChannels() {
        return nbChannels;
    }
    public int getDecibelStep() {
        return decibelStep;
    }
    enum AnalyzerStatus {
        /*
        States of the FastProtectionAnalyzer process.
        */
        INCREASE_DB, // --> currentDbTolerance will raise up
        DECREASE_DB; // --> currentDbTolerance will decrease
    }
}
|
|
/*
* This file is part of GoodTime-Industrial, licensed under MIT License (MIT).
*
* Copyright (c) 2015 GoodTime Studio <https://github.com/GoodTimeStudio>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.mcgoodtime.productionline.init;
import com.mcgoodtime.productionline.items.ItemStacks;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.CraftingManager;
import net.minecraft.item.crafting.FurnaceRecipes;
import net.minecraft.item.crafting.IRecipe;
import net.minecraftforge.fml.common.registry.GameRegistry;
import java.util.List;
import java.util.Map;
import ic2.api.item.IC2Items;
//import ic2.core.AdvRecipe;
/**
* The class for loading all the recipes of GoodTime-Industrial. Migrated from old loaders.
*
* @author liach
*/
public class PLRecipes {
    // Experience awarded per smelting operation registered below.
    private static final float XP = 2F;
    /**
     * Load recipes of GoodTime-Industrial.
     * Registers vanilla crafting recipes and furnace smelting recipes.
     * The IC2 machine recipes are kept commented out below, presumably pending
     * migration to the current IC2 API — confirm before deleting.
     */
    public static void init() {
        //disable recipes
        //disable();
        //vanilla recipe registry;
        GameRegistry.addRecipe(
                ItemStacks.copyStack(PLBlocks.opticalGlass, 5),
                "ABA",
                "BAB",
                "ABA",
                'A', Blocks.QUARTZ_BLOCK,
                'B', Blocks.GLASS
        );
        GameRegistry.addRecipe(
                PLItems.advSolarLensUnit,
                "A A",
                "A A",
                " A ",
                'A', PLBlocks.opticalGlass
        );
        GameRegistry.addRecipe(
                PLItems.advSolarLensGroup,
                "A A",
                "ABA",
                "CAC",
                'A', PLItems.advSolarLensUnit,
                'B', IC2Items.getItem("dust", "diamond"),
                'C', IC2Items.getItem("crafting", "carbon_plate")
        );
        // NOTE(review): the pattern below uses key 'D' ("DAD") but only 'A',
        // 'B' and 'C' are bound — this looks like a missing ingredient binding
        // or a wrong pattern row; confirm against the intended recipe.
        GameRegistry.addRecipe(
                PLItems.advSolarLensCluster,
                "ABA",
                "ACA",
                "DAD",
                'A', PLItems.advSolarLensGroup,
                'B', PLItems.dustIridium,
                'C', IC2Items.getItem("dust", "energium")
        );
        GameRegistry.addRecipe(
                PLItems.heatInsulationMaterial,
                "AAA",
                "AAA",
                "BBB",
                'A', IC2Items.getItem("rubber"),
                'B', IC2Items.getItem("advIronIngot"));
        GameRegistry.addRecipe(
                new ItemStack(PLItems.diamondApple, 1, 1),
                "DDD",
                "DAD",
                "DDD",
                'D', PLItems.diamondApple,
                'A', Blocks.DIAMOND_BLOCK
        );
        //smelting registry
        GameRegistry.addSmelting(PLBlocks.oreIridium, PLItems.ingotIridium, XP);
        GameRegistry.addSmelting(PLItems.dustIridium, PLItems.ingotIridium, XP);
        GameRegistry.addSmelting(IC2Items.getItem("iridiumOre"), PLItems.ingotIridium, XP);
        //ic2 recipe registry
        /*
        Recipes.compressor.addRecipe(
                new RecipeInputItemStack(PLItems.getItems(PLItems.smallCompressedWaterHyacinth, 8)),
                null,
                PLBlocks.compressedWaterHyacinth
        );
        Recipes.compressor.addRecipe(
                new RecipeInputItemStack(new ItemStack(PLBlocks.waterHyacinth, 8)),
                null,
                PLItems.smallCompressedWaterHyacinth
        );
        Recipes.metalformerRolling.addRecipe(
                new RecipeInputItemStack(new ItemStack(Items.diamond)),
                null,
                PLItems.diamondPlate
        );
        Recipes.metalformerRolling.addRecipe(
                new RecipeInputItemStack(PLItems.heatInsulationMaterial),
                null,
                PLItems.heatInsulationPlate
        );
        Recipes.compressor.addRecipe(
                new RecipeInputItemStack(PLItems.getItems(PLItems.diamondPlate, 9)),
                null,
                PLItems.denseDiamondPlate
        );
        Recipes.compressor.addRecipe(
                new RecipeInputItemStack(PLItems.getItems(PLItems.smallDustIridium, 8)),
                null,
                PLItems.ingotIridium
        );
        Recipes.macerator.addRecipe(
                new RecipeInputItemStack(new ItemStack(PLBlocks.oreIridium)),
                null,
                PLItems.getItems(PLItems.crushedIridium, 2)
        );
        Recipes.macerator.addRecipe(
                new RecipeInputItemStack(PLItems.ingotIridium),
                null,
                PLItems.dustIridium
        );
        Recipes.compressor.addRecipe(
                new RecipeInputItemStack(IC2Items.getItem("diamondDust"), 3),
                null,
                PLItems.carbonCrystal
        );
        Recipes.compressor.addRecipe(
                new RecipeInputItemStack(IC2Items.getItem("denseplateobsidian"), 8),
                null,
                PLItems.obsidianPlateGravityField
        );
        NBTTagCompound oreWash = new NBTTagCompound();
        oreWash.setInteger("amount", 1000);
        Recipes.oreWashing.addRecipe(
                new RecipeInputItemStack(PLItems.crushedIridium),
                oreWash,
                PLItems.cleanedCrushedIridium,
                StackUtil.copyWithSize(Ic2Items.smallTinDust, 2)
        );
        Recipes.centrifuge.addRecipe(
                new RecipeInputItemStack(PLItems.cleanedCrushedIridium),
                null,
                PLItems.dustIridium,
                PLItems.getItems(PLItems.smallDustIridium, 2)
        );
        Recipes.cannerBottle.addRecipe(
                new RecipeInputItemStack(PLItems.rigidPaperPack),
                new RecipeInputItemStack(new ItemStack(PLItems.salt, 9)),
                new ItemStack(PLItems.packagedSalt)
        );
        AdvRecipe.addAndRegister(
                PLBlocks.cseu,
                "ABA",
                "CCC",
                "CCC",
                'A', IC2Items.getItem("insulatedGoldCableItem"),
                'B', IC2Items.getItem("advancedMachine"),
                'C', PLItems.ceu
        );*/
    }
    /*
    private static void disable() {
        disableRecipes(Ic2Items.massFabricator);
        disableRecipes(IC2Items.getItem("iridiumPlate"));
    }*/
    /**
     * Disable recipes.
     * Linearly scans the global crafting recipe list and removes every recipe
     * whose output stack equals {@code itemStack}; the index is decremented
     * after each removal so no entry is skipped.
     *
     * @param itemStack Disable all recipes of this item.
     */
    @SuppressWarnings("unchecked")
    public static void disableRecipes(ItemStack itemStack) {
        List<IRecipe> recipeList = CraftingManager.getInstance().getRecipeList();
        for (int i = 0; i < recipeList.size(); i++) {
            IRecipe iRecipe = recipeList.get(i);
            ItemStack recipesResult = iRecipe.getRecipeOutput();
            if (ItemStack.areItemStacksEqual(itemStack, recipesResult)) {
                recipeList.remove(i--);
            }
        }
    }
    /**
     * Removes the smelting recipe keyed by {@code itemStack}.
     * NOTE(review): Map.remove depends on the key's equals/hashCode; if
     * ItemStack does not override them, this only removes the entry when the
     * exact same instance was used as key — confirm this actually works.
     *
     * @param itemStack input stack whose smelting recipe should be removed
     */
    @SuppressWarnings({"unchecked", "SuspiciousMethodCalls"})
    public static void disableSmelting(ItemStack itemStack) {
        Map<ItemStack, ItemStack> smelting = FurnaceRecipes.instance().getSmeltingList();
        smelting.remove(itemStack);
    }
}
|
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.08.09 at 09:22:32 PM IST
//
package com.pacificmetrics.ims.apip.qti.item;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.NormalizedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
*
* The HotspotChoice complexType is the container for data about hotspot choices (this ComplexType is intentionally empty). The hotspots that define the choices that can be selected by the candidate. If the delivery system does not support pointer-based selection then the order in which the choices are given must be the order in which they are offered to the candidate for selection. For example, the 'tab order' in simple keyboard navigation. If hotspots overlap then those listed first hide overlapping hotspots that appear later. The default hotspot, if defined, must appear last.
*
*
* <p>Java class for HotspotChoice.Type complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="HotspotChoice.Type">
* <complexContent>
* <extension base="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}EmptyPrimitiveType.Type">
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}templateIdentifier.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}identifier.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}id.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}hotspotLabel.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}shape.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}coords.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}showHide.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}xmllang.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}class.HotspotChoice.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}label.HotspotChoice.Attr"/>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "HotspotChoice.Type")
@XmlRootElement(name = "hotspotChoice")
public class HotspotChoice
    extends EmptyPrimitiveTypeType {

    @XmlAttribute(name = "templateIdentifier")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String templateIdentifier;
    @XmlAttribute(name = "identifier", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String identifier;
    @XmlAttribute(name = "id")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    protected String id;
    @XmlAttribute(name = "hotspotLabel")
    protected String hotspotLabel;
    @XmlAttribute(name = "shape", required = true)
    protected String shape;
    @XmlAttribute(name = "coords", required = true)
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    @XmlSchemaType(name = "normalizedString")
    protected String coords;
    @XmlAttribute(name = "showHide")
    protected String showHide;
    @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace")
    protected String lang;
    @XmlAttribute(name = "class")
    protected List<String> clazzs;
    @XmlAttribute(name = "label")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    @XmlSchemaType(name = "normalizedString")
    protected String label;

    /**
     * @return the templateIdentifier attribute, or {@code null} if unset
     */
    public String getTemplateIdentifier() {
        return this.templateIdentifier;
    }

    /**
     * @param value new templateIdentifier attribute, may be {@code null}
     */
    public void setTemplateIdentifier(String value) {
        templateIdentifier = value;
    }

    /**
     * @return the (required) identifier attribute
     */
    public String getIdentifier() {
        return this.identifier;
    }

    /**
     * @param value new identifier attribute
     */
    public void setIdentifier(String value) {
        identifier = value;
    }

    /**
     * @return the XML ID of this element, or {@code null} if unset
     */
    public String getId() {
        return this.id;
    }

    /**
     * @param value new XML ID, may be {@code null}
     */
    public void setId(String value) {
        id = value;
    }

    /**
     * @return the hotspotLabel attribute, or {@code null} if unset
     */
    public String getHotspotLabel() {
        return this.hotspotLabel;
    }

    /**
     * @param value new hotspotLabel attribute, may be {@code null}
     */
    public void setHotspotLabel(String value) {
        hotspotLabel = value;
    }

    /**
     * @return the (required) shape attribute
     */
    public String getShape() {
        return this.shape;
    }

    /**
     * @param value new shape attribute
     */
    public void setShape(String value) {
        shape = value;
    }

    /**
     * @return the (required) coords attribute
     */
    public String getCoords() {
        return this.coords;
    }

    /**
     * @param value new coords attribute
     */
    public void setCoords(String value) {
        coords = value;
    }

    /**
     * @return the showHide attribute, defaulting to {@code "show"} when unset
     */
    public String getShowHide() {
        return (showHide == null) ? "show" : showHide;
    }

    /**
     * @param value new showHide attribute, may be {@code null} to restore the default
     */
    public void setShowHide(String value) {
        showHide = value;
    }

    /**
     * @return the xml:lang attribute, or {@code null} if unset
     */
    public String getLang() {
        return this.lang;
    }

    /**
     * @param value new xml:lang attribute, may be {@code null}
     */
    public void setLang(String value) {
        lang = value;
    }

    /**
     * Returns the live list backing the {@code class} attribute, creating it
     * lazily on first access. Any modification made to the returned list is
     * reflected in this JAXB object, which is why no setter exists for it.
     *
     * @return the modifiable, never-{@code null} list of class tokens
     */
    public List<String> getClazzs() {
        List<String> current = this.clazzs;
        if (current == null) {
            current = new ArrayList<String>();
            this.clazzs = current;
        }
        return current;
    }

    /**
     * @return the label attribute, or {@code null} if unset
     */
    public String getLabel() {
        return this.label;
    }

    /**
     * @param value new label attribute, may be {@code null}
     */
    public void setLabel(String value) {
        label = value;
    }
}
|
|
package com.example.tests;
public class AddressData implements Comparable<AddressData> {
private String firstName;
private String lastName;
private String addressFirst;
private String phoneHome;
private String phoneMobile;
private String phoneWork;
private String email;
private String birthDay;
private String birthMonth;
private String birthYear;
private String newGroup;
private String addressSecond;
private String phoneHomeSecond;
public AddressData(String firstName, String lastName, String addressFirst,
String phoneHome, String phoneMobile, String phoneWork,
String email, String birthDay, String birthMonth, String birthYear,
String newGroup, String addressSecond, String phoneHomeSecond) {
this.firstName = firstName;
this.lastName = lastName;
this.addressFirst = addressFirst;
this.phoneHome = phoneHome;
this.phoneMobile = phoneMobile;
this.phoneWork = phoneWork;
this.email = email;
this.birthDay = birthDay;
this.birthMonth = birthMonth;
this.birthYear = birthYear;
this.newGroup = newGroup;
this.addressSecond = addressSecond;
this.phoneHomeSecond = phoneHomeSecond;
}
public AddressData() {
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
//result = prime * result + ((firstName == null) ? 0 : firstName.hashCode());
return result;
}
@Override
public String toString() {
return "AddressData [firstName=" + firstName + ", lastName=" + lastName
+ ", addressFirst=" + addressFirst + ", phoneHome=" + phoneHome
+ ", phoneMobile=" + phoneMobile + ", phoneWork=" + phoneWork
+ ", email=" + email + ", birthDay=" + birthDay
+ ", birthMonth=" + birthMonth + ", birthYear=" + birthYear
+ ", newGroup=" + newGroup + ", addressSecond=" + addressSecond
+ ", phoneHomeSecond=" + phoneHomeSecond + "]";
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
AddressData other = (AddressData) obj;
if (firstName == null) {
if (other.firstName != null)
return false;
} else if (!firstName.equals(other.firstName))
return false;
return true;
}
@Override
public int compareTo(AddressData other) {
return this.firstName.toLowerCase().compareTo(other.firstName.toLowerCase());
}
public AddressData withFirstName(String firstName) {
this.firstName = firstName;
return this;
}
public AddressData withLastName(String lastName) {
this.lastName = lastName;
return this;
}
public AddressData withAddressFirst(String addressFirst) {
this.addressFirst = addressFirst;
return this;
}
public AddressData withPhoneHome(String phoneHome) {
this.phoneHome = phoneHome;
return this;
}
public AddressData withPhoneMobile(String phoneMobile) {
this.phoneMobile = phoneMobile;
return this;
}
public AddressData withPhoneWork(String phoneWork) {
this.phoneWork = phoneWork;
return this;
}
public AddressData withEmail(String email) {
this.email = email;
return this;
}
public AddressData withBirthDay(String birthDay) {
this.birthDay = birthDay;
return this;
}
public AddressData withBirthMonth(String birthMonth) {
this.birthMonth = birthMonth;
return this;
}
public AddressData withBirthYear(String birthYear) {
this.birthYear = birthYear;
return this;
}
public AddressData withNewGroup(String newGroup) {
this.newGroup = newGroup;
return this;
}
public AddressData withAddressSecond(String addressSecond) {
this.addressSecond = addressSecond;
return this;
}
public AddressData withPhoneHomeSecond(String phoneHomeSecond) {
this.phoneHomeSecond = phoneHomeSecond;
return this;
}
public String getFirstName() {
return firstName;
}
public String getLastName() {
return lastName;
}
public String getAddressFirst() {
return addressFirst;
}
public String getPhoneHome() {
return phoneHome;
}
public String getPhoneMobile() {
return phoneMobile;
}
public String getPhoneWork() {
return phoneWork;
}
public String getEmail() {
return email;
}
public String getBirthDay() {
return birthDay;
}
public String getBirthMonth() {
return birthMonth;
}
public String getBirthYear() {
return birthYear;
}
public String getNewGroup() {
return newGroup;
}
public String getAddressSecond() {
return addressSecond;
}
public String getPhoneHomeSecond() {
return phoneHomeSecond;
}
}
|
|
package scray.client.jdbc;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.StringTokenizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* URL schema for scray connections.
*
* jdbc:scray:[stateless|stateful]://host[,failoverhost]:port/dbSystem/dbId/
* querySpace
*
*/
public class ScrayURL {
Logger log = LoggerFactory.getLogger(this.getClass());
public static final String SCHEME = "jdbc";
public static final String SUBSCHEME = "scray";
public static enum ProtocolModes {
stateful, stateless
}
public URI opaque;
public UriPattern pattern;
public ScrayURL(String url) throws URISyntaxException {
this.opaque = new URI(url);
this.pattern = new UriPattern(opaque);
}
public class UriPattern {
final static String HIER_DELIM = "://";
final static String IP_DELIM = ",";
final static String PORT_DELIM = ":";
private URI[] absoluteUri;
private String fullScheme;
private String mainScheme;
private String schemeExtension;
private String subScheme;
private String hierPart;
private String protocolMode;
private String[] host;
private int port = -1;
private String[] hostAndPort;
private String path = null;
private String dbSystem;
private String dbId;
private String querySpace;
		/**
		 * Parses an opaque scray JDBC URI of the form
		 * jdbc:scray:[stateless|stateful]://host[,host...]:port/dbSystem/dbId/querySpace
		 * (or with a single-element path containing only the query space) and
		 * decomposes it into its components, validating each part.
		 *
		 * @param opaqueUri the full opaque JDBC URI
		 * @throws URISyntaxException if any component does not match the scray URL schema
		 */
		public UriPattern(URI opaqueUri) throws URISyntaxException {
			/* extract absolute URI */
			mainScheme = opaqueUri.getScheme();
			if (!mainScheme.equals(SCHEME)) {
				throw new URISyntaxException(mainScheme,
						"Invalid URL: faulty main scheme");
			}
			String schemeSpecificPart = opaqueUri.getSchemeSpecificPart();
			int startOfHier = schemeSpecificPart.indexOf(HIER_DELIM);
			if (startOfHier <= 0) {
				throw new URISyntaxException(opaqueUri.toString(),
						"Invalid URL: hierarchical part mismatch");
			}
			hierPart = schemeSpecificPart.substring(startOfHier
					+ HIER_DELIM.length());
			int startOfPort = hierPart.indexOf(PORT_DELIM);
			if (startOfPort <= 0) {
				throw new URISyntaxException(opaqueUri.toString(),
						"Invalid URL: hosts part mismatch");
			}
			String hostsPart = hierPart.substring(0, startOfPort);
			String restOfHier = hierPart.substring(startOfPort);
			// expand the comma-separated (failover) host list into one absolute URI per host
			StringTokenizer hostTokens = new StringTokenizer(hostsPart,
					IP_DELIM);
			absoluteUri = new URI[hostTokens.countTokens()];
			int i = 0;
			while (hostTokens.hasMoreTokens()) {
				absoluteUri[i++] = new URI(mainScheme + HIER_DELIM + hostTokens.nextToken()
						+ restOfHier);
			}
			host = new String[absoluteUri.length];
			hostAndPort = new String[absoluteUri.length];
			for (int j = 0; j < absoluteUri.length; j++) {
				/* extract host and port */
				host[j] = absoluteUri[j].getHost();
				if (host[j] == null) {
					throw new URISyntaxException(host[j],
							"Invalid URL: faulty host");
				}
				// port is taken from the first URI that defines one and then shared by all hosts
				if (port == -1) {
					port = absoluteUri[j].getPort();
					if (port == -1) {
						throw new URISyntaxException(String.valueOf(port),
								"Invalid URL: faulty port");
					}
				}
				hostAndPort[j] = host[j] + ":" + String.valueOf(port);
				/* decompose path (done once, on the first URI): either
				   3 components (dbSystem/dbId/querySpace) or 1 (querySpace only) */
				if (path == null) {
					path = absoluteUri[j].getPath();
					StringTokenizer pathElems = new StringTokenizer(path, "/");
					if (pathElems.countTokens() == 3) {
						dbSystem = pathElems.nextToken();
						dbId = pathElems.nextToken();
						querySpace = pathElems.nextToken();
					} else if( pathElems.countTokens() == 1) {
						// if there is only a single token, we have a queryspacename, only
						querySpace = pathElems.nextToken();
						dbSystem = "";
						dbId = "";
					} else {
						throw new URISyntaxException(path,
								"Invalid URL: faulty path");
					}
				}
			}
			/* decompose extended scheme: "scray:<mode>" between "jdbc:" and "://" */
			schemeExtension = schemeSpecificPart.substring(0, startOfHier);
			fullScheme = mainScheme + ":" + schemeExtension;
			StringTokenizer schemeExTokens = new StringTokenizer(
					schemeExtension, ":");
			if (schemeExTokens.countTokens() != 2)
				throw new URISyntaxException(fullScheme,
						"Invalid URL: scheme mismatch (must contain three parts)");
			subScheme = schemeExTokens.nextToken();
			if (!subScheme.equals(SUBSCHEME)) {
				throw new URISyntaxException(subScheme,
						"Invalid URL: faulty subScheme");
			}
			// mode must be exactly "stateful" or "stateless"
			protocolMode = schemeExTokens.nextToken();
			if (!(protocolMode.equals(ProtocolModes.stateful.name()) || protocolMode
					.equals(ProtocolModes.stateless.name()))) {
				throw new URISyntaxException(protocolMode,
						"Invalid URL: faulty protocolMode");
			}
		}
public String getMainScheme() {
return mainScheme;
}
public String getSubScheme() {
return subScheme;
}
public String getProtocolMode() {
return protocolMode;
}
public String[] getHost() {
return host;
}
public int getPort() {
return port;
}
public String[] getHostAndPort() {
return hostAndPort;
}
public String getDbSystem() {
return dbSystem;
}
public String getDbId() {
return dbId;
}
public String getQuerySpace() {
return querySpace;
}
}
public String getMainScheme() {
return pattern.getMainScheme();
}
public String getSubScheme() {
return pattern.getSubScheme();
}
public String getProtocolMode() {
return pattern.getProtocolMode();
}
public String[] getHost() {
return pattern.getHost();
}
public int getPort() {
return pattern.getPort();
}
public String[] getHostAndPort() {
return pattern.getHostAndPort();
}
public String getDbSystem() {
return pattern.getDbSystem();
}
public String getDbId() {
return pattern.getDbId();
}
public String getQuerySpace() {
return pattern.getQuerySpace();
}
private URI[] absoluteUri;
private String fullScheme;
private String mainScheme;
private String schemeExtension;
private String subScheme;
private String hierPart;
private String protocolMode;
private String[] host;
private int port = -1;
private String[] hostAndPort;
private String path = null;
private String dbSystem;
private String dbId;
private String querySpace;
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj == this) {
return true;
}
if (!(obj instanceof ScrayURL)) {
return false;
}
ScrayURL thatScrayURL = (ScrayURL) obj;
if(!Arrays.equals(this.absoluteUri, thatScrayURL.absoluteUri)) {
return false;
}
if(!Arrays.equals(this.host, thatScrayURL.host)) {
return false;
}
if(!Arrays.equals(this.hostAndPort, thatScrayURL.hostAndPort)) {
return false;
}
return (this.fullScheme == (thatScrayURL.fullScheme) || (this.fullScheme != null && this.fullScheme.equals(thatScrayURL.fullScheme))) &&
(this.mainScheme == (thatScrayURL.mainScheme) || (this.mainScheme != null && this.mainScheme.equals(thatScrayURL.mainScheme))) &&
(this.schemeExtension == (thatScrayURL.schemeExtension) || (this.schemeExtension != null && this.schemeExtension.equals(thatScrayURL.schemeExtension))) &&
(this.subScheme == (thatScrayURL.subScheme) || (this.subScheme != null && this.subScheme.equals(thatScrayURL.subScheme))) &&
(this.hierPart == (thatScrayURL.hierPart) || (this.hierPart != null && this.hierPart.equals(thatScrayURL.hierPart))) &&
(this.protocolMode == (thatScrayURL.protocolMode) || (this.protocolMode != null && this.protocolMode.equals(thatScrayURL.protocolMode))) &&
this.port == thatScrayURL.port &&
(this.path == (thatScrayURL.path) || (this.path != null && this.path.equals(thatScrayURL.path))) &&
(this.dbSystem == (thatScrayURL.dbSystem) || (this.dbSystem != null && this.dbSystem.equals(thatScrayURL.dbSystem))) &&
(this.dbId == (thatScrayURL.dbId) || (this.dbId != null && this.dbId.equals(thatScrayURL.dbId))) &&
(this.querySpace == (thatScrayURL.querySpace) || (this.querySpace != null && this.querySpace.equals(thatScrayURL.querySpace)));
}
@Override
public String toString() {
return "ScrayURL [getMainScheme()=" + getMainScheme()
+ ", getSubScheme()=" + getSubScheme() + ", getProtocolMode()="
+ getProtocolMode() + ", getHost()=" + getHost()
+ ", getPort()=" + getPort() + ", getHostAndPort()="
+ getHostAndPort() + ", getDbSystem()=" + getDbSystem()
+ ", getDbId()=" + getDbId() + ", getQuerySpace()="
+ getQuerySpace() + "]";
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.regression;
import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.feed.ActionType;
import org.apache.falcon.entity.v0.feed.ClusterType;
import org.apache.falcon.regression.core.helpers.ColoHelper;
import org.apache.falcon.regression.core.util.BundleUtil;
import org.apache.falcon.regression.core.util.HadoopUtil;
import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.falcon.resource.InstancesSummaryResult;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.oozie.client.CoordinatorAction.Status;
import org.apache.oozie.client.OozieClientException;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import javax.xml.bind.JAXBException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.text.ParseException;
import java.util.List;
/** This test currently provide minimum verification. More detailed test should be added:
1. process : test summary single cluster few instance some future some past
2. process : test multiple cluster, full past on one cluster, full future on one cluster,
half future / past on third one
3. feed : same as test 1 for feed
4. feed : same as test 2 for feed
*/
@Test(groups = "embedded")
public class InstanceSummaryTest extends BaseTestClass {
// Base HDFS directory for this test class; cleaned on each run.
private String baseTestHDFSDir = cleanAndGetTestDir();
private String feedInputPath = baseTestHDFSDir + "/testInputData" + MINUTE_DATE_PATTERN;
private String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator";
// Validity window shared by all test methods; assigned in createTestData().
private String startTime;
private String endTime;
private ColoHelper cluster3 = servers.get(2);
private Bundle processBundle;
private String processName;
/**
 * Uploads the oozie aggregator workflow to every cluster and seeds
 * minute-wise input data covering [startTime - 100 min, endTime].
 */
@BeforeClass(alwaysRun = true)
public void createTestData() throws Exception {
uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
startTime = TimeUtil.get20roundedTime(TimeUtil.getTimeWrtSystemTime(-20));
endTime = TimeUtil.getTimeWrtSystemTime(60);
String startTimeData = TimeUtil.addMinsToTime(startTime, -100);
List<String> dataDates = TimeUtil.getMinuteDatesOnEitherSide(startTimeData, endTime, 20);
for (FileSystem fs : serverFS) {
HadoopUtil.deleteDirIfExists(Util.getPathPrefix(feedInputPath), fs);
HadoopUtil.flattenAndPutDataInFolder(fs, OSUtil.NORMAL_INPUT,
Util.getPathPrefix(feedInputPath), dataDates);
}
}
// Builds a fresh process bundle (plus one bundle per server) before each test.
@BeforeMethod(alwaysRun = true)
public void setup() throws Exception {
processBundle = BundleUtil.readELBundle();
processBundle = new Bundle(processBundle, cluster3);
processBundle.generateUniqueBundle(this);
processBundle.setInputFeedDataPath(feedInputPath);
processBundle.setOutputFeedLocationData(baseTestHDFSDir + "/output" + MINUTE_DATE_PATTERN);
processBundle.setProcessWorkflow(aggregateWorkflowDir);
for (int i = 0; i < 3; i++) {
bundles[i] = new Bundle(processBundle, servers.get(i));
bundles[i].generateUniqueBundle(this);
bundles[i].setProcessWorkflow(aggregateWorkflowDir);
}
processName = Util.readEntityName(processBundle.getProcessData());
}
/**
 * Schedule single-cluster process. Get its instances summary.
 * NOTE(review): the summary responses are fetched but never asserted (the
 * AssertUtil calls below are commented out), so this test currently only
 * verifies that the summary calls do not throw. Consider re-enabling them.
 */
@Test(enabled = true, timeOut = 1200000)
public void testSummarySingleClusterProcess()
throws URISyntaxException, JAXBException, IOException, ParseException,
OozieClientException, AuthenticationException, InterruptedException {
processBundle.setProcessValidity(startTime, endTime);
processBundle.submitFeedsScheduleProcess(prism);
InstanceUtil.waitTillInstancesAreCreated(serverOC.get(2), processBundle.getProcessData(), 0);
// start only at start time
InstancesSummaryResult r = prism.getProcessHelper()
.getInstanceSummary(processName, "?start=" + startTime);
InstanceUtil.waitTillInstanceReachState(serverOC.get(2), processName, 2,
Status.SUCCEEDED, EntityType.PROCESS);
//AssertUtil.assertSucceeded(r);
//start only before process start
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, -100));
//AssertUtil.assertFailed(r,"response should have failed");
//start only after process end
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, 120));
//start only at mid specific instance
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, 10));
//start only in between 2 instance
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, 7));
//start and end at start and end
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + endTime);
//start in between and end at end
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, 14) + "&end=" + endTime);
//start at start and end between
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(endTime, -20));
// start and end in between
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, 20)
+ "&end=" + TimeUtil.addMinsToTime(endTime, -13));
//start before start with end in between
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, -100)
+ "&end=" + TimeUtil.addMinsToTime(endTime, -37));
//start in between and end after end
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, 60)
+ "&end=" + TimeUtil.addMinsToTime(endTime, 100));
// both start and end out of range
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + TimeUtil.addMinsToTime(startTime, -100)
+ "&end=" + TimeUtil.addMinsToTime(endTime, 100));
// end only
r = prism.getProcessHelper().getInstanceSummary(processName,
"?end=" + TimeUtil.addMinsToTime(endTime, -30));
}
/**
 * Adjust multi-cluster process. Submit and schedule it. Get its instances summary.
 * NOTE(review): the six trailing summary calls below use identical
 * start/end parameters — presumably they were meant to vary the range like
 * the single-cluster test; verify the intent before relying on this test.
 */
@Test(enabled = true, timeOut = 1200000)
public void testSummaryMultiClusterProcess() throws JAXBException,
ParseException, IOException, URISyntaxException, AuthenticationException,
InterruptedException {
processBundle.setProcessValidity(startTime, endTime);
processBundle.addClusterToBundle(bundles[1].getClusters().get(0),
ClusterType.SOURCE, null, null);
processBundle.addClusterToBundle(bundles[2].getClusters().get(0),
ClusterType.SOURCE, null, null);
processBundle.submitFeedsScheduleProcess(prism);
InstancesSummaryResult r = prism.getProcessHelper()
.getInstanceSummary(processName, "?start=" + startTime);
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + endTime);
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + endTime);
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + endTime);
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + endTime);
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + endTime);
r = prism.getProcessHelper().getInstanceSummary(processName,
"?start=" + startTime + "&end=" + endTime);
}
/**
 * Adjust multi-cluster feed. Submit and schedule it. Get its instances summary.
 * cluster_1 acts as target, cluster_2 as source, cluster_3 is neutral.
 */
@Test(enabled = true, timeOut = 1200000)
public void testSummaryMultiClusterFeed() throws JAXBException, ParseException, IOException,
URISyntaxException, OozieClientException, AuthenticationException,
InterruptedException {
//create desired feed
String feed = bundles[0].getDataSets().get(0);
//cluster_1 is target, cluster_2 is source and cluster_3 is neutral
feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
feed = FeedMerlin.fromString(feed).addFeedCluster(
new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
.withRetention("days(100000)", ActionType.DELETE)
.withValidity(startTime, "2099-10-01T12:10Z")
.build()).toString();
feed = FeedMerlin.fromString(feed).addFeedCluster(
new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
.withRetention("days(100000)", ActionType.DELETE)
.withValidity(startTime, "2099-10-01T12:25Z")
.withClusterType(ClusterType.TARGET)
.withDataLocation(feedInputPath)
.build()).toString();
feed = FeedMerlin.fromString(feed).addFeedCluster(
new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
.withRetention("days(100000)", ActionType.DELETE)
.withValidity(startTime, "2099-01-01T00:00Z")
.withClusterType(ClusterType.SOURCE)
.withDataLocation(feedInputPath)
.build()).toString();
//submit clusters
Bundle.submitCluster(bundles[0], bundles[1], bundles[2]);
//create test data on cluster_2
/*InstanceUtil.createDataWithinDatesAndPrefix(cluster2,
InstanceUtil.oozieDateToDate(startTime),
InstanceUtil.oozieDateToDate(InstanceUtil.getTimeWrtSystemTime(60)),
feedInputPath, 1);*/
//submit and schedule feed
prism.getFeedHelper().submitAndSchedule(feed);
InstancesSummaryResult r = prism.getFeedHelper()
.getInstanceSummary(Util.readEntityName(feed), "?start=" + startTime);
r = prism.getFeedHelper().getInstanceSummary(Util.readEntityName(feed),
"?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(endTime, -20));
}
// Removes all entities created by this test class after each method.
@AfterMethod(alwaysRun = true)
public void tearDown() throws IOException {
removeTestClassEntities();
}
}
|
|
/*
* Copyright 2015-2018 Aleksandr Mashchenko.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amashchenko.eclipse.strutsclipse;
import java.util.Iterator;
import java.util.List;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IStorage;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.ui.JavaUI;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.hyperlink.AbstractHyperlinkDetector;
import org.eclipse.jface.text.hyperlink.IHyperlink;
import org.eclipse.ui.IEditorDescriptor;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IPersistableElement;
import org.eclipse.ui.IStorageEditorInput;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.ide.IDE;
import org.eclipse.ui.part.MultiPageEditorPart;
import org.eclipse.ui.texteditor.ITextEditor;
public abstract class AbstractStrutsHyperlinkDetector extends
        AbstractHyperlinkDetector {

    /**
     * Turns a list of collected hyperlinks into an array suitable for
     * returning from a hyperlink detector. Null entries are removed from the
     * given list (in place) before conversion.
     *
     * @param linksList the collected links; may be null or contain null entries
     * @return the non-null links as an array, or null when none remain
     */
    protected IHyperlink[] linksListToArray(List<IHyperlink> linksList) {
        if (linksList == null || linksList.isEmpty()) {
            return null;
        }
        // strip null entries in place
        for (Iterator<IHyperlink> it = linksList.iterator(); it.hasNext();) {
            if (it.next() == null) {
                it.remove();
            }
        }
        if (linksList.isEmpty()) {
            return null;
        }
        return linksList.toArray(new IHyperlink[linksList.size()]);
    }

    /**
     * Helper for IHyperlink implementations: selects and reveals the given
     * range in the text editor backing the supplied editor part, when a text
     * editor can be located (directly or as the active page of a multi-page
     * editor).
     *
     * @param editorPart the opened editor part
     * @param highlightRange region to select, or null to skip selection
     */
    public static void selectAndReveal(IEditorPart editorPart,
            IRegion highlightRange) {
        ITextEditor textEditor = null;
        if (editorPart instanceof MultiPageEditorPart) {
            Object activePage = ((MultiPageEditorPart) editorPart)
                    .getSelectedPage();
            if (activePage instanceof ITextEditor) {
                textEditor = (ITextEditor) activePage;
            }
        } else if (editorPart instanceof ITextEditor) {
            textEditor = (ITextEditor) editorPart;
        }
        // highlight range in editor if possible
        if (textEditor != null && highlightRange != null) {
            textEditor.selectAndReveal(highlightRange.getOffset(),
                    highlightRange.getLength());
        }
    }

    /** Hyperlink that opens a workspace file, optionally revealing a region. */
    public static class FileHyperlink implements IHyperlink {
        private final IFile fFile;
        private final IRegion fRegion;
        private final IRegion fHighlightRange;

        public FileHyperlink(IRegion region, IFile file) {
            this(region, file, null);
        }

        public FileHyperlink(IRegion region, IFile file, IRegion range) {
            fRegion = region;
            fFile = file;
            fHighlightRange = range;
        }

        @Override
        public IRegion getHyperlinkRegion() {
            return fRegion;
        }

        @Override
        public String getHyperlinkText() {
            if (fFile == null) {
                return null;
            }
            return fFile.getProjectRelativePath().toString();
        }

        @Override
        public String getTypeLabel() {
            return null;
        }

        @Override
        public void open() {
            try {
                IWorkbenchPage activePage = PlatformUI.getWorkbench()
                        .getActiveWorkbenchWindow().getActivePage();
                IEditorPart openedEditor = IDE.openEditor(activePage, fFile,
                        true);
                selectAndReveal(openedEditor, fHighlightRange);
            } catch (PartInitException e) {
                // deliberately ignored: failing to open the editor is non-fatal
            }
        }
    }

    /** Hyperlink that opens a Java element (type, method, ...) in its editor. */
    public static class JavaElementHyperlink implements IHyperlink {
        private final IJavaElement fElement;
        private final IRegion fRegion;

        public JavaElementHyperlink(IRegion region, IJavaElement element) {
            fRegion = region;
            fElement = element;
        }

        @Override
        public IRegion getHyperlinkRegion() {
            return fRegion;
        }

        @Override
        public String getHyperlinkText() {
            if (fElement == null) {
                return null;
            }
            if (fElement.getParent() == null) {
                return fElement.getElementName();
            }
            return fElement.getParent().getElementName() + "#"
                    + fElement.getElementName();
        }

        @Override
        public String getTypeLabel() {
            return null;
        }

        @Override
        public void open() {
            try {
                JavaUI.openInEditor(fElement);
            } catch (PartInitException e) {
                // deliberately ignored: editor could not be opened
            } catch (JavaModelException e) {
                // deliberately ignored: element could not be resolved
            }
        }
    }

    /** Hyperlink that opens an IStorage (e.g. a jar entry) in a matching editor. */
    public static class StorageHyperlink implements IHyperlink {
        private final IStorage fStorage;
        private final IRegion fRegion;
        private final IRegion fHighlightRange;

        public StorageHyperlink(IRegion region, IStorage storage, IRegion range) {
            fRegion = region;
            fStorage = storage;
            fHighlightRange = range;
        }

        @Override
        public IRegion getHyperlinkRegion() {
            return fRegion;
        }

        @Override
        public String getHyperlinkText() {
            if (fStorage == null) {
                return null;
            }
            return fStorage.getFullPath().toString();
        }

        @Override
        public String getTypeLabel() {
            return null;
        }

        @Override
        public void open() {
            try {
                IWorkbenchPage activePage = PlatformUI.getWorkbench()
                        .getActiveWorkbenchWindow().getActivePage();
                // pick an editor based on the storage's file name
                IEditorDescriptor descriptor = IDE
                        .getEditorDescriptor(fStorage.getName());
                IEditorPart openedEditor = activePage.openEditor(
                        new StorageEditorInput(fStorage), descriptor.getId());
                selectAndReveal(openedEditor, fHighlightRange);
            } catch (PartInitException e) {
                // deliberately ignored: failing to open the editor is non-fatal
            }
        }
    }

    /** Minimal read-only editor input wrapping an IStorage. */
    private static class StorageEditorInput implements IStorageEditorInput {
        private final IStorage fStorage;

        private StorageEditorInput(IStorage storage) {
            fStorage = storage;
        }

        @Override
        public boolean exists() {
            return fStorage != null;
        }

        @Override
        public ImageDescriptor getImageDescriptor() {
            return null;
        }

        @Override
        public String getName() {
            return fStorage.getName();
        }

        @Override
        public IPersistableElement getPersistable() {
            // not persistable across sessions
            return null;
        }

        @Override
        public IStorage getStorage() {
            return fStorage;
        }

        @Override
        public String getToolTipText() {
            if (fStorage.getFullPath() != null) {
                return fStorage.getFullPath().toString();
            }
            return fStorage.getName();
        }

        @Override
        public boolean equals(Object obj) {
            if (obj instanceof StorageEditorInput) {
                return fStorage.equals(((StorageEditorInput) obj).fStorage);
            }
            return super.equals(obj);
        }

        @SuppressWarnings("rawtypes")
        @Override
        public Object getAdapter(Class adapter) {
            return null;
        }
    }
}
|
|
package org.robolectric.shadows;
import android.R;
import android.app.Activity;
import android.app.Application;
import android.app.Dialog;
import android.app.Fragment;
import android.content.ComponentName;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.text.Selection;
import android.text.SpannableStringBuilder;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import org.robolectric.annotation.HiddenApi;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.annotation.RealObject;
import org.robolectric.fakes.RoboMenuItem;
import org.robolectric.manifest.AndroidManifest;
import org.robolectric.util.ReflectionHelpers;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static android.os.Build.VERSION_CODES.LOLLIPOP;
import static android.os.Build.VERSION_CODES.M;
import static org.robolectric.Shadows.shadowOf;
import static org.robolectric.shadow.api.Shadow.directlyOn;
import static org.robolectric.shadow.api.Shadow.invokeConstructor;
/**
* Shadow for {@link android.app.Activity}.
*/
@Implements(Activity.class)
public class ShadowActivity extends ShadowContextThemeWrapper {
// The real Activity instance this shadow is bound to by Robolectric.
@RealObject
protected Activity realActivity;
// Result state recorded by setResult(...) for test inspection.
private int resultCode;
private Intent resultIntent;
private Activity parent;
private boolean finishWasCalled;
// Intents captured from startActivityForResult(...), oldest first.
private List<IntentForResult> startedActivitiesForResults = new ArrayList<>();
// Maps each started intent to the request code it was started with.
private Map<Intent.FilterComparison, Integer> intentRequestCodeMap = new HashMap<>();
private int requestedOrientation = -1;
private View currentFocus;
private Integer lastShownDialogId = null;
// Pending transition animation resource ids; -1 means "not set".
private int pendingTransitionEnterAnimResId = -1;
private int pendingTransitionExitAnimResId = -1;
private Object lastNonConfigurationInstance;
// Dialogs managed via showDialog()/dismissDialog()/removeDialog(), keyed by id.
private Map<Integer, Dialog> dialogForId = new HashMap<>();
private ArrayList<Cursor> managedCursors = new ArrayList<>();
// Default key dispatch mode, mirroring Activity.setDefaultKeyMode().
private int mDefaultKeyMode = Activity.DEFAULT_KEYS_DISABLE;
private SpannableStringBuilder mDefaultKeySsb = null;
private int streamType = -1;
private boolean mIsTaskRoot = true;
private Menu optionsMenu;
private ComponentName callingActivity;
// Shadow constructor hook: runs the real Activity's constructor logic.
public void __constructor__() {
invokeConstructor(Activity.class, realActivity);
}
// Test accessor: injects the given Application into the real activity via reflection.
public void setApplication(Application application) {
ReflectionHelpers.setField(realActivity, "mApplication", application);
}
/**
 * Applies the theme registered for this activity class in the app manifest.
 *
 * @return true if a theme reference was found and applied, false otherwise
 */
public boolean setThemeFromManifest() {
ShadowApplication shadowApplication = shadowOf(realActivity.getApplication());
AndroidManifest appManifest = shadowApplication.getAppManifest();
if (appManifest == null) return false;
String themeRef = appManifest.getThemeRef(realActivity.getClass().getName());
if (themeRef != null) {
// resolve an "@style/Foo" reference to its style resource id
int themeRes = realActivity.getResources().getIdentifier(themeRef.replace("@", ""), "style", appManifest.getPackageName());
realActivity.setTheme(themeRes);
return true;
}
return false;
}
// Test accessor: sets the value returned by getCallingActivity().
public void setCallingActivity(ComponentName activityName) {
callingActivity = activityName;
}
@Implementation
public ComponentName getCallingActivity() {
return callingActivity;
}
@Implementation
public void setDefaultKeyMode(int keyMode) {
mDefaultKeyMode = keyMode;
// Some modes use a SpannableStringBuilder to track & dispatch input events
// This list must remain in sync with the switch in onKeyDown()
switch (mDefaultKeyMode) {
case Activity.DEFAULT_KEYS_DISABLE:
case Activity.DEFAULT_KEYS_SHORTCUT:
mDefaultKeySsb = null; // not used in these modes
break;
case Activity.DEFAULT_KEYS_DIALER:
case Activity.DEFAULT_KEYS_SEARCH_LOCAL:
case Activity.DEFAULT_KEYS_SEARCH_GLOBAL:
mDefaultKeySsb = new SpannableStringBuilder();
Selection.setSelection(mDefaultKeySsb, 0);
break;
default:
// unknown key mode: mirror the platform's argument validation
throw new IllegalArgumentException();
}
}
// Test accessor for the mode set via setDefaultKeyMode().
public int getDefaultKeymode() {
return mDefaultKeyMode;
}
// Records the result code for later inspection via getResultCode().
@Implementation
public final void setResult(int resultCode) {
this.resultCode = resultCode;
}
// Records both the result code and the result intent.
@Implementation
public final void setResult(int resultCode, Intent data) {
this.resultCode = resultCode;
this.resultIntent = data;
}
@Implementation
public LayoutInflater getLayoutInflater() {
return LayoutInflater.from(realActivity);
}
@Implementation
public MenuInflater getMenuInflater() {
return new MenuInflater(realActivity);
}
/**
 * Finds a view in this activity's window.
 *
 * @param id ID of the view to find
 * @return the view found by delegating to {@link #getWindow()}
 */
@Implementation
public View findViewById(int id) {
return getWindow().findViewById(id);
}
@Implementation
public final Activity getParent() {
return parent;
}
/**
 * Allow setting of Parent fragmentActivity (for unit testing purposes only)
 *
 * @param parent Parent fragmentActivity to set on this fragmentActivity
 */
@HiddenApi @Implementation
public void setParent(Activity parent) {
this.parent = parent;
}
// Default back behavior: finish the activity.
@Implementation
public void onBackPressed() {
finish();
}
// Only records the finish request; nothing is actually torn down.
@Implementation
public void finish() {
finishWasCalled = true;
}
@Implementation(minSdk = LOLLIPOP)
public void finishAndRemoveTask() {
finishWasCalled = true;
}
// Test helper: clears the flag recorded by finish().
public void resetIsFinishing() {
finishWasCalled = false;
}
/**
 * @return whether {@link #finish()} was called
 */
@Implementation
public boolean isFinishing() {
return finishWasCalled;
}
/**
 * Constructs a new Window (a {@link com.android.internal.policy.impl.PhoneWindow}) if no window has previously been
 * set.
 *
 * @return the window associated with this Activity
 * @throws RuntimeException if lazily creating the window fails
 */
@Implementation
public Window getWindow() {
Window window = directlyOn(realActivity, Activity.class).getWindow();
if (window == null) {
try {
window = ShadowWindow.create(realActivity);
setWindow(window);
} catch (Exception e) {
throw new RuntimeException("Window creation failed!", e);
}
}
return window;
}
// Test accessor: installs the given window on the real activity via reflection.
public void setWindow(Window window) {
ReflectionHelpers.setField(realActivity, "mWindow", window);
}
// Posts the action to Robolectric's foreground (UI) scheduler.
@Implementation
public void runOnUiThread(Runnable action) {
ShadowApplication.getInstance().getForegroundThreadScheduler().post(action);
}
// Delegates to the parent when embedded, mirroring platform behavior.
@Implementation
public void setRequestedOrientation(int requestedOrientation) {
if (getParent() != null) {
getParent().setRequestedOrientation(requestedOrientation);
} else {
this.requestedOrientation = requestedOrientation;
}
}
@Implementation
public int getRequestedOrientation() {
if (getParent() != null) {
return getParent().getRequestedOrientation();
} else {
return this.requestedOrientation;
}
}
// Always 0 in this shadow; no real task stack exists under Robolectric.
@Implementation
public int getTaskId() {
return 0;
}
/**
 * Non-Android accessor.
 *
 * @return the {@code contentView} set by one of the {@code setContentView()} methods
 */
public View getContentView() {
return ((ViewGroup) getWindow().findViewById(R.id.content)).getChildAt(0);
}
/**
 * Non-Android accessor.
 *
 * @return the {@code resultCode} set by one of the {@code setResult()} methods
 */
public int getResultCode() {
return resultCode;
}
/**
 * Non-Android accessor.
 *
 * @return the {@code Intent} set by {@link #setResult(int, android.content.Intent)}
 */
public Intent getResultIntent() {
return resultIntent;
}
/**
 * Non-Android accessor consumes and returns the next {@code Intent} from the
 * started-activities-for-results queue (oldest first).
 *
 * @return the next started {@code Intent} for an activity, wrapped in
 * an {@link ShadowActivity.IntentForResult} object, or null if none remain
 */
public IntentForResult getNextStartedActivityForResult() {
if (startedActivitiesForResults.isEmpty()) {
return null;
} else {
return startedActivitiesForResults.remove(0);
}
}
/**
 * Non-Android accessor returns the next {@code Intent} started by
 * {@link android.app.Activity#startActivityForResult(android.content.Intent, int)} without
 * consuming it.
 *
 * @return the oldest unconsumed started {@code Intent}, wrapped in
 * an {@link ShadowActivity.IntentForResult} object, or null if none remain
 */
public IntentForResult peekNextStartedActivityForResult() {
if (startedActivitiesForResults.isEmpty()) {
return null;
} else {
return startedActivitiesForResults.get(0);
}
}
@Implementation
public Object getLastNonConfigurationInstance() {
return lastNonConfigurationInstance;
}
// Test accessor: sets the value returned by getLastNonConfigurationInstance().
public void setLastNonConfigurationInstance(Object lastNonConfigurationInstance) {
this.lastNonConfigurationInstance = lastNonConfigurationInstance;
}
/**
 * Non-Android accessor.
 *
 * @param view View to focus.
 */
public void setCurrentFocus(View view) {
currentFocus = view;
}
@Implementation
public View getCurrentFocus() {
return currentFocus;
}
// -1 until set; presumably recorded by overridePendingTransition — confirm
// against the shadow method (not visible in this chunk).
public int getPendingTransitionEnterAnimationResourceId() {
return pendingTransitionEnterAnimResId;
}
public int getPendingTransitionExitAnimationResourceId() {
return pendingTransitionExitAnimResId;
}
// Captures the menu for test inspection, then defers to the real implementation.
@Implementation
public boolean onCreateOptionsMenu(Menu menu) {
optionsMenu = menu;
return directlyOn(realActivity, Activity.class).onCreateOptionsMenu(menu);
}
/**
 * Return the options menu.
 *
 * @return Options menu.
 */
public Menu getOptionsMenu() {
return optionsMenu;
}
/**
 * Perform a click on a menu item.
 *
 * @param menuItemResId Menu item resource ID.
 * @return True if the click was handled, false otherwise.
 * @throws RuntimeException if the options menu was never created
 */
public boolean clickMenuItem(int menuItemResId) {
if (optionsMenu == null) {
throw new RuntimeException(
"Activity does not have an options menu! Did you forget to call " +
"super.onCreateOptionsMenu(menu) in " + realActivity.getClass().getName() + "?");
}
final RoboMenuItem item = new RoboMenuItem(menuItemResId);
return realActivity.onMenuItemSelected(Window.FEATURE_OPTIONS_PANEL, item);
}
/**
 * Container object to hold an Intent, together with the requestCode used
 * in a call to {@code Activity.startActivityForResult(Intent, int)}
 */
public class IntentForResult {
    public Intent intent;
    public int requestCode;
    /** Options bundle passed to startActivityForResult, or null when none was given. */
    public Bundle options;

    public IntentForResult(Intent intent, int requestCode) {
        // Delegate to the full constructor so initialization lives in one place
        // (previously both constructors duplicated the field assignments).
        this(intent, requestCode, null);
    }

    public IntentForResult(Intent intent, int requestCode, Bundle options) {
        this.intent = intent;
        this.requestCode = requestCode;
        this.options = options;
    }
}
  // Starts each intent through ShadowApplication with the shared options bundle.
  // NOTE(review): iterates the array in REVERSE, so the last intent is started
  // first — confirm this matches the intended Activity#startActivities ordering
  // before asserting on start order in tests.
  @Implementation
  public void startActivities(Intent[] intents, Bundle options) {
    for (int i = intents.length - 1; i >= 0; i--) {
      ShadowApplication.getInstance().startActivity(intents[i], options);
    }
  }
@Implementation
public void startActivityForResult(Intent intent, int requestCode) {
intentRequestCodeMap.put(new Intent.FilterComparison(intent), requestCode);
startedActivitiesForResults.add(new IntentForResult(intent, requestCode));
ShadowApplication.getInstance().startActivity(intent);
}
@Implementation
public void startActivityForResult(Intent intent, int requestCode, Bundle options) {
intentRequestCodeMap.put(new Intent.FilterComparison(intent), requestCode);
startedActivitiesForResults.add(new IntentForResult(intent, requestCode, options));
ShadowApplication.getInstance().startActivity(intent);
}
  /**
   * Non-Android accessor: delivers an activity result for a previously started
   * intent. Looks up the request code recorded by startActivityForResult() and
   * invokes the real activity's protected onActivityResult() via reflection.
   *
   * @param requestIntent the intent originally passed to startActivityForResult()
   * @param resultCode the result code to deliver
   * @param resultIntent the result data to deliver
   * @throws RuntimeException if no matching started intent was recorded
   */
  public void receiveResult(Intent requestIntent, int resultCode, Intent resultIntent) {
    Integer requestCode = intentRequestCodeMap.get(new Intent.FilterComparison(requestIntent));
    if (requestCode == null) {
      throw new RuntimeException("No intent matches " + requestIntent + " among " + intentRequestCodeMap.keySet());
    }
    final ActivityInvoker invoker = new ActivityInvoker();
    invoker.call("onActivityResult", Integer.TYPE, Integer.TYPE, Intent.class)
        .with(requestCode, resultCode, resultIntent);
  }
  // Shows the dialog with no arguments bundle; delegates to showDialog(int, Bundle).
  @Implementation
  public final void showDialog(int id) {
    showDialog(id, null);
  }
@Implementation
public final void dismissDialog(int id) {
final Dialog dialog = dialogForId.get(id);
if (dialog == null) {
throw new IllegalArgumentException();
}
dialog.dismiss();
}
  // Forgets the cached dialog so a later showDialog(id) recreates it.
  @Implementation
  public final void removeDialog(int id) {
    dialogForId.remove(id);
  }
  /**
   * Creates (via the real activity's onCreateDialog) and shows the dialog for
   * {@code id}, caching the instance so subsequent calls reuse it.
   *
   * @param id dialog id; recorded and retrievable via getLastShownDialogId()
   * @param bundle optional arguments forwarded to onPrepareDialog
   * @return false if onCreateDialog returned null, true otherwise
   */
  @Implementation
  public final boolean showDialog(int id, Bundle bundle) {
    this.lastShownDialogId = id;
    Dialog dialog = dialogForId.get(id);
    if (dialog == null) {
      final ActivityInvoker invoker = new ActivityInvoker();
      dialog = (Dialog) invoker.call("onCreateDialog", Integer.TYPE).with(id);
      if (dialog == null) {
        return false;
      }
      // The bundle is only passed along when the caller supplied one.
      if (bundle == null) {
        invoker.call("onPrepareDialog", Integer.TYPE, Dialog.class).with(id, dialog);
      } else {
        invoker.call("onPrepareDialog", Integer.TYPE, Dialog.class, Bundle.class).with(id, dialog, bundle);
      }
      dialogForId.put(id, dialog);
    }
    dialog.show();
    return true;
  }
  // Non-Android accessor: controls the value returned by isTaskRoot().
  public void setIsTaskRoot(boolean isRoot) {
    mIsTaskRoot = isRoot;
  }
  @Implementation
  public final boolean isTaskRoot() {
    return mIsTaskRoot;
  }
  /**
   * Non-Android accessor
   *
   * @return the dialog resource id passed into
   *         {@code Activity.showDialog(int, Bundle)} or {@code Activity.showDialog(int)}
   */
  public Integer getLastShownDialogId() {
    return lastShownDialogId;
  }
  // True when both recorded transition animations are 0, i.e. the last
  // overridePendingTransition call cancelled the pending transitions.
  public boolean hasCancelledPendingTransitions() {
    return pendingTransitionEnterAnimResId == 0 && pendingTransitionExitAnimResId == 0;
  }
  // Records the animation resource ids for later inspection by tests.
  @Implementation
  public void overridePendingTransition(int enterAnim, int exitAnim) {
    pendingTransitionEnterAnimResId = enterAnim;
    pendingTransitionExitAnimResId = exitAnim;
  }
  // Non-Android accessor: dialog cached by showDialog(int, Bundle), or null.
  public Dialog getDialogById(int dialogId) {
    return dialogForId.get(dialogId);
  }
  /**
   * Simulates a configuration-change style recreation by driving the full
   * teardown/bring-up lifecycle on the real activity via reflection. State
   * saved in onSaveInstanceState is replayed into onCreate and
   * onRestoreInstanceState, and the non-configuration instance is preserved.
   */
  @Implementation
  public void recreate() {
    Bundle outState = new Bundle();
    final ActivityInvoker invoker = new ActivityInvoker();
    // Teardown half of the lifecycle, capturing state on the way down.
    invoker.call("onSaveInstanceState", Bundle.class).with(outState);
    invoker.call("onPause").withNothing();
    invoker.call("onStop").withNothing();
    Object nonConfigInstance = invoker.call("onRetainNonConfigurationInstance").withNothing();
    setLastNonConfigurationInstance(nonConfigInstance);
    invoker.call("onDestroy").withNothing();
    // Bring-up half, replaying the captured state.
    invoker.call("onCreate", Bundle.class).with(outState);
    invoker.call("onStart").withNothing();
    invoker.call("onRestoreInstanceState", Bundle.class).with(outState);
    invoker.call("onResume").withNothing();
  }
  // Tracks the cursor so tests can assert it is being "managed".
  @Implementation
  public void startManagingCursor(Cursor c) {
    managedCursors.add(c);
  }
  @Implementation
  public void stopManagingCursor(Cursor c) {
    managedCursors.remove(c);
  }
  // Non-Android accessor.
  // NOTE(review): exposes the internal mutable list, so callers can modify
  // managed-cursor state directly.
  public List<Cursor> getManagedCursors() {
    return managedCursors;
  }
  // Records the requested volume control stream for later inspection.
  @Implementation
  public final void setVolumeControlStream(int streamType) {
    this.streamType = streamType;
  }
  @Implementation
  public final int getVolumeControlStream() {
    return streamType;
  }
  // Fragment-initiated starts are handled exactly like activity-initiated
  // ones; the fragment argument is ignored by this shadow.
  @Implementation
  public void startActivityFromFragment(Fragment fragment, Intent intent, int requestCode) {
    startActivityForResult(intent, requestCode);
  }
  @Implementation
  public void startActivityFromFragment(Fragment fragment, Intent intent, int requestCode, Bundle options) {
    startActivityForResult(intent, requestCode, options);
  }
  // Deliberate no-op: runtime permission requests are not simulated here.
  @Implementation(minSdk = M)
  public final void requestPermissions(String[] permissions, int requestCode) {
  }
  /**
   * Small reflection helper for invoking protected Activity lifecycle methods
   * on the real activity. Usage: {@code invoker.call(name, argTypes...).with(args...)}.
   */
  private final class ActivityInvoker {
    // Method resolved by the most recent call(); consumed by with()/withNothing().
    private Method method;
    // Resolves and un-protects the named Activity method; returns this for chaining.
    public ActivityInvoker call(final String methodName, final Class... argumentClasses) {
      try {
        method = Activity.class.getDeclaredMethod(methodName, argumentClasses);
        method.setAccessible(true);
        return this;
      } catch (NoSuchMethodException e) {
        throw new RuntimeException(e);
      }
    }
    // Invokes the resolved method with no arguments.
    public Object withNothing() {
      return with();
    }
    // Invokes the resolved method on realActivity with the given arguments.
    public Object with(final Object... parameters) {
      try {
        return method.invoke(realActivity, parameters);
      } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
        throw new RuntimeException(e);
      }
    }
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.application.modelling.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.aries.application.modelling.ModellingManager;
import org.apache.aries.application.modelling.ParsedServiceElements;
import org.apache.aries.application.modelling.ParserProxy;
import org.apache.aries.application.modelling.WrappedReferenceMetadata;
import org.apache.aries.application.modelling.WrappedServiceMetadata;
import org.apache.aries.application.modelling.standalone.OfflineModellingFactory;
import org.apache.aries.blueprint.container.NamespaceHandlerRegistry;
import org.apache.aries.blueprint.container.ParserServiceImpl;
import org.apache.aries.blueprint.namespace.NamespaceHandlerRegistryImpl;
import org.apache.aries.blueprint.services.ParserService;
import org.apache.aries.mocks.BundleContextMock;
import org.apache.aries.unittest.mocks.Skeleton;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.osgi.framework.BundleContext;
/**
 * Tests that blueprint XML is parsed into the expected modelled services and
 * references. Parameterized to run twice: once against a ParserService
 * registered in a mocked BundleContext, and once against the standalone
 * offline parser proxy.
 */
@RunWith(Parameterized.class)
public class ParserProxyTest {
  // The two ParserProxy implementations under test.
  @Parameters
  public static List<Object[]> parserProxies() {
    return Arrays.asList(new Object[][] {
        {getMockParserServiceProxy()},
        {OfflineModellingFactory.getOfflineParserProxy()}});
  }
  // Builds a ParserProxyImpl wired to a real ParserServiceImpl that is
  // registered in a mocked BundleContext.
  public static ParserProxy getMockParserServiceProxy() {
    BundleContext mockCtx = Skeleton.newMock(new BundleContextMock(), BundleContext.class);
    NamespaceHandlerRegistry nhri = new NamespaceHandlerRegistryImpl (mockCtx);
    ParserService parserService = new ParserServiceImpl(nhri);
    mockCtx.registerService(ParserService.class.getName(), parserService, new Hashtable<String, String>());
    ParserProxyImpl parserProxyService = new ParserProxyImpl();
    parserProxyService.setParserService(parserService);
    parserProxyService.setBundleContext(mockCtx);
    parserProxyService.setModellingManager(new ModellingManagerImpl());
    return parserProxyService;
  }
  @AfterClass
  public static void teardown() {
    BundleContextMock.clear();
  }
  private final ModellingManager _modellingManager;
  private final ParserProxy _parserProxy;
  // Root of the test resources, resolved for both Maven and Eclipse working dirs.
  private final File resourceDir;
  public ParserProxyTest(ParserProxy sut) throws IOException {
    _parserProxy = sut;
    _modellingManager = new ModellingManagerImpl();
    // make sure paths work in Eclipse as well as Maven
    if (new File(".").getCanonicalFile().getName().equals("target")) {
      resourceDir = new File("../src/test/resources");
    } else {
      resourceDir = new File("src/test/resources");
    }
  }
  // Parses two blueprint files by URL and compares the modelled services
  // against the fixed expected set.
  @Test
  public void basicTest1() throws Exception {
    File bpXml = new File (resourceDir, "appModeller/test1.eba/bundle1.jar/OSGI-INF/blueprint/bp.xml");
    File bp2Xml = new File (resourceDir, "appModeller/test1.eba/bundle1.jar/OSGI-INF/blueprint/bp2.xml");
    List<URL> urls = new ArrayList<URL>();
    urls.add ((bpXml.toURI()).toURL());
    urls.add ((bp2Xml.toURI()).toURL());
    List<? extends WrappedServiceMetadata> results = _parserProxy.parse(urls);
    assertTrue ("Four results expected, not " + results.size(), results.size() == 4);
    Set<WrappedServiceMetadata> resultSet = new HashSet<WrappedServiceMetadata>(results);
    Set<WrappedServiceMetadata> expectedResults = getTest1ExpectedResults();
    assertEquals ("Blueprint parsed xml is not as expected: " + resultSet.toString() + " != " + expectedResults,
        resultSet, expectedResults);
  }
  // Parses the same blueprint files via streams, collecting both services and
  // references (including anonymous services with autogenerated names).
  @Test
  public void testParseAllServiceElements() throws Exception {
    File bpXml = new File (resourceDir, "appModeller/test1.eba/bundle1.jar/OSGI-INF/blueprint/bp.xml");
    File bp2Xml = new File (resourceDir, "appModeller/test1.eba/bundle1.jar/OSGI-INF/blueprint/bp2.xml");
    List<WrappedServiceMetadata> services = new ArrayList<WrappedServiceMetadata>();
    List<WrappedReferenceMetadata> references = new ArrayList<WrappedReferenceMetadata>();
    FileInputStream fis = new FileInputStream (bpXml);
    ParsedServiceElements bpelem = _parserProxy.parseAllServiceElements(fis);
    services.addAll(bpelem.getServices());
    references.addAll(bpelem.getReferences());
    fis = new FileInputStream (bp2Xml);
    bpelem = _parserProxy.parseAllServiceElements(fis);
    services.addAll(bpelem.getServices());
    references.addAll(bpelem.getReferences());
    // We expect:
    // bp.xml: 3 services and 2 references
    // bp2.xml: 3 services and a reference list
    //
    assertTrue ("Six services expected, not " + services.size(), services.size() == 6);
    assertTrue ("Three references expected, not " + references.size(), references.size() == 3);
    Set<WrappedServiceMetadata> expectedServices = getTest2ExpectedServices();
    // ServiceResultSet will contain some services with autogenerated names starting '.' so we can't
    // use a straight Set.equals(). We could add the autogenerated names to the expected results but instead
    // let's test that differsOnlyByName() works
    int serviceMatchesFound = 0;
    for (WrappedServiceMetadata result : services) {
      Iterator<WrappedServiceMetadata> it = expectedServices.iterator();
      while (it.hasNext()) {
        WrappedServiceMetadata next = it.next();
        if (result.equals(next) || result.identicalOrDiffersOnlyByName(next)) {
          serviceMatchesFound++;
          it.remove();
        }
      }
    }
    assertEquals ("Parsed services are wrong: " + expectedServices + " unmatched ",
        6, serviceMatchesFound);
    Set<WrappedReferenceMetadata> expectedReferences = getTest2ExpectedReferences();
    Set<WrappedReferenceMetadata> results = new HashSet<WrappedReferenceMetadata>(references);
    assertTrue ("Parsed references are not as we'd expected: " + results.toString() + " != " + expectedReferences,
        results.equals(expectedReferences));
  }
  // Verifies that a multi-valued service property (service.intents) is parsed
  // into an array preserving element order.
  @Test
  public void checkMultiValues() throws Exception {
    File bpXml = new File (resourceDir, "appModeller/test1.eba/bundle1.jar/OSGI-INF/blueprint/bpMultiValues.xml");
    List<WrappedServiceMetadata> services = new ArrayList<WrappedServiceMetadata>();
    FileInputStream fis = new FileInputStream (bpXml);
    ParsedServiceElements bpelem = _parserProxy.parseAllServiceElements(fis);
    services.addAll(bpelem.getServices());
    assertEquals ("Multi valued service not parsed correctly", services.size(), 1);
    WrappedServiceMetadata wsm = services.get(0);
    Map<String, Object> props = wsm.getServiceProperties();
    String [] intents = (String[]) props.get("service.intents");
    assertEquals ("Service.intents[0] wrong", intents[0], "propagatesTransaction");
    assertEquals ("Service.intents[1] wrong", intents[1], "confidentiality");
  }
  // Expected references, matching the getImportedService(...) calls below, e.g.:
  //   <reference id="fromOutside" interface="foo.bar.MyInjectedService"/>
  //   <reference-list id="refList1" interface="my.logging.service" filter="(&(trace=on)(debug=true))"/>
  //
  private Set<WrappedReferenceMetadata> getTest2ExpectedReferences() throws Exception {
    Set<WrappedReferenceMetadata> expectedResults = new HashSet<WrappedReferenceMetadata>();
    expectedResults.add(_modellingManager.getImportedService(false, "foo.bar.MyInjectedService", null,
        null, "fromOutside", false));
    expectedResults.add(_modellingManager.getImportedService(true, "foo.bar.MyInjectedService", null,
        null, "anotherOptionalReference", false));
    expectedResults.add(_modellingManager.getImportedService(false, "my.logging.service", null, "(&(trace=on)(debug=true))", "refList1", true));
    return expectedResults;
  }
  // Test 2 includes anonymous services: the expected results are a superset of test1
  private Set<WrappedServiceMetadata> getTest2ExpectedServices() {
    Set<WrappedServiceMetadata> expectedResults = getTest1ExpectedResults();
    expectedResults.add(_modellingManager.getExportedService("", 0, Arrays.asList("foo.bar.AnonService"), null));
    expectedResults.add(_modellingManager.getExportedService("", 0, Arrays.asList("foo.bar.NamedInnerBeanService"), null));
    return expectedResults;
  }
  // The four named services expected from bp.xml + bp2.xml, with their properties.
  private Set<WrappedServiceMetadata> getTest1ExpectedResults() {
    Set<WrappedServiceMetadata> expectedResults = new HashSet<WrappedServiceMetadata>();
    Map<String, Object> props = new HashMap<String, Object>();
    props.put ("priority", "9");
    props.put("volume", "11");
    props.put("property.list", Arrays.asList("1", "2", "3", "2", "1"));
    //Deliberately miss off duplicate entries and reorder, the parser should still match this
    props.put("property.set", new LinkedHashSet<String>(Arrays.asList("1", "2", "3")));
    props.put("property.array", new String[]{"1", "2", "3", "2", "1"});
    props.put("osgi.service.blueprint.compname", "myBean");
    expectedResults.add(_modellingManager.getExportedService("myService", 0, Arrays.asList("foo.bar.MyService"), props));
    props = new HashMap<String, Object>();
    props.put ("priority", "7");
    props.put ("volume", "11");
    props.put ("osgi.service.blueprint.compname", "bean1");
    expectedResults.add(_modellingManager.getExportedService("service1.should.be.exported", 0, Arrays.asList("foo.bar.MyService"), props));
    props = new HashMap<String, Object>();
    props.put ("customer", "pig");
    props.put ("osgi.service.blueprint.compname", "bean2");
    expectedResults.add(_modellingManager.getExportedService("service2.should.not.be.exported", 0, Arrays.asList("com.acme.Delivery"), props));
    props = new HashMap<String, Object>();
    props.put ("customer", "pig");
    props.put ("target", "rabbit");
    props.put ("payload", "excessive");
    props.put ("osgi.service.blueprint.compname", "bean3");
    expectedResults.add(_modellingManager.getExportedService("bean3", 0, Arrays.asList("com.acme.Delivery"), props));
    return expectedResults;
  }
}
|
|
package in.srain.cube.views.ptr;
import android.content.Context;
import android.content.res.TypedArray;
import android.util.AttributeSet;
import android.view.*;
import android.widget.Scroller;
import android.widget.TextView;
import java.util.ArrayList;
import in.srain.cube.views.ptr.indicator.PtrIndicator;
import in.srain.cube.views.ptr.util.PtrCLog;
/**
 * A layout that adds "Pull to Refresh (Ptr)" support around any content view; it can contain whatever you want.
 * Supports: pull to refresh / release to refresh / auto refresh / keeping or hiding the header view while refreshing.
* It defines {@link in.srain.cube.views.ptr.PtrUIHandler}, which allows you customize the UI easily.
*/
public class PtrFrameLayout extends ViewGroup {
  // Which pull directions are enabled; REFRESH = header only, LOAD_MORE = footer only.
  public enum Mode {
    NONE, REFRESH, LOAD_MORE, BOTH
  }
  // status enum
  public final static byte PTR_STATUS_INIT = 1;
  public final static byte PTR_STATUS_PREPARE = 2;
  public final static byte PTR_STATUS_LOADING = 3;
  public final static byte PTR_STATUS_COMPLETE = 4;
  private static final boolean DEBUG_LAYOUT = true;
  public static boolean DEBUG = true;
  // Incremented per instance purely to build a distinct log tag.
  private static int ID = 1;
  // auto refresh status
  private static byte FLAG_AUTO_REFRESH_AT_ONCE = 0x01;
  private static byte FLAG_AUTO_REFRESH_BUT_LATER = 0x01 << 1;
  private static byte FLAG_ENABLE_NEXT_PTR_AT_ONCE = 0x01 << 2;
  private static byte FLAG_PIN_CONTENT = 0x01 << 3;
  private static byte MASK_AUTO_REFRESH = 0x03;
  protected final String LOG_TAG = "ptr-frame-" + ++ID;
  protected View mContent;
  // optional config for define header and content in xml file
  private int mHeaderId = 0;
  private int mContainerId = 0;
  private int mFooterId = 0;
  // config
  private Mode mMode = Mode.BOTH;
  private int mDurationToClose = 200;
  private int mDurationToCloseHeader = 1000;
  private boolean mKeepHeaderWhenRefresh = true;
  private boolean mPullToRefresh = false;
  private View mHeaderView;
  private View mFooterView;
  private PtrUIHandlerHolder mPtrUIHandlerHolder = PtrUIHandlerHolder.create();
  private PtrHandler mPtrHandler;
  // working parameters
  private ScrollChecker mScrollChecker;
  private int mPagingTouchSlop;
  private int mHeaderHeight;
  private int mFooterHeight;
  private byte mStatus = PTR_STATUS_INIT;
  private boolean mDisableWhenHorizontalMove = false;
  private int mFlag = 0x00;
  // disable when detect moving horizontally
  private boolean mPreventForHorizontal = false;
  private MotionEvent mLastMoveEvent;
  private PtrUIHandlerHook mRefreshCompleteHook;
  private int mLoadingMinTime = 500;
  private long mLoadingStartTime = 0;
  // Tracks the current pull position for whichever of header/footer is active.
  private PtrIndicator mPtrHeaderIndicator;
  private boolean mHasSendCancelEvent = false;
  // Convenience constructors; both chain to the three-arg form.
  public PtrFrameLayout(Context context) {
    this(context, null);
  }
  public PtrFrameLayout(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
  }
  /**
   * Reads the ptr_* styled attributes (header/content/footer ids, resistance,
   * close durations, refresh ratio, keep-header, pull-to-refresh, mode) and
   * sets up the scroll checker and touch slop.
   */
  public PtrFrameLayout(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    mPtrHeaderIndicator = new PtrIndicator();
    TypedArray arr = context.obtainStyledAttributes(attrs, R.styleable.PtrFrameLayout, 0, 0);
    if (arr != null) {
      mHeaderId = arr.getResourceId(R.styleable.PtrFrameLayout_ptr_header, mHeaderId);
      mContainerId = arr.getResourceId(R.styleable.PtrFrameLayout_ptr_content, mContainerId);
      mFooterId = arr.getResourceId(R.styleable.PtrFrameLayout_ptr_footer, mFooterId);
      mPtrHeaderIndicator.setResistance(arr.getFloat(R.styleable.PtrFrameLayout_ptr_resistance, mPtrHeaderIndicator.getResistance()));
      mDurationToClose = arr.getInt(R.styleable.PtrFrameLayout_ptr_duration_to_close, mDurationToClose);
      mDurationToCloseHeader = arr.getInt(R.styleable.PtrFrameLayout_ptr_duration_to_close_header, mDurationToCloseHeader);
      float ratio = mPtrHeaderIndicator.getRatioOfHeaderToHeightRefresh();
      ratio = arr.getFloat(R.styleable.PtrFrameLayout_ptr_ratio_of_header_height_to_refresh, ratio);
      mPtrHeaderIndicator.setRatioOfHeaderHeightToRefresh(ratio);
      mKeepHeaderWhenRefresh = arr.getBoolean(R.styleable.PtrFrameLayout_ptr_keep_header_when_refresh, mKeepHeaderWhenRefresh);
      mPullToRefresh = arr.getBoolean(R.styleable.PtrFrameLayout_ptr_pull_to_fresh, mPullToRefresh);
      // Default index 4 is outside the 0..3 range, so getModeFromIndex falls back to BOTH.
      mMode = getModeFromIndex(arr.getInt(R.styleable.PtrFrameLayout_ptr_mode, 4));
      arr.recycle();
    }
    mScrollChecker = new ScrollChecker();
    final ViewConfiguration conf = ViewConfiguration.get(getContext());
    // Double the standard slop before a drag counts as horizontal movement.
    mPagingTouchSlop = conf.getScaledTouchSlop() * 2;
  }
private Mode getModeFromIndex(int index) {
switch (index) {
case 0:
return Mode.NONE;
case 1:
return Mode.REFRESH;
case 2:
return Mode.LOAD_MORE;
case 3:
return Mode.BOTH;
default:
return Mode.BOTH;
}
}
  /**
   * Resolves which children act as header, content and footer once inflation
   * completes. Explicit ids (ptr_header/ptr_content/ptr_footer) win; otherwise
   * roles are inferred from child order / PtrUIHandler implementation.
   */
  @Override
  protected void onFinishInflate() {
    final int childCount = getChildCount();
    if (childCount > 3) {
      throw new IllegalStateException("PtrFrameLayout only can host 3 elements");
    } else if (childCount == 3) {
      if (mHeaderId != 0 && mHeaderView == null) {
        mHeaderView = findViewById(mHeaderId);
      }
      if (mContainerId != 0 && mContent == null) {
        mContent = findViewById(mContainerId);
      }
      if (mFooterId != 0 && mFooterView == null) {
        mFooterView = findViewById(mFooterId);
      }
      // not specify header or content or footer
      if (mContent == null || mHeaderView == null || mFooterView == null) {
        final View child1 = getChildAt(0);
        final View child2 = getChildAt(1);
        final View child3 = getChildAt(2);
        // all are not specified
        if (mContent == null && mHeaderView == null && mFooterView == null) {
          mHeaderView = child1;
          mContent = child2;
          mFooterView = child3;
        }
        // only some are specified
        else {
          // Assign the leftover children, in order, to the unassigned roles.
          ArrayList<View> view = new ArrayList<View>(3) {{
            add(child1);
            add(child2);
            add(child3);
          }};
          if (mHeaderView != null) {
            view.remove(mHeaderView);
          }
          if (mContent != null) {
            view.remove(mContent);
          }
          if (mFooterView != null) {
            view.remove(mFooterView);
          }
          if (mHeaderView == null && view.size() > 0) {
            mHeaderView = view.get(0);
            view.remove(0);
          }
          if (mContent == null && view.size() > 0) {
            mContent = view.get(0);
            view.remove(0);
          }
          if (mFooterView == null && view.size() > 0) {
            mFooterView = view.get(0);
            view.remove(0);
          }
        }
      }
    } else if (childCount == 2) { // ignore the footer by default
      if (mHeaderId != 0 && mHeaderView == null) {
        mHeaderView = findViewById(mHeaderId);
      }
      if (mContainerId != 0 && mContent == null) {
        mContent = findViewById(mContainerId);
      }
      // not specify header or content
      if (mContent == null || mHeaderView == null) {
        View child1 = getChildAt(0);
        View child2 = getChildAt(1);
        // A child implementing PtrUIHandler is assumed to be the header.
        if (child1 instanceof PtrUIHandler) {
          mHeaderView = child1;
          mContent = child2;
        } else if (child2 instanceof PtrUIHandler) {
          mHeaderView = child2;
          mContent = child1;
        } else {
          // both are not specified
          if (mContent == null && mHeaderView == null) {
            mHeaderView = child1;
            mContent = child2;
          }
          // only one is specified
          else {
            if (mHeaderView == null) {
              mHeaderView = mContent == child1 ? child2 : child1;
            } else {
              mContent = mHeaderView == child1 ? child2 : child1;
            }
          }
        }
      }
    } else if (childCount == 1) {
      mContent = getChildAt(0);
    } else {
      // No children at all: show an inline error so the mistake is visible at runtime.
      TextView errorView = new TextView(getContext());
      errorView.setClickable(true);
      errorView.setTextColor(0xffff6600);
      errorView.setGravity(Gravity.CENTER);
      errorView.setTextSize(20);
      errorView.setText("The content view in PtrFrameLayout is empty. Do you forget to specify its id in xml layout file?");
      mContent = errorView;
      addView(mContent);
    }
    if (mHeaderView != null) {
      mHeaderView.bringToFront();
    }
    if (mFooterView != null) {
      mFooterView.bringToFront();
    }
    super.onFinishInflate();
  }
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
if (DEBUG && DEBUG_LAYOUT) {
PtrCLog.d(LOG_TAG, "onMeasure frame: width: %s, height: %s, padding: %s %s %s %s",
getMeasuredHeight(), getMeasuredWidth(),
getPaddingLeft(), getPaddingRight(), getPaddingTop(), getPaddingBottom());
}
if (mHeaderView != null) {
measureChildWithMargins(mHeaderView, widthMeasureSpec, 0, heightMeasureSpec, 0);
MarginLayoutParams lp = (MarginLayoutParams) mHeaderView.getLayoutParams();
mHeaderHeight = mHeaderView.getMeasuredHeight() + lp.topMargin + lp.bottomMargin;
mPtrHeaderIndicator.setHeaderHeight(mHeaderHeight);
}
if (mFooterView != null) {
measureChildWithMargins(mFooterView, widthMeasureSpec, 0, heightMeasureSpec, 0);
MarginLayoutParams lp = (MarginLayoutParams) mFooterView.getLayoutParams();
mFooterHeight = mFooterView.getMeasuredHeight() + lp.topMargin + lp.bottomMargin;
mPtrHeaderIndicator.setFooterHeight(mFooterHeight);
}
if (mContent != null) {
measureContentView(mContent, widthMeasureSpec, heightMeasureSpec);
if (DEBUG && DEBUG_LAYOUT) {
ViewGroup.MarginLayoutParams lp = (MarginLayoutParams) mContent.getLayoutParams();
PtrCLog.d(LOG_TAG, "onMeasure content, width: %s, height: %s, margin: %s %s %s %s",
getMeasuredWidth(), getMeasuredHeight(),
lp.leftMargin, lp.topMargin, lp.rightMargin, lp.bottomMargin);
PtrCLog.d(LOG_TAG, "onMeasure, currentPos: %s, lastPos: %s, top: %s",
mPtrHeaderIndicator.getCurrentPosY(), mPtrHeaderIndicator.getLastPosY(), mContent.getTop());
}
}
}
  // Measures the content child with padding and margins factored in, similar to
  // ViewGroup#measureChildWithMargins.
  // NOTE(review): the height spec includes lp.topMargin but not lp.bottomMargin
  // — confirm the asymmetry is intentional.
  private void measureContentView(View child,
                                  int parentWidthMeasureSpec,
                                  int parentHeightMeasureSpec) {
    final MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
    final int childWidthMeasureSpec = getChildMeasureSpec(parentWidthMeasureSpec,
        getPaddingLeft() + getPaddingRight() + lp.leftMargin + lp.rightMargin, lp.width);
    final int childHeightMeasureSpec = getChildMeasureSpec(parentHeightMeasureSpec,
        getPaddingTop() + getPaddingBottom() + lp.topMargin, lp.height);
    child.measure(childWidthMeasureSpec, childHeightMeasureSpec);
  }
  // All positioning is delegated to layoutChildren().
  @Override
  protected void onLayout(boolean flag, int i, int j, int k, int l) {
    layoutChildren();
  }
  /**
   * Positions header above the top edge, content below it, and footer below the
   * content, shifted by the indicator's current offset for whichever side is
   * active. When content is pinned, only header/footer move.
   */
  private void layoutChildren() {
    // because the header and footer can not show at the same time, so when header has a offset, the footer's offset should be 0, vice versa..
    int offsetHeaderY;
    int offsetFooterY;
    if (mPtrHeaderIndicator.isHeader()) {
      offsetHeaderY = mPtrHeaderIndicator.getCurrentPosY();
      offsetFooterY = 0;
    } else {
      offsetHeaderY = 0;
      offsetFooterY = mPtrHeaderIndicator.getCurrentPosY();
    }
    int paddingLeft = getPaddingLeft();
    int paddingTop = getPaddingTop();
    int contentBottom = 0;
    if (mHeaderView != null) {
      MarginLayoutParams lp = (MarginLayoutParams) mHeaderView.getLayoutParams();
      final int left = paddingLeft + lp.leftMargin;
      // Header sits entirely above the top edge (top is negative) until pulled down.
      final int top = paddingTop + lp.topMargin + offsetHeaderY - mHeaderHeight;
      final int right = left + mHeaderView.getMeasuredWidth();
      final int bottom = top + mHeaderView.getMeasuredHeight();
      mHeaderView.layout(left, top, right, bottom);
      if (DEBUG && DEBUG_LAYOUT) {
        PtrCLog.d(LOG_TAG, "onLayout header: %s %s %s %s", left, top, right, bottom);
      }
    }
    if (mContent != null) {
      if (isPinContent()) {
        offsetHeaderY = 0;
      }
      MarginLayoutParams lp = (MarginLayoutParams) mContent.getLayoutParams();
      final int left = paddingLeft + lp.leftMargin;
      final int top = paddingTop + lp.topMargin + offsetHeaderY;
      final int right = left + mContent.getMeasuredWidth();
      final int bottom = top + mContent.getMeasuredHeight() - (isPinContent() ? 0 : offsetFooterY);
      contentBottom = bottom;
      if (DEBUG && DEBUG_LAYOUT) {
        PtrCLog.d(LOG_TAG, "onLayout content: %s %s %s %s", left, top, right, bottom);
      }
      mContent.layout(left, top, right, bottom);
    }
    if (mFooterView != null) {
      MarginLayoutParams lp = (MarginLayoutParams) mFooterView.getLayoutParams();
      final int left = paddingLeft + lp.leftMargin;
      final int top = paddingTop + lp.topMargin + contentBottom - (isPinContent() ? offsetFooterY : 0);
      final int right = left + mFooterView.getMeasuredWidth();
      final int bottom = top + mFooterView.getMeasuredHeight();
      mFooterView.layout(left, top, right, bottom);
      if (DEBUG && DEBUG_LAYOUT) {
        PtrCLog.d(LOG_TAG, "onLayout footer: %s %s %s %s", left, top, right, bottom);
      }
    }
  }
  // Escape hatch to the untouched ViewGroup dispatch, used whenever this
  // layout decides not to consume the gesture itself.
  public boolean dispatchTouchEventSupper(MotionEvent e) {
    return super.dispatchTouchEvent(e);
  }
  /**
   * Core gesture handling: decides per-event whether the pull gesture moves the
   * header/footer or is forwarded to children. Vertical drags move whichever of
   * header/footer is active (or may become active per mMode and the PtrHandler
   * checks); everything else falls through to normal dispatch.
   */
  @Override
  public boolean dispatchTouchEvent(MotionEvent e) {
    if (!isEnabled() || mContent == null || mHeaderView == null) {
      return dispatchTouchEventSupper(e);
    }
    int action = e.getAction();
    switch (action) {
      case MotionEvent.ACTION_UP:
      case MotionEvent.ACTION_CANCEL:
        mPtrHeaderIndicator.onRelease();
        if (mPtrHeaderIndicator.hasLeftStartPosition()) {
          if (DEBUG) {
            PtrCLog.d(LOG_TAG, "call onRelease when user release");
          }
          onRelease(false);
          if (mPtrHeaderIndicator.hasMovedAfterPressedDown()) {
            sendCancelEvent();
            return true;
          }
          return dispatchTouchEventSupper(e);
        } else {
          return dispatchTouchEventSupper(e);
        }
      case MotionEvent.ACTION_DOWN:
        mHasSendCancelEvent = false;
        mPtrHeaderIndicator.onPressDown(e.getX(), e.getY());
        mScrollChecker.abortIfWorking();
        mPreventForHorizontal = false;
        // The cancel event will be sent once the position is moved.
        // So let the event pass to children.
        // fix #93, #102
        dispatchTouchEventSupper(e);
        return true;
      case MotionEvent.ACTION_MOVE:
        mLastMoveEvent = e;
        mPtrHeaderIndicator.onMove(e.getX(), e.getY());
        float offsetX = mPtrHeaderIndicator.getOffsetX();
        float offsetY = mPtrHeaderIndicator.getOffsetY();
        // Once a drag from the start position is judged horizontal, give the
        // whole gesture back to children.
        if (mDisableWhenHorizontalMove && !mPreventForHorizontal && (Math.abs(offsetX) > mPagingTouchSlop && Math.abs(offsetX) > Math.abs(offsetY))) {
          if (mPtrHeaderIndicator.isInStartPosition()) {
            mPreventForHorizontal = true;
          }
        }
        if (mPreventForHorizontal) {
          return dispatchTouchEventSupper(e);
        }
        boolean moveDown = offsetY > 0;
        boolean moveUp = !moveDown;
        boolean canMoveUp = mPtrHeaderIndicator.isHeader() && mPtrHeaderIndicator.hasLeftStartPosition(); // if the header is showing
        boolean canMoveDown = mFooterView != null && !mPtrHeaderIndicator.isHeader() && mPtrHeaderIndicator.hasLeftStartPosition(); // if the footer is showing
        // Mode.ordinal() bit 0 = refresh enabled, bit 1 = load-more enabled.
        boolean canHeaderMoveDown = mPtrHandler != null && mPtrHandler.checkCanDoRefresh(this, mContent, mHeaderView) && (mMode.ordinal() & 1) > 0;
        boolean canFooterMoveUp = mPtrHandler != null && mFooterView != null // The footer view could be null, so need double check
            && mPtrHandler instanceof PtrHandler2 && ((PtrHandler2) mPtrHandler).checkCanDoLoadMore(this, mContent, mFooterView) && (mMode.ordinal() & 2) > 0;
        if (DEBUG) {
          PtrCLog.v(LOG_TAG, "ACTION_MOVE: offsetY:%s, currentPos: %s, moveUp: %s, canMoveUp: %s, moveDown: %s: canMoveDown: %s canHeaderMoveDown: %s canFooterMoveUp: %s", offsetY, mPtrHeaderIndicator.getCurrentPosY(), moveUp, canMoveUp, moveDown, canMoveDown, canHeaderMoveDown, canFooterMoveUp);
        }
        // if either the header and footer are not showing
        if (!canMoveUp && !canMoveDown) {
          // disable move when header not reach top
          if (moveDown && !canHeaderMoveDown) {
            return dispatchTouchEventSupper(e);
          }
          if (moveUp && !canFooterMoveUp) {
            return dispatchTouchEventSupper(e);
          }
          // should show up header
          if (moveDown) {
            moveHeaderPos(offsetY);
            return true;
          }
          // should show up footer
          if (moveUp) {
            moveFooterPos(offsetY);
            return true;
          }
        }
        // if header is showing, then no need to move footer
        if (canMoveUp) {
          moveHeaderPos(offsetY);
          return true;
        }
        // if footer is showing, then no need to move header
        if (canMoveDown) {
          moveFooterPos(offsetY);
          return true;
        }
    }
    return dispatchTouchEventSupper(e);
  }
  // Marks the footer as the active side, negating deltaY so footer movement
  // shares the header-oriented coordinate space used by movePos().
  private void moveFooterPos(float deltaY) {
    mPtrHeaderIndicator.setIsHeader(false);
    // to keep the consistence with refresh, need to converse the deltaY
    movePos(-deltaY);
  }
  // Marks the header as the active side and applies the movement as-is.
  private void moveHeaderPos(float deltaY) {
    mPtrHeaderIndicator.setIsHeader(true);
    movePos(deltaY);
  }
/**
* if deltaY > 0, move the content down
*
* @param deltaY
*/
private void movePos(float deltaY) {
// has reached the top
if ((deltaY < 0 && mPtrHeaderIndicator.isInStartPosition())) {
if (DEBUG) {
PtrCLog.e(LOG_TAG, String.format("has reached the top"));
}
return;
}
int to = mPtrHeaderIndicator.getCurrentPosY() + (int) deltaY;
// over top
if (mPtrHeaderIndicator.willOverTop(to)) {
if (DEBUG) {
PtrCLog.e(LOG_TAG, String.format("over top"));
}
to = PtrIndicator.POS_START;
}
mPtrHeaderIndicator.setCurrentPos(to);
int change = to - mPtrHeaderIndicator.getLastPosY();
updatePos(mPtrHeaderIndicator.isHeader() ? change : -change);
}
  /**
   * Propagates a position change: fires status transitions, offsets the active
   * header/footer view (and the content unless pinned), and notifies UI handlers.
   *
   * @param change signed pixel delta in header-oriented coordinates
   */
  private void updatePos(int change) {
    if (change == 0) {
      return;
    }
    boolean isUnderTouch = mPtrHeaderIndicator.isUnderTouch();
    // once moved, cancel event will be sent to child
    if (isUnderTouch && !mHasSendCancelEvent && mPtrHeaderIndicator.hasMovedAfterPressedDown()) {
      mHasSendCancelEvent = true;
      sendCancelEvent();
    }
    // leave initiated position or just refresh complete
    if ((mPtrHeaderIndicator.hasJustLeftStartPosition() && mStatus == PTR_STATUS_INIT) ||
        (mPtrHeaderIndicator.goDownCrossFinishPosition() && mStatus == PTR_STATUS_COMPLETE && isEnabledNextPtrAtOnce())) {
      mStatus = PTR_STATUS_PREPARE;
      mPtrUIHandlerHolder.onUIRefreshPrepare(this);
      if (DEBUG) {
        PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshPrepare, mFlag %s", mFlag);
      }
    }
    // back to initiated position
    if (mPtrHeaderIndicator.hasJustBackToStartPosition()) {
      tryToNotifyReset();
      // recover event to children
      if (isUnderTouch) {
        sendDownEvent();
      }
    }
    // Pull to Refresh
    if (mStatus == PTR_STATUS_PREPARE) {
      // reach fresh height while moving from top to bottom
      if (isUnderTouch && !isAutoRefresh() && mPullToRefresh
          && mPtrHeaderIndicator.crossRefreshLineFromTopToBottom()) {
        tryToPerformRefresh();
      }
      // reach header height while auto refresh
      if (performAutoRefreshButLater() && mPtrHeaderIndicator.hasJustReachedHeaderHeightFromTopToBottom()) {
        tryToPerformRefresh();
      }
    }
    if (DEBUG) {
      PtrCLog.v(LOG_TAG, "updatePos: change: %s, current: %s last: %s, top: %s, headerHeight: %s",
          change, mPtrHeaderIndicator.getCurrentPosY(), mPtrHeaderIndicator.getLastPosY(), mContent.getTop(), mHeaderHeight);
    }
    // Physically move the active side's view; content follows unless pinned.
    if (mPtrHeaderIndicator.isHeader()) {
      mHeaderView.offsetTopAndBottom(change);
    } else {
      mFooterView.offsetTopAndBottom(change);
    }
    if (!isPinContent()) {
      mContent.offsetTopAndBottom(change);
    }
    invalidate();
    if (mPtrUIHandlerHolder.hasHandler()) {
      mPtrUIHandlerHolder.onUIPositionChange(this, isUnderTouch, mStatus, mPtrHeaderIndicator);
    }
    onPositionChange(isUnderTouch, mStatus, mPtrHeaderIndicator);
  }
/**
 * Hook for subclasses; called after every position update. Default is a no-op.
 *
 * @param isInTouching  whether a finger is currently down
 * @param status        current PTR status constant
 * @param mPtrIndicator indicator holding the current/last positions
 */
protected void onPositionChange(boolean isInTouching, byte status, PtrIndicator mPtrIndicator) {
}

/** @return the measured header height in pixels. */
@SuppressWarnings("unused")
public int getHeaderHeight() {
    return mHeaderHeight;
}
/**
 * Called when the touch is released (or a scroll is aborted/finished):
 * attempts to start a refresh, then scrolls the indicator to the position
 * appropriate for the resulting status.
 *
 * @param stayForLoading true to keep the header fully open while loading
 *                       instead of snapping back to the keep-header offset
 */
private void onRelease(boolean stayForLoading) {
    tryToPerformRefresh();
    if (mStatus == PTR_STATUS_LOADING) {
        if (!mKeepHeaderWhenRefresh) {
            // header not kept: scroll all the way back while loading
            tryScrollBackToTopWhileLoading();
        } else if (mPtrHeaderIndicator.isOverOffsetToKeepHeaderWhileLoading() && !stayForLoading) {
            // scroll header back to the keep-header offset
            mScrollChecker.tryToScrollTo(mPtrHeaderIndicator.getOffsetToKeepHeaderWhileLoading(), mDurationToClose);
        }
        // otherwise: leave the header exactly where it is for the duration of the load
    } else if (mStatus == PTR_STATUS_COMPLETE) {
        notifyUIRefreshComplete(false);
    } else {
        // refresh did not start: abort and scroll back to the rest position
        tryScrollBackToTopAbortRefresh();
    }
}
/**
 * Installs a hook executed before the refresh-complete UI sequence.
 * please DO REMEMBER resume the hook, otherwise the complete sequence
 * (see {@code notifyUIRefreshComplete}) never runs.
 *
 * @param hook hook to run; its resume action re-enters the complete flow
 */
public void setRefreshCompleteHook(PtrUIHandlerHook hook) {
    mRefreshCompleteHook = hook;
    // when the hook resumes, finish the refresh-complete flow ignoring the hook
    hook.setResumeAction(new Runnable() {
        @Override
        public void run() {
            if (DEBUG) {
                PtrCLog.d(LOG_TAG, "mRefreshCompleteHook resume.");
            }
            notifyUIRefreshComplete(true);
        }
    });
}
/**
 * Scroll back to top if the view is not under touch and has left the start
 * position.
 */
private void tryScrollBackToTop() {
    if (!mPtrHeaderIndicator.isUnderTouch() && mPtrHeaderIndicator.hasLeftStartPosition()) {
        mScrollChecker.tryToScrollTo(PtrIndicator.POS_START, mDurationToCloseHeader);
    }
}
/**
 * Alias of {@link #tryScrollBackToTop()}; just make easier to understand.
 */
private void tryScrollBackToTopWhileLoading() {
    tryScrollBackToTop();
}
/**
 * Alias of {@link #tryScrollBackToTop()}; just make easier to understand.
 */
private void tryScrollBackToTopAfterComplete() {
    tryScrollBackToTop();
}
/**
 * Alias of {@link #tryScrollBackToTop()}; just make easier to understand.
 */
private void tryScrollBackToTopAbortRefresh() {
    tryScrollBackToTop();
}
/**
 * Starts a refresh if the status is PREPARE and the pull distance qualifies.
 * NOTE: this method always returns false, even when a refresh is started;
 * all current callers ignore the return value.
 *
 * @return always false (historical signature)
 */
private boolean tryToPerformRefresh() {
    if (mStatus != PTR_STATUS_PREPARE) {
        return false;
    }
    // auto-refresh only needs the keep-header offset; manual pull needs the full refresh offset
    if ((mPtrHeaderIndicator.isOverOffsetToKeepHeaderWhileLoading() && isAutoRefresh()) || mPtrHeaderIndicator.isOverOffsetToRefresh()) {
        mStatus = PTR_STATUS_LOADING;
        performRefresh();
    }
    return false;
}
/**
 * Fires the refresh callbacks: notifies UI handlers, then dispatches to the
 * business handler — header pulls go to onRefreshBegin, footer pulls to
 * onLoadMoreBegin when the handler supports load-more.
 */
private void performRefresh() {
    mLoadingStartTime = System.currentTimeMillis();
    if (mPtrUIHandlerHolder.hasHandler()) {
        mPtrUIHandlerHolder.onUIRefreshBegin(this);
        if (DEBUG) {
            PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshBegin");
        }
    }
    // no business handler registered: nothing more to do
    if (mPtrHandler == null) {
        return;
    }
    if (mPtrHeaderIndicator.isHeader()) {
        mPtrHandler.onRefreshBegin(this);
    } else if (mPtrHandler instanceof PtrHandler2) {
        // footer pull only fires when the handler supports load-more
        ((PtrHandler2) mPtrHandler).onLoadMoreBegin(this);
    }
}
/**
 * If at the top and not in loading, reset: fires onUIReset, returns the
 * status to INIT and clears the auto-refresh flag.
 *
 * @return true if a reset was performed
 */
private boolean tryToNotifyReset() {
    if ((mStatus == PTR_STATUS_COMPLETE || mStatus == PTR_STATUS_PREPARE) && mPtrHeaderIndicator.isInStartPosition()) {
        if (mPtrUIHandlerHolder.hasHandler()) {
            mPtrUIHandlerHolder.onUIReset(this);
            if (DEBUG) {
                PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIReset");
            }
        }
        mStatus = PTR_STATUS_INIT;
        clearFlag();
        return true;
    }
    return false;
}
/**
 * Called when the auto-refresh scroll animation is aborted; releases with
 * stay-for-loading semantics if the view has left the start position.
 */
protected void onPtrScrollAbort() {
    if (mPtrHeaderIndicator.hasLeftStartPosition() && isAutoRefresh()) {
        if (DEBUG) {
            PtrCLog.d(LOG_TAG, "call onRelease after scroll abort");
        }
        onRelease(true);
    }
}

/**
 * Called when the auto-refresh scroll animation finishes; same release
 * behavior as {@link #onPtrScrollAbort()}.
 */
protected void onPtrScrollFinish() {
    if (mPtrHeaderIndicator.hasLeftStartPosition() && isAutoRefresh()) {
        if (DEBUG) {
            PtrCLog.d(LOG_TAG, "call onRelease after scroll finish");
        }
        onRelease(true);
    }
}
/**
 * Detect whether is refreshing.
 *
 * @return true while the status is LOADING
 */
public boolean isRefreshing() {
    return mStatus == PTR_STATUS_LOADING;
}
/**
 * Call this when data is loaded.
 * The UI will perform complete at once or after a delay, depends on the time elapsed is greater then {@link #mLoadingMinTime} or not.
 */
final public void refreshComplete() {
    if (DEBUG) {
        PtrCLog.i(LOG_TAG, "refreshComplete");
    }
    if (mRefreshCompleteHook != null) {
        mRefreshCompleteHook.reset();
    }
    // enforce the minimum visible loading time before completing
    int delay = (int) (mLoadingMinTime - (System.currentTimeMillis() - mLoadingStartTime));
    if (delay <= 0) {
        if (DEBUG) {
            PtrCLog.d(LOG_TAG, "performRefreshComplete at once");
        }
        performRefreshComplete();
    } else {
        postDelayed(new Runnable() {
            @Override
            public void run() {
                performRefreshComplete();
            }
        }, delay);
        if (DEBUG) {
            PtrCLog.d(LOG_TAG, "performRefreshComplete after delay: %s", delay);
        }
    }
}
/**
 * Do refresh complete work when time elapsed is greater than {@link #mLoadingMinTime}.
 * If an auto-refresh scroll is still running, completion is deferred until the
 * scroller stops (it re-enters via the scroll-finish path).
 */
private void performRefreshComplete() {
    mStatus = PTR_STATUS_COMPLETE;
    // if is auto refresh do nothing, wait scroller stop
    if (mScrollChecker.mIsRunning && isAutoRefresh()) {
        // do nothing
        if (DEBUG) {
            PtrCLog.d(LOG_TAG, "performRefreshComplete do nothing, scrolling: %s, auto refresh: %s",
                    mScrollChecker.mIsRunning, mFlag);
        }
        return;
    }
    notifyUIRefreshComplete(false);
}
/**
 * Do real refresh work. If there is a hook, execute the hook first.
 *
 * @param ignoreHook true to skip the hook (used by the hook's own resume action)
 */
private void notifyUIRefreshComplete(boolean ignoreHook) {
    /**
     * After hook operation is done, {@link #notifyUIRefreshComplete} will be call in resume action to ignore hook.
     */
    if (mPtrHeaderIndicator.hasLeftStartPosition() && !ignoreHook && mRefreshCompleteHook != null) {
        if (DEBUG) {
            PtrCLog.d(LOG_TAG, "notifyUIRefreshComplete mRefreshCompleteHook run.");
        }
        mRefreshCompleteHook.takeOver();
        return;
    }
    if (mPtrUIHandlerHolder.hasHandler()) {
        if (DEBUG) {
            PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshComplete");
        }
        mPtrUIHandlerHolder.onUIRefreshComplete(this);
    }
    mPtrHeaderIndicator.onUIRefreshComplete();
    tryScrollBackToTopAfterComplete();
    tryToNotifyReset();
}
/** Starts an automatic header refresh at once with the default close duration. */
public void autoRefresh() {
    autoRefresh(true, mDurationToCloseHeader);
}

/** Starts an automatic header refresh; {@code atOnce} fires the callbacks immediately. */
public void autoRefresh(boolean atOnce) {
    autoRefresh(atOnce, mDurationToCloseHeader);
}

/** Clears the auto-refresh bits from the flag word. */
private void clearFlag() {
    // remove auto fresh flag
    mFlag = mFlag & ~MASK_AUTO_REFRESH;
}

/** Starts an automatic footer load-more at once. */
public void autoLoadMore() {
    autoRefresh(true, mDurationToCloseHeader, false);
}

/** Starts an automatic footer load-more; {@code atOnce} fires the callbacks immediately. */
public void autoLoadMore(boolean atOnce) {
    autoRefresh(atOnce, mDurationToCloseHeader, false);
}

/** Starts an automatic header refresh with a custom scroll duration. */
public void autoRefresh(boolean atOnce, int duration) {
    autoRefresh(atOnce, duration, true);
}
/**
 * Full auto-refresh entry point: transitions INIT -> PREPARE, scrolls to the
 * refresh offset, and (if {@code atOnce}) enters LOADING immediately.
 * Does nothing unless the current status is INIT.
 *
 * @param atOnce   true to fire refresh callbacks immediately; false to wait
 *                 until the scroll reaches the header height
 * @param duration scroll animation duration in ms
 * @param isHeader true for header refresh, false for footer load-more
 */
public void autoRefresh(boolean atOnce, int duration, boolean isHeader) {
    if (mStatus != PTR_STATUS_INIT) {
        return;
    }
    mFlag |= atOnce ? FLAG_AUTO_REFRESH_AT_ONCE : FLAG_AUTO_REFRESH_BUT_LATER;
    mStatus = PTR_STATUS_PREPARE;
    if (mPtrUIHandlerHolder.hasHandler()) {
        mPtrUIHandlerHolder.onUIRefreshPrepare(this);
        if (DEBUG) {
            PtrCLog.i(LOG_TAG, "PtrUIHandler: onUIRefreshPrepare, mFlag %s", mFlag);
        }
    }
    mPtrHeaderIndicator.setIsHeader(isHeader);
    mScrollChecker.tryToScrollTo(mPtrHeaderIndicator.getOffsetToRefresh(), duration);
    if (atOnce) {
        mStatus = PTR_STATUS_LOADING;
        performRefresh();
    }
}
/** @return true if either auto-refresh flag (at-once or but-later) is set. */
public boolean isAutoRefresh() {
    return (mFlag & MASK_AUTO_REFRESH) > 0;
}

/** @return true if the deferred (but-later) auto-refresh flag is set. */
private boolean performAutoRefreshButLater() {
    return (mFlag & MASK_AUTO_REFRESH) == FLAG_AUTO_REFRESH_BUT_LATER;
}

/**
 * If @param enable has been set to true. The user can perform next PTR at once.
 *
 * @param enable true to allow a new pull-to-refresh while COMPLETE
 */
public void setEnabledNextPtrAtOnce(boolean enable) {
    if (enable) {
        mFlag = mFlag | FLAG_ENABLE_NEXT_PTR_AT_ONCE;
    } else {
        mFlag = mFlag & ~FLAG_ENABLE_NEXT_PTR_AT_ONCE;
    }
}

/** @return true if a new pull-to-refresh may start while still COMPLETE. */
public boolean isEnabledNextPtrAtOnce() {
    return (mFlag & FLAG_ENABLE_NEXT_PTR_AT_ONCE) > 0;
}

/**
 * The content view will not move when {@param pinContent} set to true.
 *
 * @param pinContent true to keep the content fixed while the indicator moves
 */
public void setPinContent(boolean pinContent) {
    if (pinContent) {
        mFlag = mFlag | FLAG_PIN_CONTENT;
    } else {
        mFlag = mFlag & ~FLAG_PIN_CONTENT;
    }
}

/** @return true if the content is pinned (does not move with the indicator). */
public boolean isPinContent() {
    return (mFlag & FLAG_PIN_CONTENT) > 0;
}
/**
 * It's useful when working with viewpager.
 *
 * @param disable true to stop intercepting vertical pulls once a horizontal move is detected
 */
public void disableWhenHorizontalMove(boolean disable) {
    mDisableWhenHorizontalMove = disable;
}

/**
 * loading will last at least for so long
 *
 * @param time minimum visible loading time in ms
 */
public void setLoadingMinTime(int time) {
    mLoadingMinTime = time;
}

/**
 * Not necessary any longer. Once moved, cancel event will be sent to child.
 *
 * @param yes ignored
 */
@Deprecated
public void setInterceptEventWhileWorking(boolean yes) {
}

/** @return the content view wrapped by this layout. */
@SuppressWarnings({"unused"})
public View getContentView() {
    return mContent;
}

/** Sets the business handler that receives refresh/load-more callbacks. */
public void setPtrHandler(PtrHandler ptrHandler) {
    mPtrHandler = ptrHandler;
}
/** Appends a UI handler to the handler chain. */
public void addPtrUIHandler(PtrUIHandler ptrUIHandler) {
    PtrUIHandlerHolder.addHandler(mPtrUIHandlerHolder, ptrUIHandler);
}

/** Removes a UI handler from the handler chain. */
@SuppressWarnings({"unused"})
public void removePtrUIHandler(PtrUIHandler ptrUIHandler) {
    mPtrUIHandlerHolder = PtrUIHandlerHolder.removeHandler(mPtrUIHandlerHolder, ptrUIHandler);
}

/**
 * Replaces the position indicator, carrying over state from the old one
 * so in-flight gestures are not disturbed.
 */
public void setPtrHeaderIndicator(PtrIndicator slider) {
    if (mPtrHeaderIndicator != null && mPtrHeaderIndicator != slider) {
        slider.convertFrom(mPtrHeaderIndicator);
    }
    mPtrHeaderIndicator = slider;
}

/** Sets the refresh/load-more mode. */
public void setMode(Mode mode) {
    mMode = mode;
}

/** @return the current refresh/load-more mode. */
public Mode getMode() {
    return mMode;
}

/** @return the drag resistance factor. */
@SuppressWarnings({"unused"})
public float getResistance() {
    return mPtrHeaderIndicator.getResistance();
}

/** Sets the drag resistance factor (higher = harder to pull). */
public void setResistance(float resistance) {
    mPtrHeaderIndicator.setResistance(resistance);
}
/** @return duration (ms) used when scrolling back to the refresh position. */
@SuppressWarnings({"unused"})
public float getDurationToClose() {
    return mDurationToClose;
}

/**
 * The duration to return back to the refresh position
 *
 * @param duration in ms
 */
public void setDurationToClose(int duration) {
    mDurationToClose = duration;
}

/** @return duration (ms) used when closing the header completely. */
@SuppressWarnings({"unused"})
public long getDurationToCloseHeader() {
    return mDurationToCloseHeader;
}

/**
 * The duration to close time
 *
 * @param duration in ms
 */
public void setDurationToCloseHeader(int duration) {
    mDurationToCloseHeader = duration;
}

/** Sets the ratio of header height at which a release triggers refresh. */
public void setRatioOfHeaderHeightToRefresh(float ratio) {
    mPtrHeaderIndicator.setRatioOfHeaderHeightToRefresh(ratio);
}

/** @return the pull offset (px) required to trigger a refresh. */
public int getOffsetToRefresh() {
    return mPtrHeaderIndicator.getOffsetToRefresh();
}

/** Sets an explicit pull offset (px) that triggers a refresh. */
@SuppressWarnings({"unused"})
public void setOffsetToRefresh(int offset) {
    mPtrHeaderIndicator.setOffsetToRefresh(offset);
}

/** @return the ratio of header height that triggers refresh. */
@SuppressWarnings({"unused"})
public float getRatioOfHeaderToHeightRefresh() {
    return mPtrHeaderIndicator.getRatioOfHeaderToHeightRefresh();
}
/** Sets the offset (px) at which the header is held open while loading. */
@SuppressWarnings({"unused"})
public void setOffsetToKeepHeaderWhileLoading(int offset) {
    mPtrHeaderIndicator.setOffsetToKeepHeaderWhileLoading(offset);
}

/** @return the offset (px) at which the header is held open while loading. */
@SuppressWarnings({"unused"})
public int getOffsetToKeepHeaderWhileLoading() {
    return mPtrHeaderIndicator.getOffsetToKeepHeaderWhileLoading();
}

/** @return whether the header stays visible during loading. */
@SuppressWarnings({"unused"})
public boolean isKeepHeaderWhenRefresh() {
    return mKeepHeaderWhenRefresh;
}

/** Sets whether the header stays visible during loading. */
public void setKeepHeaderWhenRefresh(boolean keepOrNot) {
    mKeepHeaderWhenRefresh = keepOrNot;
}

/** @return true for pull-to-refresh, false for release-to-refresh. */
public boolean isPullToRefresh() {
    return mPullToRefresh;
}

/** Sets pull-to-refresh (true) vs release-to-refresh (false) behavior. */
public void setPullToRefresh(boolean pullToRefresh) {
    mPullToRefresh = pullToRefresh;
}

/** @return the header view, or null if none installed. */
@SuppressWarnings({"unused"})
public View getHeaderView() {
    return mHeaderView;
}
/**
 * Installs the header view, removing any previously attached, different header.
 * Supplies default layout params (match-parent width, wrap-content height)
 * when the view has none.
 *
 * @param header the new header view; dereferenced, so must not be null
 */
public void setHeaderView(View header) {
    // drop the old header before attaching a different one
    if (mHeaderView != null && header != null && mHeaderView != header) {
        removeView(mHeaderView);
    }
    ViewGroup.LayoutParams lp = header.getLayoutParams();
    if (lp == null) {
        // named constants instead of magic -1/-2
        lp = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
        header.setLayoutParams(lp);
    }
    mHeaderView = header;
    addView(header);
}
/**
 * Installs the footer view, removing any previously attached, different footer.
 *
 * Bug fix: the previous code compared against and removed {@code mHeaderView}
 * (a copy-paste from setHeaderView), which could silently detach the header
 * and leave the old footer still attached. It now compares/removes the footer.
 *
 * @param footer the new footer view; dereferenced, so must not be null
 */
public void setFooterView(View footer) {
    // drop the old footer before attaching a different one
    if (mFooterView != null && footer != null && mFooterView != footer) {
        removeView(mFooterView);
    }
    ViewGroup.LayoutParams lp = footer.getLayoutParams();
    if (lp == null) {
        // named constants instead of magic -1/-2
        lp = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
        footer.setLayoutParams(lp);
    }
    mFooterView = footer;
    addView(footer);
}
/** Accepts only this layout's own LayoutParams subtype. */
@Override
protected boolean checkLayoutParams(ViewGroup.LayoutParams p) {
    // instanceof already evaluates to false for null, so the explicit
    // null check was redundant
    return p instanceof LayoutParams;
}
/** Default params: fill the layout in both dimensions. */
@Override
protected ViewGroup.LayoutParams generateDefaultLayoutParams() {
    return new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
}

/** Wraps foreign layout params in this layout's own subtype. */
@Override
protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) {
    return new LayoutParams(p);
}

/** Inflates layout params from XML attributes. */
@Override
public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) {
    return new LayoutParams(getContext(), attrs);
}
/**
 * Dispatches a synthetic ACTION_CANCEL to the child, built from the last
 * move event, so the child abandons its own gesture once the layout takes
 * over the drag.
 */
private void sendCancelEvent() {
    if (DEBUG) {
        PtrCLog.d(LOG_TAG, "send cancel event");
    }
    // The ScrollChecker will update position and lead to send cancel event when mLastMoveEvent is null.
    // fix #104, #80, #92
    if (mLastMoveEvent == null) {
        return;
    }
    MotionEvent last = mLastMoveEvent;
    // event time is pushed past the long-press timeout so the child does not treat it as a tap
    MotionEvent e = MotionEvent.obtain(last.getDownTime(), last.getEventTime() + ViewConfiguration.getLongPressTimeout(), MotionEvent.ACTION_CANCEL, last.getX(), last.getY(), last.getMetaState());
    dispatchTouchEventSupper(e);
}
/**
 * Dispatches a synthetic ACTION_DOWN to the child, built from the last move
 * event, so the child can resume its own gesture after the layout returns
 * to the start position.
 */
private void sendDownEvent() {
    if (DEBUG) {
        PtrCLog.d(LOG_TAG, "send down event");
    }
    final MotionEvent last = mLastMoveEvent;
    // Null guard mirroring sendCancelEvent(): position updates driven by the
    // ScrollChecker can reach here before any move event was recorded,
    // which previously caused an NPE on last.getDownTime().
    if (last == null) {
        return;
    }
    MotionEvent e = MotionEvent.obtain(last.getDownTime(), last.getEventTime(), MotionEvent.ACTION_DOWN, last.getX(), last.getY(), last.getMetaState());
    dispatchTouchEventSupper(e);
}
/**
 * Layout params for direct children of this layout; margin-aware,
 * with the standard four ViewGroup constructor forms.
 */
public static class LayoutParams extends MarginLayoutParams {
    public LayoutParams(Context c, AttributeSet attrs) {
        super(c, attrs);
    }
    public LayoutParams(int width, int height) {
        super(width, height);
    }
    @SuppressWarnings({"unused"})
    public LayoutParams(MarginLayoutParams source) {
        super(source);
    }
    public LayoutParams(ViewGroup.LayoutParams source) {
        super(source);
    }
}
/**
 * Drives smooth programmatic position changes (auto-refresh, snap-back)
 * by re-posting itself until the Scroller finishes, feeding each frame's
 * delta into moveHeaderPos/moveFooterPos.
 */
class ScrollChecker implements Runnable {
    // scroller Y at the previous frame; deltas are computed against this
    private int mLastFlingY;
    private Scroller mScroller;
    // true while an animation is posted
    private boolean mIsRunning = false;
    // animation start position (for logging)
    private int mStart;
    // animation target position
    private int mTo;
    public ScrollChecker() {
        mScroller = new Scroller(getContext());
    }
    public void run() {
        boolean finish = !mScroller.computeScrollOffset() || mScroller.isFinished();
        int curY = mScroller.getCurrY();
        int deltaY = curY - mLastFlingY;
        if (DEBUG) {
            if (deltaY != 0) {
                PtrCLog.v(LOG_TAG,
                        "scroll: %s, start: %s, to: %s, currentPos: %s, current :%s, last: %s, delta: %s",
                        finish, mStart, mTo, mPtrHeaderIndicator.getCurrentPosY(), curY, mLastFlingY, deltaY);
            }
        }
        if (!finish) {
            mLastFlingY = curY;
            // feed the frame delta through the same path as touch moves
            if (mPtrHeaderIndicator.isHeader()) {
                moveHeaderPos(deltaY);
            } else {
                moveFooterPos(-deltaY);
            }
            post(this);
        } else {
            finish();
        }
    }
    private void finish() {
        if (DEBUG) {
            PtrCLog.v(LOG_TAG, "finish, currentPos:%s", mPtrHeaderIndicator.getCurrentPosY());
        }
        reset();
        onPtrScrollFinish();
    }
    private void reset() {
        mIsRunning = false;
        mLastFlingY = 0;
        removeCallbacks(this);
    }
    /** Aborts a running animation, notifying onPtrScrollAbort(). */
    public void abortIfWorking() {
        if (mIsRunning) {
            if (!mScroller.isFinished()) {
                mScroller.forceFinished(true);
            }
            onPtrScrollAbort();
            reset();
        }
    }
    /**
     * Starts an animated scroll from the current position to {@code to}
     * over {@code duration} ms; no-op if already there.
     */
    public void tryToScrollTo(int to, int duration) {
        if (mPtrHeaderIndicator.isAlreadyHere(to)) {
            return;
        }
        mStart = mPtrHeaderIndicator.getCurrentPosY();
        mTo = to;
        int distance = to - mStart;
        if (DEBUG) {
            PtrCLog.d(LOG_TAG, "tryToScrollTo: start: %s, distance:%s, to:%s", mStart, distance, to);
        }
        removeCallbacks(this);
        mLastFlingY = 0;
        // fix #47: Scroller should be reused, https://github.com/liaohuqiu/android-Ultra-Pull-To-Refresh/issues/47
        if (!mScroller.isFinished()) {
            mScroller.forceFinished(true);
        }
        mScroller.startScroll(0, 0, 0, distance, duration);
        post(this);
        mIsRunning = true;
    }
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager;
import static org.apache.hadoop.fs.CreateFlag.CREATE;
import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ConfigurationException;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerReacquisitionContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerSignalContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerStartContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.DeletionAsUserContext;
import org.apache.hadoop.yarn.server.nodemanager.executor.LocalizerStartContext;
import org.apache.hadoop.yarn.server.nodemanager.util.LCEResourcesHandler;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
/**
* This is intended to test the LinuxContainerExecutor code, but because of some
* security restrictions this can only be done with some special setup first. <br>
* <ol>
* <li>Compile the code with container-executor.conf.dir set to the location you
* want for testing. <br>
*
* <pre>
* <code>
* > mvn clean install -Pnative -Dcontainer-executor.conf.dir=/etc/hadoop
* -DskipTests
* </code>
* </pre>
*
* <li>Set up <code>${container-executor.conf.dir}/container-executor.cfg</code>
* container-executor.cfg needs to be owned by root and have in it the proper
* config values. <br>
*
* <pre>
* <code>
* > cat /etc/hadoop/container-executor.cfg
* yarn.nodemanager.linux-container-executor.group=mapred
* #depending on the user id of the application.submitter option
* min.user.id=1
* > sudo chown root:root /etc/hadoop/container-executor.cfg
* > sudo chmod 444 /etc/hadoop/container-executor.cfg
* </code>
* </pre>
*
* <li>Move the binary and set proper permissions on it. It needs to be owned by
* root, the group needs to be the group configured in container-executor.cfg,
* and it needs the setuid bit set. (The build will also overwrite it so you
* need to move it to a place that you can support it. <br>
*
* <pre>
* <code>
* > cp ./hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/c/container-executor/container-executor /tmp/
* > sudo chown root:mapred /tmp/container-executor
* > sudo chmod 4050 /tmp/container-executor
* </code>
* </pre>
*
* <li>Run the tests with the execution enabled (The user you run the tests as
* needs to be part of the group from the config. <br>
*
* <pre>
* <code>
* mvn test -Dtest=TestLinuxContainerExecutor -Dapplication.submitter=nobody -Dcontainer-executor.path=/tmp/container-executor
* </code>
* </pre>
*
* <li>The test suite also contains tests to test mounting of CGroups. By
* default, these tests are not run. To run them, add -Dcgroups.mount=<mount-point>
* Please note that the test does not unmount the CGroups at the end of the test,
* since that requires root permissions. <br>
*
* <li>The tests that are run are sensitive to directory permissions. All parent
* directories must be searchable by the user that the tasks are run as. If you
* wish to run the tests in a different directory, please set it using
* -Dworkspace.dir
*
* </ol>
*/
public class TestLinuxContainerExecutor {
private static final Logger LOG =
    LoggerFactory.getLogger(TestLinuxContainerExecutor.class);
// Scratch directory shared by all tests; location overridable via -Dworkspace.dir.
private static File workSpace;
static {
    String basedir = System.getProperty("workspace.dir");
    if(basedir == null || basedir.isEmpty()) {
        // default to the Maven build output directory
        basedir = "target";
    }
    workSpace = new File(basedir,
        TestLinuxContainerExecutor.class.getName() + "-workSpace");
}
// Remains null when -Dcontainer-executor.path is unset; tests then skip (see shouldRun()).
private LinuxContainerExecutor exec = null;
// User containers run as; from -Dapplication.submitter, default "nobody".
private String appSubmitter = null;
private LocalDirsHandlerService dirsHandler;
private Configuration conf;
private FileContext files;
/**
 * Creates the workspace, local and log dirs (world-writable so the container
 * user can reach them), and — only when -Dcontainer-executor.path is set —
 * builds the executor, configuration and dirs handler used by the tests.
 */
@Before
public void setup() throws Exception {
    files = FileContext.getLocalFSFileContext();
    Path workSpacePath = new Path(workSpace.getAbsolutePath());
    files.mkdir(workSpacePath, null, true);
    // 777 so the container user (not the test user) can write under it
    FileUtil.chmod(workSpace.getAbsolutePath(), "777");
    File localDir = new File(workSpace.getAbsoluteFile(), "localDir");
    files.mkdir(new Path(localDir.getAbsolutePath()), new FsPermission("777"),
        false);
    File logDir = new File(workSpace.getAbsoluteFile(), "logDir");
    files.mkdir(new Path(logDir.getAbsolutePath()), new FsPermission("777"),
        false);
    String exec_path = System.getProperty("container-executor.path");
    if (exec_path != null && !exec_path.isEmpty()) {
        conf = new Configuration(false);
        conf.setClass("fs.AbstractFileSystem.file.impl",
            org.apache.hadoop.fs.local.LocalFs.class,
            org.apache.hadoop.fs.AbstractFileSystem.class);
        appSubmitter = System.getProperty("application.submitter");
        if (appSubmitter == null || appSubmitter.isEmpty()) {
            appSubmitter = "nobody";
        }
        conf.set(YarnConfiguration.NM_NONSECURE_MODE_LOCAL_USER_KEY, appSubmitter);
        LOG.info("Setting " + YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH
            + "=" + exec_path);
        conf.set(YarnConfiguration.NM_LINUX_CONTAINER_EXECUTOR_PATH, exec_path);
        exec = new LinuxContainerExecutor();
        exec.setConf(conf);
        conf.set(YarnConfiguration.NM_LOCAL_DIRS, localDir.getAbsolutePath());
        conf.set(YarnConfiguration.NM_LOG_DIRS, logDir.getAbsolutePath());
        dirsHandler = new LocalDirsHandlerService();
        dirsHandler.init(conf);
        // pre-create the usercache/filecache layout the localizer expects
        List<String> localDirs = dirsHandler.getLocalDirs();
        for (String dir : localDirs) {
            Path userDir = new Path(dir, ContainerLocalizer.USERCACHE);
            files.mkdir(userDir, new FsPermission("777"), false);
            // $local/filecache
            Path fileDir = new Path(dir, ContainerLocalizer.FILECACHE);
            files.mkdir(fileDir, new FsPermission("777"), false);
        }
    }
}
/** Removes the whole workspace after each test. */
@After
public void tearDown() throws Exception {
    FileContext.getLocalFSFileContext().delete(
        new Path(workSpace.getAbsolutePath()), true);
}

/**
 * Deletes the per-user appcache via the executor (it owns the files as the
 * container user), then removes the usercache dir directly.
 */
private void cleanupUserAppCache(String user) throws Exception {
    List<String> localDirs = dirsHandler.getLocalDirs();
    for (String dir : localDirs) {
        Path usercachedir = new Path(dir, ContainerLocalizer.USERCACHE);
        Path userdir = new Path(usercachedir, user);
        Path appcachedir = new Path(userdir, ContainerLocalizer.APPCACHE);
        exec.deleteAsUser(new DeletionAsUserContext.Builder()
            .setUser(user)
            .setSubDir(appcachedir)
            .build());
        FileContext.getLocalFSFileContext().delete(usercachedir, true);
    }
}
/** Deletes the per-user filecache directory in every local dir, as the user. */
private void cleanupUserFileCache(String user) {
    List<String> localDirs = dirsHandler.getLocalDirs();
    for (String dir : localDirs) {
        Path filecache = new Path(dir, ContainerLocalizer.FILECACHE);
        Path filedir = new Path(filecache, user);
        exec.deleteAsUser(new DeletionAsUserContext.Builder()
            .setUser(user)
            .setSubDir(filedir)
            .build());
    }
}

/**
 * Deletes the log dir of the most recently created container.
 * NOTE(review): relies on the current value of the {@code id} counter —
 * appId uses {@code id} and containerId uses {@code id - 1}; only valid
 * right after a runAndBlock() sequence. Verify against callers.
 */
private void cleanupLogDirs(String user) {
    List<String> logDirs = dirsHandler.getLogDirs();
    for (String dir : logDirs) {
        String appId = "APP_" + id;
        String containerId = "CONTAINER_" + (id - 1);
        Path appdir = new Path(dir, appId);
        Path containerdir = new Path(appdir, containerId);
        exec.deleteAsUser(new DeletionAsUserContext.Builder()
            .setUser(user)
            .setSubDir(containerdir)
            .build());
    }
}
/**
 * Full cleanup after a launched container: appcache, filecache, log dirs,
 * and the well-known files the launch left in the workspace.
 *
 * @param user user the files are owned by (deletions go through the executor)
 */
private void cleanupAppFiles(String user) throws Exception {
    cleanupUserAppCache(user);
    cleanupUserFileCache(user);
    cleanupLogDirs(user);
    // files written into the workspace by the container launch
    String[] files =
        { "launch_container.sh", "container_tokens", "touch-file" };
    Path ws = new Path(workSpace.toURI());
    for (String file : files) {
        File f = new File(workSpace, file);
        if (f.exists()) {
            exec.deleteAsUser(new DeletionAsUserContext.Builder()
                .setUser(user)
                .setSubDir(new Path(file))
                .setBasedirs(ws)
                .build());
        }
    }
}
/**
 * Whether the native container-executor was configured for this run; tests
 * use this with Assume to skip themselves when it was not.
 *
 * @return true when setup() created an executor (container-executor.path set)
 */
private boolean shouldRun() {
    if (exec != null) {
        return true;
    }
    LOG.warn("Not running test because container-executor.path is not set");
    return false;
}
/**
 * Writes a throw-away shell script that exec's the given command with each
 * argument single-quoted.
 *
 * @param cmd command and its arguments
 * @return absolute path of the generated script
 * @throws IOException if the temp file cannot be created
 */
private String writeScriptFile(String... cmd) throws IOException {
    File f = File.createTempFile("TestLinuxContainerExecutor", ".sh");
    f.deleteOnExit();
    // try-with-resources: the writer was previously leaked if a write threw
    try (PrintWriter p = new PrintWriter(new FileOutputStream(f))) {
        p.println("#!/bin/sh");
        p.print("exec");
        for (String part : cmd) {
            p.print(" '");
            // NOTE(review): backslash-escaping inside single quotes is not valid
            // POSIX sh quoting (the portable form is '\''); existing behavior kept.
            p.print(part.replace("\\", "\\\\").replace("'", "\\'"));
            p.print("'");
        }
        p.println();
    }
    return f.getAbsolutePath();
}
// Monotonic counter shared by app and container ids; also read by cleanupLogDirs().
private int id = 0;
/** @return the next unique integer id (thread-safe). */
private synchronized int getNextId() {
    id += 1;
    return id;
}
/** @return a mock ContainerId whose toString() is "CONTAINER_<nextId>". */
private ContainerId getNextContainerId() {
    ContainerId cId = mock(ContainerId.class);
    String id = "CONTAINER_" + getNextId();
    when(cId.toString()).thenReturn(id);
    return cId;
}
/** Launches {@code cmd} in a fresh container and blocks until it exits. */
private int runAndBlock(String... cmd)
    throws IOException, ConfigurationException {
    return runAndBlock(getNextContainerId(), cmd);
}

/**
 * Wraps {@code cmd} in a shell script and launches it as container
 * {@code cId} via the executor, blocking until completion.
 *
 * @return the container's exit code
 */
private int runAndBlock(ContainerId cId, String... cmd)
    throws IOException, ConfigurationException {
    String appId = "APP_" + getNextId();
    // mock just enough of the container for launchContainer()
    Container container = mock(Container.class);
    ContainerLaunchContext context = mock(ContainerLaunchContext.class);
    HashMap<String, String> env = new HashMap<String, String>();
    when(container.getContainerId()).thenReturn(cId);
    when(container.getLaunchContext()).thenReturn(context);
    when(context.getEnvironment()).thenReturn(env);
    String script = writeScriptFile(cmd);
    Path scriptPath = new Path(script);
    // no real tokens needed for these tests
    Path tokensPath = new Path("/dev/null");
    Path workDir = new Path(workSpace.getAbsolutePath());
    Path pidFile = new Path(workDir, "pid.txt");
    exec.activateContainer(cId, pidFile);
    return exec.launchContainer(new ContainerStartContext.Builder()
        .setContainer(container)
        .setNmPrivateContainerScriptPath(scriptPath)
        .setNmPrivateTokensPath(tokensPath)
        .setUser(appSubmitter)
        .setAppId(appId)
        .setContainerWorkDir(workDir)
        .setLocalDirs(dirsHandler.getLocalDirs())
        .setLogDirs(dirsHandler.getLogDirs())
        .build());
}
/**
 * Starts the localizer twice (two containers of one app) through the native
 * executor, using a mocked localizer main, then cleans the user appcache.
 */
@Test
public void testContainerLocalizer() throws Exception {
    Assume.assumeTrue(shouldRun());
    String locId = "container_01_01";
    Path nmPrivateContainerTokensPath =
        dirsHandler
            .getLocalPathForWrite(ResourceLocalizationService.NM_PRIVATE_DIR
                + Path.SEPARATOR
                + String.format(ContainerLocalizer.TOKEN_FILE_NAME_FMT, locId));
    files.create(nmPrivateContainerTokensPath, EnumSet.of(CREATE, OVERWRITE));
    Configuration config = new YarnConfiguration(conf);
    InetSocketAddress nmAddr =
        config.getSocketAddr(YarnConfiguration.NM_BIND_HOST,
            YarnConfiguration.NM_LOCALIZER_ADDRESS,
            YarnConfiguration.DEFAULT_NM_LOCALIZER_ADDRESS,
            YarnConfiguration.DEFAULT_NM_LOCALIZER_PORT);
    String appId = "application_01_01";
    // substitute a mock localizer main so no real NM is required
    exec = new LinuxContainerExecutor() {
        @Override
        public void buildMainArgs(List<String> command, String user,
            String appId, String locId, InetSocketAddress nmAddr,
            List<String> localDirs) {
            MockContainerLocalizer.buildMainArgs(command, user, appId, locId,
                nmAddr, localDirs);
        }
    };
    exec.setConf(conf);
    exec.startLocalizer(new LocalizerStartContext.Builder()
        .setNmPrivateContainerTokens(nmPrivateContainerTokensPath)
        .setNmAddr(nmAddr)
        .setUser(appSubmitter)
        .setAppId(appId)
        .setLocId(locId)
        .setDirsHandler(dirsHandler)
        .build());
    // second localizer for the same app must also succeed
    String locId2 = "container_01_02";
    Path nmPrivateContainerTokensPath2 =
        dirsHandler
            .getLocalPathForWrite(ResourceLocalizationService.NM_PRIVATE_DIR
                + Path.SEPARATOR
                + String.format(ContainerLocalizer.TOKEN_FILE_NAME_FMT, locId2));
    files.create(nmPrivateContainerTokensPath2, EnumSet.of(CREATE, OVERWRITE));
    exec.startLocalizer(new LocalizerStartContext.Builder()
        .setNmPrivateContainerTokens(nmPrivateContainerTokensPath2)
        .setNmAddr(nmAddr)
        .setUser(appSubmitter)
        .setAppId(appId)
        .setLocId(locId2)
        .setDirsHandler(dirsHandler)
        .build());
    cleanupUserAppCache(appSubmitter);
}
/**
 * Launches a container that touches a file and verifies the file ends up
 * owned by the configured non-secure run-as user.
 */
@Test
public void testContainerLaunch() throws Exception {
    Assume.assumeTrue(shouldRun());
    String expectedRunAsUser =
        conf.get(YarnConfiguration.NM_NONSECURE_MODE_LOCAL_USER_KEY,
            YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LOCAL_USER);
    File touchFile = new File(workSpace, "touch-file");
    int ret = runAndBlock("touch", touchFile.getAbsolutePath());
    assertEquals(0, ret);
    FileStatus fileStatus =
        FileContext.getLocalFSFileContext().getFileStatus(
            new Path(touchFile.getAbsolutePath()));
    assertEquals(expectedRunAsUser, fileStatus.getOwner());
    cleanupAppFiles(expectedRunAsUser);
}

/**
 * With limit-users disabled in non-secure mode, the container must run as
 * the actual application submitter rather than the configured local user.
 */
@Test
public void testNonSecureRunAsSubmitter() throws Exception {
    Assume.assumeTrue(shouldRun());
    Assume.assumeFalse(UserGroupInformation.isSecurityEnabled());
    String expectedRunAsUser = appSubmitter;
    conf.set(YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS, "false");
    exec.setConf(conf);
    File touchFile = new File(workSpace, "touch-file");
    int ret = runAndBlock("touch", touchFile.getAbsolutePath());
    assertEquals(0, ret);
    FileStatus fileStatus =
        FileContext.getLocalFSFileContext().getFileStatus(
            new Path(touchFile.getAbsolutePath()));
    assertEquals(expectedRunAsUser, fileStatus.getOwner());
    cleanupAppFiles(expectedRunAsUser);
    // reset conf
    conf.unset(YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS);
    exec.setConf(conf);
}
/**
 * Launches a long-running container on a background thread, signals it with
 * TERM, and verifies the launcher thread terminates.
 */
@Test
public void testContainerKill() throws Exception {
    Assume.assumeTrue(shouldRun());
    final ContainerId sleepId = getNextContainerId();
    Thread t = new Thread() {
        public void run() {
            try {
                runAndBlock(sleepId, "sleep", "100");
            } catch (IOException|ConfigurationException e) {
                LOG.warn("Caught exception while running sleep", e);
            }
        };
    };
    t.setDaemon(true); // If it does not exit we shouldn't block the test.
    t.start();
    assertTrue(t.isAlive());
    // poll for the pid: the container takes a moment to start
    String pid = null;
    int count = 10;
    while ((pid = exec.getProcessId(sleepId)) == null && count > 0) {
        LOG.info("Sleeping for 200 ms before checking for pid ");
        Thread.sleep(200);
        count--;
    }
    assertNotNull(pid);
    LOG.info("Going to killing the process.");
    exec.signalContainer(new ContainerSignalContext.Builder()
        .setUser(appSubmitter)
        .setPid(pid)
        .setSignal(Signal.TERM)
        .build());
    // Bounded join instead of a fixed 100 ms sleep: the previous fixed sleep
    // was racy on slow machines, failing the assertFalse below spuriously.
    t.join(10000);
    assertFalse(t.isAlive());
    cleanupAppFiles(appSubmitter);
}
@Test
public void testCGroups() throws Exception {
    Assume.assumeTrue(shouldRun());
    // Requires an externally supplied cgroups mount point; skip otherwise.
    String cgroupsMount = System.getProperty("cgroups.mount");
    Assume.assumeTrue((cgroupsMount != null) && !cgroupsMount.isEmpty());
    assertTrue("Cgroups mount point does not exist", new File(
        cgroupsMount).exists());
    List<String> cgroupKVs = new ArrayList<>();
    String hierarchy = "hadoop-yarn";
    String[] controllers = { "cpu", "net_cls" };
    for (String controller : controllers) {
        cgroupKVs.add(controller + "=" + cgroupsMount + "/" + controller);
        assertTrue(new File(cgroupsMount, controller).exists());
    }
    try {
        exec.mountCgroups(cgroupKVs, hierarchy);
        // Each controller must be mounted and carry the expected hierarchy
        // with a tasks file directly under it.
        for (String controller : controllers) {
            assertTrue(controller + " cgroup not mounted", new File(
                cgroupsMount + "/" + controller + "/tasks").exists());
            assertTrue(controller + " cgroup hierarchy not created",
                new File(cgroupsMount + "/" + controller + "/" + hierarchy).exists());
            assertTrue(controller + " cgroup hierarchy created incorrectly",
                new File(cgroupsMount + "/" + controller + "/" + hierarchy
                    + "/tasks").exists());
        }
    } catch (IOException ie) {
        // BUGFIX: fail() throws AssertionError, so the former "throw ie;"
        // after it was unreachable dead code and has been removed.
        fail("Couldn't mount cgroups " + ie.toString());
    }
}
@Test
public void testLocalUser() throws Exception {
    Assume.assumeTrue(shouldRun());
    try {
        // Non-secure mode, defaults: runs as the configured default local user.
        Configuration conf = new YarnConfiguration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
            "simple");
        UserGroupInformation.setConfiguration(conf);
        LinuxContainerExecutor executor = new LinuxContainerExecutor();
        executor.setConf(conf);
        Assert.assertEquals(
            YarnConfiguration.DEFAULT_NM_NONSECURE_MODE_LOCAL_USER,
            executor.getRunAsUser("foo"));
        // Non-secure mode with a custom local user configured.
        conf.set(YarnConfiguration.NM_NONSECURE_MODE_LOCAL_USER_KEY, "bar");
        executor = new LinuxContainerExecutor();
        executor.setConf(conf);
        Assert.assertEquals("bar", executor.getRunAsUser("foo"));
        // Non-secure mode with user limiting disabled: run as the submitter.
        conf.set(YarnConfiguration.NM_NONSECURE_MODE_LOCAL_USER_KEY, "bar");
        conf.setBoolean(YarnConfiguration.NM_NONSECURE_MODE_LIMIT_USERS, false);
        executor = new LinuxContainerExecutor();
        executor.setConf(conf);
        Assert.assertEquals("foo", executor.getRunAsUser("foo"));
        // Secure (kerberos) mode: always run as the submitting user.
        conf = new YarnConfiguration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
            "kerberos");
        UserGroupInformation.setConfiguration(conf);
        executor = new LinuxContainerExecutor();
        executor.setConf(conf);
        Assert.assertEquals("foo", executor.getRunAsUser("foo"));
    } finally {
        // Restore simple auth so later tests see a non-secure UGI.
        Configuration conf = new YarnConfiguration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
            "simple");
        UserGroupInformation.setConfiguration(conf);
    }
}
@Test
public void testNonsecureUsernamePattern() throws Exception {
    Assume.assumeTrue(shouldRun());
    try {
        // nonsecure default
        Configuration conf = new YarnConfiguration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
            "simple");
        UserGroupInformation.setConfiguration(conf);
        LinuxContainerExecutor lce = new LinuxContainerExecutor();
        lce.setConf(conf);
        lce.verifyUsernamePattern("foo");
        // BUGFIX: the former catch (Throwable) { fail(ex.toString()); } also
        // caught the AssertionError thrown by the inner fail(), producing a
        // confusing message. Catch only the expected exception; anything else
        // (including the AssertionError) now propagates and fails the test
        // with a clear cause.
        try {
            lce.verifyUsernamePattern("foo/x");
            fail("username 'foo/x' should have been rejected");
        } catch (IllegalArgumentException expected) {
            // expected: name does not match the default pattern
        }
        // nonsecure custom setting
        conf.set(YarnConfiguration.NM_NONSECURE_MODE_USER_PATTERN_KEY, "foo");
        lce = new LinuxContainerExecutor();
        lce.setConf(conf);
        lce.verifyUsernamePattern("foo");
        try {
            lce.verifyUsernamePattern("bar");
            fail("username 'bar' should have been rejected by custom pattern");
        } catch (IllegalArgumentException expected) {
            // expected: name does not match the custom pattern
        }
        // secure, pattern matching does not kick in.
        conf = new YarnConfiguration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
            "kerberos");
        UserGroupInformation.setConfiguration(conf);
        lce = new LinuxContainerExecutor();
        lce.setConf(conf);
        lce.verifyUsernamePattern("foo");
        lce.verifyUsernamePattern("foo/w");
    } finally {
        // Restore simple auth so later tests see a non-secure UGI.
        Configuration conf = new YarnConfiguration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
            "simple");
        UserGroupInformation.setConfiguration(conf);
    }
}
@Test(timeout = 10000)
public void testPostExecuteAfterReacquisition() throws Exception {
    Assume.assumeTrue(shouldRun());
    // Fabricate a container ID that was never actually launched.
    ApplicationId appId = ApplicationId.newInstance(12345, 67890);
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.newInstance(appId, 54321);
    ContainerId containerId = ContainerId.newContainerId(attemptId, 9876);
    Configuration conf = new YarnConfiguration();
    // Install the recording stub so we can observe postExecute calls.
    conf.setClass(YarnConfiguration.NM_LINUX_CONTAINER_RESOURCES_HANDLER,
        TestResourceHandler.class, LCEResourcesHandler.class);
    LinuxContainerExecutor executor = new LinuxContainerExecutor();
    executor.setConf(conf);
    try {
        executor.init(null);
    } catch (IOException e) {
        // expected if LCE isn't setup right, but not necessary for this test
    }
    // Mock just enough of the container for reacquisition to proceed.
    Container container = mock(Container.class);
    ContainerLaunchContext launchContext = mock(ContainerLaunchContext.class);
    HashMap<String, String> env = new HashMap<>();
    when(container.getLaunchContext()).thenReturn(launchContext);
    when(launchContext.getEnvironment()).thenReturn(env);
    executor.reacquireContainer(new ContainerReacquisitionContext.Builder()
        .setContainer(container)
        .setUser("foouser")
        .setContainerId(containerId)
        .build());
    assertTrue("postExec not called after reacquisition",
        TestResourceHandler.postExecContainers.contains(containerId));
}
/**
 * Resources-handler stub that records which containers had
 * {@link #postExecute} invoked, so tests can assert on it.
 */
private static class TestResourceHandler implements LCEResourcesHandler {
    static Set<ContainerId> postExecContainers = new HashSet<>();

    @Override
    public void setConf(Configuration conf) {
        // no-op: configuration is irrelevant for this stub
    }

    @Override
    public Configuration getConf() {
        return null;
    }

    @Override
    public void init(LinuxContainerExecutor lce) throws IOException {
        // no-op
    }

    @Override
    public void preExecute(ContainerId containerId, Resource containerResource)
        throws IOException {
        // no-op
    }

    @Override
    public void postExecute(ContainerId containerId) {
        // Record the container for later inspection by the test.
        postExecContainers.add(containerId);
    }

    @Override
    public String getResourcesOption(ContainerId containerId) {
        return null;
    }
}
}
|
|
package com.brian.android.myapplication;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.animation.RectEvaluator;
import android.graphics.PointF;
import android.graphics.Rect;
import android.support.annotation.NonNull;
import android.support.v7.widget.LinearSmoothScroller;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.LinearInterpolator;
import java.util.ArrayList;
import java.util.List;
/**
 * A RecyclerView LayoutManager that lays items out as facing book pages and
 * animates page turns by rotating pages around the spine while the user
 * scrolls horizontally.
 */
class BookLayoutManager extends RecyclerView.LayoutManager implements RecyclerView.SmoothScroller.ScrollVectorProvider {
    @SuppressWarnings("unused")
    private static final String TAG = "BookLayoutManager";

    // Page slots: the visible left/right pages, the pages stacked on top of
    // them (being turned in) and underneath them (revealed by a turn).
    private View pageLeft, pageLeftBottom, pageLeftTop;
    private View pageRight, pageRightBottom, pageRightTop;
    // Adapter position of the current left page.
    private int pageLeftPosition;
    // Left-page position requested via scrollToPosition(), consumed on the
    // next layout pass.
    private int pendingPageLeftPosition;
    // Accumulated horizontal scroll of the in-progress page turn; 0 = settled.
    private int scrollX;
    // A page kept attached (and clip-animated) after it left normal layout.
    private View retainingPage;
    private int retainingPagePosition;

    View getPageLeft() {
        return pageLeft;
    }
    View getPageLeftBottom() {
        return pageLeftBottom;
    }
    View getPageLeftTop() {
        return pageLeftTop;
    }
    View getPageRight() {
        return pageRight;
    }
    View getPageRightBottom() {
        return pageRightBottom;
    }
    View getPageRightTop() {
        return pageRightTop;
    }
    boolean isLeftPage(View view) {
        return isLeftPage(getPosition(view));
    }

    /**
     * Detaches the page at {@code position} from normal layout and animates
     * its clip bounds: first shrinking to {@code retainingRect} over
     * duration/4 ms, then collapsing to zero width after {@code duration} ms,
     * at which point the page is released and a relayout is requested.
     */
    void retainPage(int position, Rect retainingRect, long duration) {
        if (isLeftPage(position)) {
            retainingPage = pageLeft;
            pageLeft = null;
        } else {
            retainingPage = pageRight;
            pageRight = null;
        }
        if (retainingPage == null) {
            return;
        }
        retainingPagePosition = position;
        // Re-order, put the retaining page on the top.
        detachView(retainingPage);
        attachView(retainingPage);
        Rect local = new Rect();
        retainingPage.getLocalVisibleRect(local);
        Rect from = new Rect(local);
        ObjectAnimator anim1 = ObjectAnimator.ofObject(retainingPage, "clipBounds", new RectEvaluator(), from, retainingRect);
        anim1.setDuration(duration / 4);
        anim1.setInterpolator(new LinearInterpolator());
        anim1.start();
        Rect end = new Rect(retainingRect);
        end.left = end.right;
        ObjectAnimator anim2 = ObjectAnimator.ofObject(retainingPage, "clipBounds", new RectEvaluator(), retainingRect, end);
        anim2.setDuration(150);
        anim2.setStartDelay(duration);
        // BUGFIX: register the end listener BEFORE start(); it was previously
        // added after start(), which risks missing the end callback.
        anim2.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                retainingPagePosition = RecyclerView.NO_POSITION;
                requestLayout();
            }
        });
        anim2.start();
    }

    @SuppressWarnings("unused")
    int getPageLeftPosition() {
        return pageLeftPosition;
    }

    /**
     * Find a visible view for snap helper to calculate snap distance.
     * Normally, it should be either the left page or the right page.
     * @return the visible view.
     */
    View findSnapView() {
        if (pageLeft != null) {
            return pageLeft;
        }
        if (pageRight != null) {
            return pageRight;
        }
        return null;
    }

    /**
     * Calculate distance for snap helper to snap.
     * @param targetView If the view is either pageLeft or pageRight, it is a normal snap.
     * If the view is either pageLeftTop or pageRightTop, it is a flip.
     * @return the distance to snap.
     */
    int[] calculateDistanceToFinalSnap(@NonNull View targetView) {
        int turnPageDistance = getTurnPageScrollDistance();
        int[] out = new int[2];
        if (targetView == pageLeft || targetView == pageRight) {
            // Before the half-turn point snap back; past it complete the turn.
            if (Math.abs(scrollX) < turnPageDistance) {
                out[0] = -scrollX;
            } else {
                out[0] = Integer.signum(scrollX) * (2 * turnPageDistance - Math.abs(scrollX));
            }
        }
        if (targetView == pageLeftTop) {
            if (scrollX >= 0) {
                out[0] = 2 * turnPageDistance - scrollX;
            } else {
                out[0] = -scrollX;
            }
        }
        if (targetView == pageRightTop) {
            if (scrollX <= 0) {
                out[0] = - 2 * turnPageDistance - scrollX;
            } else {
                out[0] = -scrollX;
            }
        }
        return out;
    }

    /**
     * For snap helper, find item position according to the fling velocity.
     * @param velocityX Fling velocity. velocityX > 0 for fling forward.
     * @return position of pageLeftTop or pageRightTop if non-null. Otherwise,
     * return position of pageLeft or pageRight.
     */
    int findTargetSnapPosition(int velocityX) {
        if (velocityX != 0) {
            if (velocityX > 0) {
                if (pageLeftTop != null) {
                    return getPosition(pageLeftTop);
                }
                if (pageLeft != null) {
                    return getPosition(pageLeft);
                }
            }
            if (velocityX < 0) {
                if (pageRightTop != null) {
                    return getPosition(pageRightTop);
                }
                if (pageRight != null) {
                    return getPosition(pageRight);
                }
            }
        }
        return RecyclerView.NO_POSITION;
    }

    BookLayoutManager() {
        super();
        pendingPageLeftPosition = RecyclerView.NO_POSITION;
        retainingPagePosition = RecyclerView.NO_POSITION;
    }

    @Override
    public RecyclerView.LayoutParams generateDefaultLayoutParams() {
        return new RecyclerView.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
    }

    @Override
    public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) {
        int pageLeftPosition = findPageLeftPosition();
        fillPages(pageLeftPosition, recycler, state);
        turnPageByScroll();
    }

    @Override
    public boolean canScrollHorizontally() {
        return true;
    }

    @Override
    public int scrollHorizontallyBy(int dx, RecyclerView.Recycler recycler, RecyclerView.State state) {
        if (getChildCount() <= 1) {
            return 0;
        }
        // Update scroll position, clamped to one full page turn in either
        // direction (2 * the 90-degree distance).
        int fullTurnPageScrollDistance = 2 * getTurnPageScrollDistance();
        if (scrollX > 0) {
            // Scroll forward.
            if (scrollX + dx > fullTurnPageScrollDistance) {
                scrollX = fullTurnPageScrollDistance;
            } else if (scrollX + dx < 0) {
                scrollX = 0;
            } else {
                scrollX += dx;
            }
        } else if (scrollX < 0) {
            // Scroll backward.
            if (scrollX + dx < -fullTurnPageScrollDistance) {
                scrollX = -fullTurnPageScrollDistance;
            } else if (scrollX + dx > 0) {
                scrollX = 0;
            } else {
                scrollX += dx;
            }
        } else {
            // Not mid-turn: refuse to start a turn with no page to turn to.
            if (dx > 0 && pageLeftTop == null) {
                return 0;
            }
            if (dx < 0 && pageRightTop == null) {
                return 0;
            }
            // Initial scroll.
            scrollX = dx;
            if (Math.abs(scrollX) > fullTurnPageScrollDistance) {
                scrollX = Integer.signum(dx) * fullTurnPageScrollDistance;
            }
        }
        checkScrollEnd(recycler, state);
        turnPageByScroll();
        return dx;
    }

    @Override
    public void scrollToPosition(int position) {
        if (position < 0 || position >= getItemCount()) {
            return;
        }
        // Already showing this position; nothing to do.
        if (position == pageLeftPosition || position == pageLeftPosition + 1) {
            return;
        }
        boolean isLeftPage = (position - pageLeftPosition) % 2 == 0;
        pendingPageLeftPosition = isLeftPage ? position : position - 1;
        requestLayout();
    }

    @Override
    public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) {
        if (position < 0 || position >= getItemCount()) {
            return;
        }
        if (position == pageLeftPosition || position == pageLeftPosition + 1) {
            return;
        }
        boolean isLeftPage = (position - pageLeftPosition) % 2 == 0;
        int targetPosition = isLeftPage ? position : position - 1;
        LinearSmoothScroller linearSmoothScroller = new LinearSmoothScroller(recyclerView.getContext()) {
            @Override
            protected void onTargetFound(View targetView, RecyclerView.State state, Action action) {
                // Drive a full-page-turn scroll in the proper direction.
                boolean forward = getPosition(targetView) >= pageLeftPosition + 2;
                int fullTurnPageScrollDistance = 2 * getTurnPageScrollDistance();
                final int dx = forward ? fullTurnPageScrollDistance : -fullTurnPageScrollDistance;
                final int time = calculateTimeForDeceleration(Math.abs(dx));
                if (time > 0) {
                    action.update(dx, 0, time * 2, mDecelerateInterpolator);
                }
            }
        };
        linearSmoothScroller.setTargetPosition(targetPosition);
        startSmoothScroll(linearSmoothScroller);
    }

    @Override
    public PointF computeScrollVectorForPosition(int targetPosition) {
        if (getChildCount() == 0) {
            return null;
        }
        final int direction = targetPosition < pageLeftPosition ? -1 : 1;
        Log.i(TAG, "compute scroll vector for position, position = " + targetPosition + ", current position = " + pageLeftPosition);
        return new PointF(direction, 0);
    }

    // When a full turn completes, advance/retreat the spread and reset scroll.
    private void checkScrollEnd(RecyclerView.Recycler recycler, RecyclerView.State state) {
        int fullTurnPageScrollDistance = 2 * getTurnPageScrollDistance();
        if (scrollX == fullTurnPageScrollDistance) {
            fillPages(pageLeftPosition + 2, recycler, state);
            scrollX = 0;
        }
        if (scrollX == -fullTurnPageScrollDistance) {
            fillPages(pageLeftPosition - 2, recycler, state);
            scrollX = 0;
        }
    }

    // Adds the adapter view for position, or returns null when out of range.
    private View addViewFromRecycler(int position, RecyclerView.Recycler recycler, RecyclerView.State state) {
        if (position < 0 || position > state.getItemCount() - 1) {
            return null;
        }
        View view = recycler.getViewForPosition(position);
        addView(view);
        return view;
    }

    // Measures and lays a page into the left half, pivoting on the spine.
    private void layoutLeftPage(View view) {
        if (view == null) {
            return;
        }
        RecyclerView.LayoutParams lp = (RecyclerView.LayoutParams) view.getLayoutParams();
        Rect decorRect = new Rect();
        calculateItemDecorationsForChild(view, decorRect);
        int size = Math.max(0, getWidth() / 2 - getPaddingLeft() + lp.leftMargin + lp.rightMargin + decorRect.left + decorRect.right);
        int widthSpec = View.MeasureSpec.makeMeasureSpec(size, View.MeasureSpec.AT_MOST);
        int heightSpec = getChildMeasureSpec(
            getHeight(), getHeightMode(),
            getPaddingTop() + getPaddingBottom() + lp.topMargin + lp.bottomMargin + decorRect.top + decorRect.bottom,
            lp.height,
            canScrollVertically()
        );
        view.measure(widthSpec, heightSpec);
        layoutDecorated(view, 0, 0, getWidth() / 2, getHeight() - getPaddingTop() - getPaddingBottom());
        // Deep camera distance flattens the perspective of the 3D rotation.
        view.setCameraDistance(5 * getWidth());
        view.setPivotX(view.getWidth());
        view.setPivotY(view.getHeight() / 2);
    }

    // Measures and lays a page into the right half, pivoting on the spine.
    private void layoutRightPage(View view) {
        if (view == null) {
            return;
        }
        RecyclerView.LayoutParams lp = (RecyclerView.LayoutParams) view.getLayoutParams();
        Rect decorRect = new Rect();
        calculateItemDecorationsForChild(view, decorRect);
        int size = Math.max(0, getWidth() / 2 - getPaddingRight() + lp.leftMargin + lp.rightMargin + decorRect.left + decorRect.right);
        int widthSpec = View.MeasureSpec.makeMeasureSpec(size, View.MeasureSpec.AT_MOST);
        int heightSpec = getChildMeasureSpec(
            getHeight(), getHeightMode(),
            getPaddingTop() + getPaddingBottom() + lp.topMargin + lp.bottomMargin + decorRect.top + decorRect.bottom,
            lp.height,
            canScrollVertically()
        );
        view.measure(widthSpec, heightSpec);
        layoutDecorated(view, getWidth() / 2, 0, getWidth(), getHeight() - getPaddingTop() - getPaddingBottom());
        view.setCameraDistance(5 * getWidth());
        view.setPivotX(0);
        view.setPivotY(view.getHeight() / 2);
    }

    private boolean isLeftPage(int position) {
        return (position - pageLeftPosition) % 2 == 0;
    }

    /**
     * (Re)binds the six page slots around {@code pageLeftPosition}, recycling
     * views no longer in use and preserving the retaining page, if any.
     */
    private void fillPages(int pageLeftPosition, RecyclerView.Recycler recycler, RecyclerView.State state) {
        if (retainingPage != null) {
            detachView(retainingPage);
        }
        // Cache all existing page view before detaching.
        List<View> cacheView = new ArrayList<>();
        for (int i = 0; i < getChildCount(); i ++) {
            cacheView.add(getChildAt(i));
        }
        detachAndScrapAttachedViews(recycler);
        // According to page position, attach page views.
        pageLeftBottom = addViewFromRecycler(pageLeftPosition - 2, recycler, state);
        pageLeft = addViewFromRecycler(pageLeftPosition, recycler, state);
        pageLeftTop = addViewFromRecycler(pageLeftPosition + 2, recycler, state);
        pageRightBottom = addViewFromRecycler(pageLeftPosition + 3, recycler, state);
        pageRight = addViewFromRecycler(pageLeftPosition + 1, recycler, state);
        pageRightTop = addViewFromRecycler(pageLeftPosition - 1, recycler, state);
        this.pageLeftPosition = pageLeftPosition;
        // Recycle useless page views
        for (View view : cacheView) {
            boolean inUse = (view == pageLeftBottom || view == pageLeft || view == pageLeftTop
                || view == pageRightBottom || view == pageRight || view == pageRightTop);
            if (!inUse) {
                recycler.recycleView(view);
            }
        }
        // Measure and layout pages
        layoutLeftPage(pageLeftBottom);
        layoutLeftPage(pageLeft);
        layoutLeftPage(pageLeftTop);
        layoutRightPage(pageRightBottom);
        layoutRightPage(pageRight);
        layoutRightPage(pageRightTop);
        // Attach the retaining page or recycle when the retaining time up.
        if (retainingPage != null) {
            if (retainingPagePosition == RecyclerView.NO_POSITION) {
                retainingPage.setClipBounds(null);
                recycler.recycleView(retainingPage);
                retainingPage = null;
            } else {
                attachView(retainingPage);
            }
        }
    }

    // Derives the left-page position from pending state or any attached slot.
    private int findPageLeftPosition() {
        if (pendingPageLeftPosition != RecyclerView.NO_POSITION) {
            int pageLeftPosition = pendingPageLeftPosition;
            pendingPageLeftPosition = RecyclerView.NO_POSITION;
            return pageLeftPosition;
        }
        if (pageLeft != null) {
            return getPosition(pageLeft);
        }
        if (pageRight != null) {
            return getPosition(pageRight) - 1;
        }
        if (pageRightTop != null) {
            return getPosition(pageRightTop) + 1;
        }
        if (pageLeftTop != null) {
            return getPosition(pageLeftTop) - 2;
        }
        if (pageLeftBottom != null) {
            return getPosition(pageLeftBottom) + 2;
        }
        if (pageRightBottom != null) {
            return getPosition(pageRightBottom) - 3;
        }
        return 0;
    }

    /**
     * Get the total scrolling distance for page to turn 90 degree.
     * @return the scroll distance.
     */
    private int getTurnPageScrollDistance() {
        return getWidth() / 2;
    }

    // Maps scrollX to page rotations/visibility for the current turn state.
    private void turnPageByScroll() {
        float rotation;
        if (pageLeftBottom != null) {
            pageLeftBottom.setRotationY(0);
        }
        if (pageRightBottom != null) {
            pageRightBottom.setRotationY(0);
        }
        if (scrollX > 0) {
            // Turning forward: right page rotates away, left-top rotates in.
            if (pageLeft != null) {
                pageLeft.setRotationY(0);
            }
            if (pageRightTop != null) {
                pageRightTop.setRotationY(-90);
            }
            int turnPageScrollDistance = getTurnPageScrollDistance();
            if (scrollX < turnPageScrollDistance) {
                rotation = 90 * scrollX / turnPageScrollDistance;
                if (pageRight != null) {
                    pageRight.setRotationY(-rotation);
                }
                if (pageLeftTop != null) {
                    pageLeftTop.setRotationY(90);
                }
            } else {
                if (pageRight != null) {
                    pageRight.setRotationY(-90);
                }
                rotation = 90 * (2 * turnPageScrollDistance - scrollX) / turnPageScrollDistance;
                if (pageLeftTop != null) {
                    pageLeftTop.setRotationY(rotation);
                }
            }
        } else if (scrollX < 0) {
            // Turning backward: left page rotates away, right-top rotates in.
            if (pageRight != null) {
                pageRight.setRotationY(0);
            }
            if (pageLeftTop != null) {
                pageLeftTop.setRotationY(90);
            }
            int turnPageScrollDistance = getTurnPageScrollDistance();
            if (scrollX > -turnPageScrollDistance) {
                rotation = 90 * -scrollX / turnPageScrollDistance;
                if (pageLeft != null) {
                    pageLeft.setRotationY(rotation);
                }
                if (pageRightTop != null) {
                    pageRightTop.setRotationY(-90);
                }
            } else {
                if (pageLeft != null) {
                    pageLeft.setRotationY(90);
                }
                rotation = 90 * (2 * turnPageScrollDistance - (-scrollX)) / turnPageScrollDistance;
                if (pageRightTop != null) {
                    pageRightTop.setRotationY(-rotation);
                }
            }
        } else {
            // Settled: pages flat, top pages parked edge-on.
            if (pageLeft != null) {
                pageLeft.setRotationY(0);
            }
            if (pageLeftTop != null) {
                pageLeftTop.setRotationY(90);
            }
            if (pageRight != null) {
                pageRight.setRotationY(0);
            }
            if (pageRightTop != null) {
                pageRightTop.setRotationY(-90);
            }
        }
        // A page rotated fully edge-on is invisible.
        if (pageLeft != null) {
            pageLeft.setVisibility(pageLeft.getRotationY() == 90 ? View.INVISIBLE : View.VISIBLE);
        }
        if (pageLeftTop != null) {
            pageLeftTop.setVisibility(pageLeftTop.getRotationY() == 90 ? View.INVISIBLE : View.VISIBLE);
        }
        if (pageLeftBottom != null) {
            pageLeftBottom.setVisibility(View.VISIBLE);
        }
        if (pageRight != null) {
            pageRight.setVisibility(pageRight.getRotationY() == -90 ? View.INVISIBLE : View.VISIBLE);
        }
        if (pageRightTop != null) {
            pageRightTop.setVisibility(pageRightTop.getRotationY() == -90 ? View.INVISIBLE : View.VISIBLE);
        }
        if (pageRightBottom != null) {
            pageRightBottom.setVisibility(View.VISIBLE);
        }
    }
}
|
|
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.nearcache;
import com.hazelcast.config.EvictionConfig;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.monitor.NearCacheStats;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Shared assertions for {@code NearCacheRecordStore} implementations:
 * put/get/remove round-trips, clear/destroy, stats accounting, TTL and
 * max-idle expiration, and eviction-config construction.
 */
public abstract class NearCacheRecordStoreTestSupport extends CommonNearCacheTestSupport {

    /** Stores DEFAULT_RECORD_COUNT entries and verifies size and read-back. */
    protected void putAndGetRecord(InMemoryFormat inMemoryFormat) {
        NearCacheRecordStore<Integer, String> nearCacheRecordStore = createNearCacheRecordStore(
            createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat), inMemoryFormat);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.put(i, "Record-" + i);
        }
        assertEquals(DEFAULT_RECORD_COUNT, nearCacheRecordStore.size());
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            assertEquals("Record-" + i, nearCacheRecordStore.get(i));
        }
    }

    /** Verifies that removing every stored entry empties the store. */
    protected void putAndRemoveRecord(InMemoryFormat inMemoryFormat) {
        NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat);
        NearCacheRecordStore<Integer, String> nearCacheRecordStore = createNearCacheRecordStore(nearCacheConfig, inMemoryFormat);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.put(i, "Record-" + i);
            // ensure that they are stored
            assertNotNull(nearCacheRecordStore.get(i));
        }
        assertEquals(DEFAULT_RECORD_COUNT, nearCacheRecordStore.size());
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.remove(i);
            assertNull(nearCacheRecordStore.get(i));
        }
        assertEquals(0, nearCacheRecordStore.size());
    }

    /** Verifies that either clear() or destroy() leaves the store empty. */
    protected void clearRecordsOrDestroyStore(InMemoryFormat inMemoryFormat, boolean destroy) {
        NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat);
        NearCacheRecordStore<Integer, String> nearCacheRecordStore = createNearCacheRecordStore(nearCacheConfig, inMemoryFormat);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.put(i, "Record-" + i);
            // ensure that they are stored
            assertNotNull(nearCacheRecordStore.get(i));
        }
        if (destroy) {
            nearCacheRecordStore.destroy();
        } else {
            nearCacheRecordStore.clear();
        }
        assertEquals(0, nearCacheRecordStore.size());
    }

    /**
     * Verifies hit/miss/entry-count/memory-cost accounting of the store's
     * NearCacheStats across puts, mixed hit/miss gets, removes and clear.
     */
    protected void statsCalculated(InMemoryFormat inMemoryFormat) {
        long creationStartTime = System.currentTimeMillis();
        NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat);
        NearCacheRecordStore<Integer, String> nearCacheRecordStore = createNearCacheRecordStore(nearCacheConfig, inMemoryFormat);
        long creationEndTime = System.currentTimeMillis();
        int expectedEntryCount = 0;
        int expectedHits = 0;
        int expectedMisses = 0;
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.put(i, "Record-" + i);
            expectedEntryCount++;
        }
        // Probing key i * 3 deliberately produces a mix: hits while
        // i * 3 < DEFAULT_RECORD_COUNT, misses beyond it.
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            if (nearCacheRecordStore.get(i * 3) != null) {
                expectedHits++;
            } else {
                expectedMisses++;
            }
        }
        NearCacheStats nearCacheStats = nearCacheRecordStore.getNearCacheStats();
        long memoryCostWhenFull = nearCacheStats.getOwnedEntryMemoryCost();
        // Note that System.currentTimeMillis() is not monotonically increasing.
        // Below assertions can fail anytime but for testing purposes we can use `assertTrueEventually`.
        assertTrue(nearCacheStats.getCreationTime() >= creationStartTime);
        assertTrue(nearCacheStats.getCreationTime() <= creationEndTime);
        assertEquals(expectedHits, nearCacheStats.getHits());
        assertEquals(expectedMisses, nearCacheStats.getMisses());
        assertEquals(expectedEntryCount, nearCacheStats.getOwnedEntryCount());
        switch (inMemoryFormat) {
            case BINARY:
                assertTrue(memoryCostWhenFull > 0);
                break;
            case OBJECT:
                assertEquals(0, memoryCostWhenFull);
                // CONSISTENCY FIX: trailing break added to match the other
                // switches below (was an implicit fall-out of the last case).
                break;
        }
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            if (nearCacheRecordStore.remove(i * 3)) {
                expectedEntryCount--;
            }
        }
        assertEquals(expectedEntryCount, nearCacheStats.getOwnedEntryCount());
        switch (inMemoryFormat) {
            case BINARY:
                assertTrue(nearCacheStats.getOwnedEntryMemoryCost() > 0);
                assertTrue(nearCacheStats.getOwnedEntryMemoryCost() < memoryCostWhenFull);
                break;
            case OBJECT:
                assertEquals(0, nearCacheStats.getOwnedEntryMemoryCost());
                break;
        }
        nearCacheRecordStore.clear();
        switch (inMemoryFormat) {
            case BINARY:
            case OBJECT:
                assertEquals(0, nearCacheStats.getOwnedEntryMemoryCost());
                break;
        }
    }

    /** Verifies entries become unreadable after the configured TTL elapses. */
    protected void ttlEvaluated(InMemoryFormat inMemoryFormat) {
        int ttlSeconds = 3;
        NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat);
        nearCacheConfig.setTimeToLiveSeconds(ttlSeconds);
        NearCacheRecordStore<Integer, String> nearCacheRecordStore = createNearCacheRecordStore(
            nearCacheConfig, inMemoryFormat);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.put(i, "Record-" + i);
        }
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            assertNotNull(nearCacheRecordStore.get(i));
        }
        sleepSeconds(ttlSeconds + 1);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            assertNull(nearCacheRecordStore.get(i));
        }
    }

    /** Verifies entries expire after the configured max-idle time elapses. */
    protected void maxIdleTimeEvaluatedSuccessfully(InMemoryFormat inMemoryFormat) {
        int maxIdleSeconds = 3;
        NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat);
        nearCacheConfig.setMaxIdleSeconds(maxIdleSeconds);
        NearCacheRecordStore<Integer, String> nearCacheRecordStore = createNearCacheRecordStore(
            nearCacheConfig, inMemoryFormat);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.put(i, "Record-" + i);
        }
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            assertNotNull(nearCacheRecordStore.get(i));
        }
        sleepSeconds(maxIdleSeconds + 1);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            assertNull(nearCacheRecordStore.get(i));
        }
    }

    /**
     * Verifies doExpiration() purges expired entries (by TTL or max-idle,
     * per {@code useIdleTime}) and resets the ownership stats.
     */
    protected void expiredRecordsCleanedUpSuccessfully(InMemoryFormat inMemoryFormat, boolean useIdleTime) {
        int cleanUpThresholdSeconds = 3;
        NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat);
        if (useIdleTime) {
            nearCacheConfig.setMaxIdleSeconds(cleanUpThresholdSeconds);
        } else {
            nearCacheConfig.setTimeToLiveSeconds(cleanUpThresholdSeconds);
        }
        NearCacheRecordStore<Integer, String> nearCacheRecordStore = createNearCacheRecordStore(
            nearCacheConfig, inMemoryFormat);
        for (int i = 0; i < DEFAULT_RECORD_COUNT; i++) {
            nearCacheRecordStore.put(i, "Record-" + i);
        }
        sleepSeconds(cleanUpThresholdSeconds + 1);
        nearCacheRecordStore.doExpiration();
        assertEquals(0, nearCacheRecordStore.size());
        NearCacheStats nearCacheStats = nearCacheRecordStore.getNearCacheStats();
        assertEquals(0, nearCacheStats.getOwnedEntryCount());
        assertEquals(0, nearCacheStats.getOwnedEntryMemoryCost());
    }

    /** Builds a record store configured with the given max-size policy and size. */
    protected void createNearCacheWithMaxSizePolicy(InMemoryFormat inMemoryFormat, EvictionConfig.MaxSizePolicy maxSizePolicy,
                                                    int size) {
        NearCacheConfig nearCacheConfig = createNearCacheConfig(DEFAULT_NEAR_CACHE_NAME, inMemoryFormat);
        EvictionConfig evictionConfig = new EvictionConfig();
        evictionConfig.setMaximumSizePolicy(maxSizePolicy);
        evictionConfig.setSize(size);
        nearCacheConfig.setEvictionConfig(evictionConfig);
        createNearCacheRecordStore(nearCacheConfig, inMemoryFormat);
    }
}
|
|
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.thoughtworks.go.util;
import com.rits.cloning.Cloner;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.exceptions.NoSuchEnvironmentException;
import com.thoughtworks.go.config.materials.Filter;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.PackageMaterialConfig;
import com.thoughtworks.go.config.materials.svn.SvnMaterialConfig;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.config.server.security.ldap.BaseConfig;
import com.thoughtworks.go.config.server.security.ldap.BasesConfig;
import com.thoughtworks.go.domain.ServerSiteUrlConfig;
import com.thoughtworks.go.domain.config.Admin;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.domain.materials.svn.Subversion;
import com.thoughtworks.go.domain.materials.svn.SvnCommand;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.server.util.ServerVersion;
import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.service.ConfigRepository;
import org.apache.commons.io.FileUtils;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.List;
import static com.thoughtworks.go.config.PipelineConfigs.DEFAULT_GROUP;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
/**
* @understands how to edit the cruise config file for testing
*/
public class GoConfigFileHelper {
private final File configFile;
private final String originalXml;
public GoConfigMother goConfigMother = new GoConfigMother();
private File passwordFile = null;
private GoConfigDao goConfigDao;
private CachedGoConfig cachedGoConfig;
private SystemEnvironment sysEnv;
private String originalConfigDir;
/**
 * Creates a helper backed by the default two-agent config XML fixture.
 */
public GoConfigFileHelper() {
this(ConfigFileFixture.DEFAULT_XML_WITH_2_AGENTS);
}
/**
 * Creates a helper backed by the default two-agent config XML fixture plus
 * the given partial (remote) config.
 */
public GoConfigFileHelper(GoPartialConfig partials) {
this(ConfigFileFixture.DEFAULT_XML_WITH_2_AGENTS,partials);
}
public GoConfigFileHelper(GoConfigDao goConfigDao) {
this(ConfigFileFixture.DEFAULT_XML_WITH_2_AGENTS, goConfigDao);
}
private GoConfigFileHelper(String xml, GoConfigDao goConfigDao) {
new SystemEnvironment().setProperty(SystemEnvironment.ENFORCE_SERVERID_MUTABILITY, "N");
this.originalXml = xml;
assignFileDao(goConfigDao);
try {
File dir = TestFileUtil.createTempFolder("server-config-dir");
this.configFile = new File(dir, "cruise-config.xml");
configFile.deleteOnExit();
sysEnv = new SystemEnvironment();
sysEnv.setProperty(SystemEnvironment.CONFIG_FILE_PROPERTY, configFile.getAbsolutePath());
initializeConfigFile();
} catch (IOException e) {
throw bomb("Error creating config file", e);
}
}
private void assignFileDao(GoConfigDao goConfigDao) {
this.goConfigDao = goConfigDao;
try {
Field field = GoConfigDao.class.getDeclaredField("cachedConfigService");
field.setAccessible(true);
this.cachedGoConfig = (CachedGoConfig) field.get(goConfigDao);
} catch (Exception e) {
bomb(e);
}
}
/**
 * Creates a config DAO that accesses a single config file (no remote config parts),
 * wiring up the full chain: data source -> cached file config -> watch list ->
 * repo config data source -> partial config -> merged config -> DAO.
 */
public static GoConfigDao createTestingDao() {
SystemEnvironment systemEnvironment = new SystemEnvironment();
try {
ServerHealthService serverHealthService = new ServerHealthService();
ConfigRepository configRepository = new ConfigRepository(systemEnvironment);
configRepository.initialize();
ConfigCache configCache = new ConfigCache();
ConfigElementImplementationRegistry configElementImplementationRegistry = ConfigElementImplementationRegistryMother.withNoPlugins();
GoFileConfigDataSource dataSource = new GoFileConfigDataSource(new DoNotUpgrade(), configRepository, systemEnvironment, new TimeProvider(),
configCache, new ServerVersion(), configElementImplementationRegistry, serverHealthService);
dataSource.upgradeIfNecessary();
CachedFileGoConfig fileService = new CachedFileGoConfig(dataSource,serverHealthService);
GoConfigWatchList configWatchList = new GoConfigWatchList(fileService);
GoRepoConfigDataSource repoConfigDataSource = new GoRepoConfigDataSource(configWatchList,
new GoConfigPluginService(configCache,configElementImplementationRegistry));
GoPartialConfig partialConfig = new GoPartialConfig(repoConfigDataSource,configWatchList);
MergedGoConfig cachedConfigService = new MergedGoConfig(serverHealthService,fileService,partialConfig);
cachedConfigService.loadConfigIfNull();
return new GoConfigDao(cachedConfigService);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
 * Creates a config DAO whose remote configuration parts are supplied by the given
 * {@code partialConfig} instead of being built from a watch list.
 */
public static GoConfigDao createTestingDao(GoPartialConfig partialConfig) {
SystemEnvironment systemEnvironment = new SystemEnvironment();
try {
ServerHealthService serverHealthService = new ServerHealthService();
ConfigRepository configRepository = new ConfigRepository(systemEnvironment);
configRepository.initialize();
GoFileConfigDataSource dataSource = new GoFileConfigDataSource(new DoNotUpgrade(), configRepository, systemEnvironment, new TimeProvider(),
new ConfigCache(), new ServerVersion(), com.thoughtworks.go.util.ConfigElementImplementationRegistryMother.withNoPlugins(), serverHealthService);
dataSource.upgradeIfNecessary();
CachedFileGoConfig fileService = new CachedFileGoConfig(dataSource,serverHealthService);
MergedGoConfig cachedConfigService = new MergedGoConfig(serverHealthService,fileService,partialConfig);
cachedConfigService.loadConfigIfNull();
return new GoConfigDao(cachedConfigService);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// Wraps an existing config file: migrates it in place, remembers its content as the
// "original" XML, and points the CONFIG_FILE_PROPERTY system property at it.
public GoConfigFileHelper(File configFile) {
assignFileDao(createTestingDao());
this.configFile = configFile.getAbsoluteFile();
ConfigMigrator.migrate(this.configFile);
try {
this.originalXml = FileUtils.readFileToString(this.configFile, "UTF-8");
} catch (IOException e) {
throw bomb("Error reading config file", e);
}
new SystemEnvironment().setProperty(SystemEnvironment.CONFIG_FILE_PROPERTY, this.configFile.getAbsolutePath());
}
// Creates a helper around the given XML with a freshly created testing DAO.
public GoConfigFileHelper(String xml) {
this(xml, createTestingDao());
}
// Creates a helper around the given XML with a testing DAO using the given partials.
public GoConfigFileHelper(String xml,GoPartialConfig partials) {
this(xml, createTestingDao(partials));
}
public GoConfigDao getGoConfigDao() {
return goConfigDao;
}
public CachedGoConfig getCachedGoConfig() {
return cachedGoConfig;
}
/** Points the server's artifacts directory at {@code artifactsDir} and persists the config. */
public void setArtifactsDir(String artifactsDir) {
    CruiseConfig config = loadForEdit();
    config.server().setArtifactsDir(artifactsDir);
    writeConfigFile(config);
}
// Swaps in a different DAO (re-extracting its cached config service); returns this for chaining.
public GoConfigFileHelper usingCruiseConfigDao(GoConfigDao goConfigDao) {
assignFileDao(goConfigDao);
return this;
}
// Resets both the file on disk and the in-memory config to the default two-agent fixture.
public void usingEmptyConfigFileWithLicenseAllowsTwoAgents() {
writeToFileAndDB(ConfigFileFixture.DEFAULT_XML_WITH_2_AGENTS);
}
// Migrates the given content, writes it to the config file, then persists the reloaded config.
private void writeToFileAndDB(String configContent) {
writeXmlToConfigFile(loadAndMigrate(configContent));
writeConfigFile(load());
}
// Factory: helper backed by the unlimited-agents fixture XML.
public static GoConfigFileHelper usingEmptyConfigFileWithLicenseAllowsUnlimitedAgents() {
return new GoConfigFileHelper(ConfigFileFixture.DEFAULT_XML_WITH_UNLIMITED_AGENTS);
}
/**
 * Writes the given XML verbatim to the config file and forces the DAO to reload it.
 * Uses UTF-8 explicitly: the file is read back elsewhere with UTF-8, and the no-charset
 * {@code writeStringToFile} overload is deprecated and uses the platform default.
 *
 * @throws RuntimeException (via bomb) if writing or reloading fails
 */
public void writeXmlToConfigFile(String xml) {
    try {
        FileUtils.writeStringToFile(configFile, xml, "UTF-8");
        goConfigDao.forceReload();
    } catch (Exception e) {
        throw bomb("Error writing config file: " + configFile.getAbsolutePath(), e);
    }
}
// Test fixture setup: rewrites the config file from the original XML, reloads it,
// persists it, and redirects the config-dir system property at the temp directory.
public void onSetUp() throws IOException {
initializeConfigFile();
goConfigDao.forceReload();
writeConfigFile(load());
originalConfigDir = sysEnv.getConfigDir();
File configDir = configFile.getParentFile();
sysEnv.setProperty(SystemEnvironment.CONFIG_DIR_PROPERTY, configDir.getAbsolutePath());
}
// Deletes any password file and restores the (migrated) original XML to disk.
public void initializeConfigFile() throws IOException {
FileUtils.deleteQuietly(passwordFile);
writeXmlToConfigFile(ConfigMigrator.migrate(originalXml));
}
// Test fixture teardown: restores the config-dir property, removes the temp file,
// and saves the original XML back through the cached config service.
public void onTearDown() {
sysEnv.setProperty(SystemEnvironment.CONFIG_DIR_PROPERTY, originalConfigDir);
FileUtils.deleteQuietly(configFile);
try {
cachedGoConfig.save(originalXml, true);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Adds a pipeline with one stage containing the default "unit" and "functional" jobs.
public PipelineConfig addPipeline(String pipelineName, String stageName) {
return addPipeline(pipelineName, stageName, "unit", "functional");
}
// Same as above, but the pipeline's material is the given Subversion repository.
public PipelineConfig addPipeline(String pipelineName, String stageName, Subversion repository) {
return addPipeline(pipelineName, stageName, repository, "unit", "functional");
}
// Adds a pipeline with the given job names into the default pipeline group.
public PipelineConfig addPipeline(String pipelineName, String stageName, String... buildNames) {
return addPipelineWithGroup(BasicPipelineConfigs.DEFAULT_GROUP, pipelineName, stageName, buildNames);
}
// Adds a pipeline template with an empty authorization.
public PipelineTemplateConfig addTemplate(String pipelineName, String stageName) {
return addTemplate(pipelineName, new Authorization(), stageName);
}
/**
 * Adds a pipeline template (with a manual-approval stage) under the given authorization
 * and persists the config. Returns the template that was added.
 */
public PipelineTemplateConfig addTemplate(String pipelineName, Authorization authorization, String stageName) {
    CruiseConfig config = loadForEdit();
    PipelineTemplateConfig template =
            PipelineTemplateConfigMother.createTemplate(pipelineName, authorization, StageConfigMother.manualStage(stageName));
    config.getTemplates().add(template);
    writeConfigFile(config);
    return template;
}
// Adds a pipeline to the named group using a dummy svn material URL.
public PipelineConfig addPipelineWithGroup(String groupName, String pipelineName, String stageName, String... buildNames) {
return addPipelineWithGroup(groupName, pipelineName, new SvnCommand(null, "svn:///user:pass@tmp/foo"), stageName, buildNames);
}
// Adds a pipeline that uses an existing template instead of declaring its own stages.
public PipelineConfig addPipelineWithTemplate(String groupName, String pipelineName, String templateName) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString(pipelineName), MaterialConfigsMother.mockMaterialConfigs("svn:///user:pass@tmp/foo"));
pipelineConfig.setTemplateName(new CaseInsensitiveString(templateName));
cruiseConfig.findGroup(groupName).add(pipelineConfig);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Adds a pipeline to the named group whose material is built from the svn command's URL.
public PipelineConfig addPipelineWithGroup(String groupName, String pipelineName, SvnCommand svnCommand, String stageName, String... buildNames) {
return addPipelineWithGroupAndTimer(groupName, pipelineName, new MaterialConfigs(MaterialConfigsMother.mockMaterialConfigs(svnCommand.getUrlForDisplay())), stageName, null, buildNames);
}
// Adds a pipeline to the named group with explicit materials and no timer.
public PipelineConfig addPipelineWithGroup(String groupName, String pipelineName, MaterialConfigs materialConfigs, String stageName, String... buildNames) {
return addPipelineWithGroupAndTimer(groupName, pipelineName, materialConfigs, stageName, null, buildNames);
}
// Most general form: adds a pipeline with group, materials, stage, optional timer and jobs.
public PipelineConfig addPipelineWithGroupAndTimer(String groupName, String pipelineName, MaterialConfigs materialConfigs, String stageName, TimerConfig timer, String... buildNames) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addPipelineWithGroupAndTimer(cruiseConfig, groupName, pipelineName, materialConfigs, stageName, timer, buildNames);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Callback used by updatePipeline to mutate a config object in place.
public interface Updater<T>{
void update(T t);
}
/**
 * Loads the named pipeline, lets the updater mutate it, persists the config,
 * and returns the (mutated) pipeline config.
 */
public PipelineConfig updatePipeline(CaseInsensitiveString pipelineName, Updater<PipelineConfig> updater) {
    CruiseConfig config = loadForEdit();
    PipelineConfig pipeline = config.getPipelineConfigByName(pipelineName);
    updater.update(pipeline);
    writeConfigFile(config);
    return pipeline;
}
// Adds a fully built pipeline config into a fixed "quux-group".
public PipelineConfig addPipeline(PipelineConfig pipelineConfig) {
return addPipelineToGroup(pipelineConfig, "quux-group");
}
// Adds a fully built pipeline config into the named group and persists.
public PipelineConfig addPipelineToGroup(PipelineConfig pipelineConfig, final String groupName) {
CruiseConfig cruiseConfig = loadForEdit();
cruiseConfig.addPipeline(groupName, pipelineConfig);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Adds a pipeline with an attached Mingle configuration.
public PipelineConfig addPipelineWithGroup(String groupName, String pipelineName, MaterialConfigs materialConfigs, MingleConfig mingleConfig, String stageName, String... buildNames) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addPipelineWithGroup(cruiseConfig, groupName, pipelineName,
materialConfigs,
stageName,
buildNames);
pipelineConfig.setMingleConfig(mingleConfig);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Adds a pipeline with an attached tracking tool.
public PipelineConfig addPipelineWithGroup(String groupName, String pipelineName, MaterialConfigs materialConfigs, TrackingTool trackingTool, String stageName, String... jobs) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addPipelineWithGroup(cruiseConfig, groupName, pipelineName,
materialConfigs,
stageName,
jobs);
pipelineConfig.setTrackingTool(trackingTool);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Adds a pipeline whose material mirrors the given live Subversion repository.
public PipelineConfig addPipeline(String pipelineName, String stageName, Subversion repository, String... buildNames) {
return addPipeline(pipelineName, stageName, new SvnMaterialConfig(repository.getUrl().forCommandline(), repository.getUserName(), repository.getPassword(), repository.isCheckExternals()), buildNames);
}
// Changes the server's artifact root and persists.
public void updateArtifactRoot(String path) {
CruiseConfig cruiseConfig = loadForEdit();
cruiseConfig.server().updateArtifactRoot(path);
writeConfigFile(cruiseConfig);
}
// Adds a pipeline from a Subversion repository with a material filter applied.
public PipelineConfig addPipeline(String pipelineName, String stageName, Subversion repository, Filter filter, String... buildNames) throws Exception {
return addPipeline(pipelineName, stageName, new SvnMaterialConfig(repository.getUrl().forCommandline(), repository.getUserName(), repository.getPassword(), repository.isCheckExternals()), filter, buildNames);
}
// Applies the filter to the svn material before delegating.
private PipelineConfig addPipeline(String pipelineName, String stageName, SvnMaterialConfig svnMaterialConfig, Filter filter,
String... buildNames) throws Exception {
svnMaterialConfig.setFilter(filter);
return addPipeline(pipelineName, stageName, svnMaterialConfig, buildNames);
}
// Adds a pipeline with a single material config.
public PipelineConfig addPipeline(String pipelineName, String stageName, MaterialConfig materialConfig, String... buildNames) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addPipeline(cruiseConfig, pipelineName, stageName, new MaterialConfigs(materialConfig), buildNames);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Adds a pipeline with a single material and a Mingle configuration.
public PipelineConfig addPipeline(String pipelineName, String stageName, MaterialConfig materialConfig, MingleConfig mingleConfig, String... jobs) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addPipeline(cruiseConfig, pipelineName, stageName, new MaterialConfigs(materialConfig), jobs);
pipelineConfig.setMingleConfig(mingleConfig);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Adds a pipeline with a single material and a tracking tool.
public PipelineConfig addPipeline(String pipelineName, String stageName, MaterialConfig materialConfig, TrackingTool trackingTool, String... jobs) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addPipeline(cruiseConfig, pipelineName, stageName, new MaterialConfigs(materialConfig), jobs);
pipelineConfig.setTrackingTool(trackingTool);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Adds a pipeline with explicit materials.
public PipelineConfig addPipeline(String pipelineName, String stageName, MaterialConfigs materialConfigs, String... buildNames) throws Exception {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addPipeline(cruiseConfig, pipelineName, stageName, materialConfigs, buildNames);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Appends a stage (with a single default "unit" job) to an existing pipeline.
public PipelineConfig addStageToPipeline(String pipelineName, String stageName) throws Exception {
return addStageToPipeline(pipelineName, stageName, "unit");
}
// Appends a stage with the given job names to an existing pipeline.
public PipelineConfig addStageToPipeline(String pipelineName, String stageName, String... buildNames)
throws Exception {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addStageToPipeline(cruiseConfig, pipelineName, stageName,
buildNames);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Appends a pre-built stage config to an existing pipeline.
public PipelineConfig addStageToPipeline(String pipelineName, StageConfig stageConfig) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
pipelineConfig.add(stageConfig);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Replaces the pipeline-level environment variables.
public void addEnvironmentVariableToPipeline(String pipelineName, EnvironmentVariablesConfig envVars) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
pipelineConfig.setVariables(envVars);
writeConfigFile(cruiseConfig);
}
// Replaces the stage-level environment variables.
public void addEnvironmentVariableToStage(String pipelineName, String stageName, EnvironmentVariablesConfig envVars) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(stageName));
stageConfig.setVariables(envVars);
writeConfigFile(cruiseConfig);
}
// Replaces the job-level environment variables.
public void addEnvironmentVariableToJob(String pipelineName, String stageName, String jobName, EnvironmentVariablesConfig envVars) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(stageName));
JobConfig jobConfig = stageConfig.jobConfigByConfigName(new CaseInsensitiveString(jobName));
jobConfig.setVariables(envVars);
writeConfigFile(cruiseConfig);
}
// Inserts a stage at a specific index in the pipeline.
public PipelineConfig addStageToPipeline(String pipelineName, String stageName, int stageindex,
String... buildNames) throws Exception {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = goConfigMother.addStageToPipeline(
cruiseConfig, pipelineName, stageName, stageindex, buildNames);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Removes a stage from a pipeline and returns the removed stage config.
public StageConfig removeStage(String pipelineName, String stageName) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(stageName));
pipelineConfig.remove(stageConfig);
writeConfigFile(cruiseConfig);
return stageConfig;
}
// Removes a pipeline; if its group becomes empty, the group itself is removed too.
public void removePipeline(String pipelineName) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfigs groups = removePipeline(pipelineName, cruiseConfig);
if (groups.isEmpty()) {
cruiseConfig.getGroups().remove(groups);
}
writeConfigFile(cruiseConfig);
}
// Removes the pipeline from whichever group contains it and returns that group.
// Note: mutates the passed-in cruiseConfig but does NOT persist it.
public PipelineConfigs removePipeline(String pipelineName, CruiseConfig cruiseConfig) {
String groupName = cruiseConfig.getGroups().findGroupNameByPipeline(new CaseInsensitiveString(pipelineName));
PipelineConfigs groups = cruiseConfig.getGroups().findGroup(groupName);
groups.remove(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName)));
return groups;
}
// Adds a job with empty resources/artifacts to an existing stage.
public StageConfig addJob(String pipelineName, String stageName, String jobName) {
JobConfig jobConfig = new JobConfig(new CaseInsensitiveString(jobName), new Resources(), new ArtifactPlans());
return addJobToStage(pipelineName, stageName, jobConfig);
}
// Appends the given job config to the stage, keeping existing jobs.
public StageConfig addJobToStage(String pipelineName, String stageName, JobConfig jobConfig) {
return pushJobIntoStage(pipelineName, stageName, jobConfig, false);
}
// Replaces all jobs in the stage with the single given job config.
public void replaceAllJobsInStage(String pipelineName, String stageName, JobConfig jobConfig) {
pushJobIntoStage(pipelineName, stageName, jobConfig, true);
}
// Shared implementation: optionally clears the stage's jobs, then appends the new one.
private StageConfig pushJobIntoStage(String pipelineName, String stageName, JobConfig jobConfig, boolean clearExistingJobs) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(stageName));
if (clearExistingJobs) {
stageConfig.allBuildPlans().clear();
}
stageConfig.allBuildPlans().add(jobConfig);
writeConfigFile(cruiseConfig);
return stageConfig;
}
// Adds a pipeline whose svn material URL is deliberately invalid (for failure tests).
public PipelineConfig addPipelineWithInvalidMaterial(String pipelineName, String stageName) {
CruiseConfig cruiseConfig = loadForEdit();
StageConfig stageConfig = StageConfigMother.custom(stageName, defaultBuildPlans("buildName"));
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString(pipelineName), invalidRepositoryMaterialConfigs(), stageConfig);
cruiseConfig.addPipeline(DEFAULT_GROUP, pipelineConfig);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
public File getConfigFile() {
return configFile;
}
// Svn material with an unparsable URL scheme.
private MaterialConfig invalidSvnMaterialConfig() {
return new SvnMaterialConfig("invalid://invalid url", "user", "password", false);
}
private MaterialConfigs invalidRepositoryMaterialConfigs() {
return new MaterialConfigs(invalidSvnMaterialConfig());
}
/** Builds a JobConfigs containing one default (empty-resource, empty-artifact) job per name. */
private static JobConfigs defaultBuildPlans(String... planNames) {
    JobConfigs jobs = new JobConfigs();
    for (int i = 0; i < planNames.length; i++) {
        jobs.add(defaultBuildPlan(planNames[i]));
    }
    return jobs;
}
/** A single job config with no resources and no artifact plans. */
private static JobConfig defaultBuildPlan(String name) {
    return new JobConfig(new CaseInsensitiveString(name), new Resources(), new ArtifactPlans());
}
// Force-reloads from disk and returns a deep clone of the current (merged) config,
// so callers can mutate it freely without affecting the DAO's copy.
public CruiseConfig load() {
try {
goConfigDao.forceReload();
return new Cloner().deepClone(goConfigDao.load());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Force-reloads and returns a deep clone of the config in its editable form.
public CruiseConfig loadForEdit() {
try {
goConfigDao.forceReload();
return new Cloner().deepClone(goConfigDao.loadForEditing());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Alias for load().
public CruiseConfig currentConfig() {
return load();
}
// Registers a config repository (remote config source) and persists.
public void addConfigRepo(ConfigRepoConfig configRepoConfig) {
CruiseConfig cruiseConfig = loadForEdit();
cruiseConfig.getConfigRepos().add(configRepoConfig);
writeConfigFile(cruiseConfig);
}
// Registers an agent with the given hostname/uuid at a fixed loopback IP.
public void addAgent(String hostname, String uuid) {
addAgent(new AgentConfig(uuid, hostname, "127.0.0.1"));
}
public void addAgent(AgentConfig newAgentConfig) {
CruiseConfig cruiseConfig = loadForEdit();
cruiseConfig.agents().add(newAgentConfig);
writeConfigFile(cruiseConfig);
}
// Marks a job to run on all agents. NOTE(review): operates on currentConfig() (load())
// rather than loadForEdit(), unlike the other mutators here — confirm this is intended.
public void makeJobRunOnAllAgents(String pipeline, String stageName, String jobName) {
CruiseConfig cruiseConfig = currentConfig();
cruiseConfig.jobConfigByName(pipeline, stageName, jobName, true).setRunOnAllAgents(true);
writeConfigFile(cruiseConfig);
}
/** Installs the given security configuration on the server and persists it. */
public void addSecurity(SecurityConfig securityConfig) {
    CruiseConfig cruiseConfig = loadForEdit();
    cruiseConfig.server().useSecurity(securityConfig);
    writeConfigFile(cruiseConfig);
}
/** Disables security by installing an empty security configuration. */
public void turnOffSecurity() {
    addSecurity(new SecurityConfig());
}
// Enables LDAP security (anonymous access on, no admins).
public void addLdapSecurity(String uri, String managerDn, String managerPassword, String searchBase,
String searchFilter) {
LdapConfig ldapConfig = new LdapConfig(uri, managerDn, managerPassword, null, true, new BasesConfig(new BaseConfig(searchBase)), searchFilter);
addLdapSecurityWith(ldapConfig, true, new PasswordFileConfig(), new AdminsConfig());
}
// Enables LDAP security with a single named admin user.
public void addLdapSecurityWithAdmin(String uri, String managerDn, String managerPassword, String searchBase,
String searchFilter, String adminUser) {
LdapConfig ldapConfig = new LdapConfig(uri, managerDn, managerPassword, null, true, new BasesConfig(new BaseConfig(searchBase)), searchFilter);
addLdapSecurityWith(ldapConfig, true, new PasswordFileConfig(), new AdminsConfig(new AdminUser(new CaseInsensitiveString(adminUser))));
}
// Builds a SecurityConfig out of the pieces and installs it.
public void addLdapSecurityWith(LdapConfig ldapConfig, boolean anonymous, PasswordFileConfig passwordFileConfig,
AdminsConfig adminsConfig) {
SecurityConfig securityConfig = new SecurityConfig(ldapConfig, passwordFileConfig, anonymous, adminsConfig);
addSecurity(securityConfig);
}
// Installs a deliberately bogus LDAP config (for failure-path tests).
public void addSecurityWithBogusLdapConfig(boolean anonymous) {
addLdapSecurityWith(new LdapConfig("uri", "dn", "pw", null, true, new BasesConfig(new BaseConfig("sb")), "sf"), anonymous, new PasswordFileConfig(),
new AdminsConfig());
}
// Enables password-file-based security and returns the created password file.
public File addSecurityWithPasswordFile() throws IOException {
addLdapSecurityWith(new LdapConfig(new GoCipher()), true, new PasswordFileConfig(addPasswordFile().getAbsolutePath()), new AdminsConfig());
return passwordFile;
}
// Alias for addSecurityWithPasswordFile().
public File turnOnSecurity() throws IOException {
return addSecurityWithPasswordFile();
}
// Points security at a password file path that does not exist (for failure-path tests).
public void addSecurityWithNonExistantPasswordFile() throws IOException {
addLdapSecurityWith(new LdapConfig(new GoCipher()), true,
new PasswordFileConfig(new File("invalid", "path").getAbsolutePath()),
new AdminsConfig());
}
// Password-file security with "admin1" configured as an admin.
public void addSecurityWithAdminConfig() throws Exception {
addLdapSecurityWith(new LdapConfig(new GoCipher()), true, new PasswordFileConfig(addPasswordFile().getAbsolutePath()),
new AdminsConfig(new AdminUser(new CaseInsensitiveString("admin1"))));
}
// Creates a temp password file with two hashed entries and remembers it in this.passwordFile.
private File addPasswordFile() throws IOException {
passwordFile = TestFileUtil.createTempFile("password.properties");
passwordFile.deleteOnExit();
final String nonAdmin = "jez=ThmbShxAtJepX80c2JY1FzOEmUk=\n"; //in plain text: badger
final String admin1 = "admin1=W6ph5Mm5Pz8GgiULbPgzG37mj9g=\n"; //in plain text: password
FileUtils.writeStringToFile(passwordFile, nonAdmin + admin1);
return passwordFile;
}
// Sets the server's mail host and persists.
public void addMailHost(MailHost mailHost) {
CruiseConfig config = loadForEdit();
config.server().updateMailHost(mailHost);
writeConfigFile(config);
}
// Adds a security role and persists.
public void addRole(Role role) {
CruiseConfig config = loadForEdit();
config.server().security().addRole(role);
writeConfigFile(config);
}
// Replaces the pipeline's materials with a single hg material at the given URL.
public void replaceMaterialWithHgRepoForPipeline(String pipelinename, String hgUrl) {
replaceMaterialForPipeline(pipelinename, MaterialConfigsMother.hgMaterialConfig(hgUrl));
}
// Replaces ALL materials of the pipeline with the single given material.
public PipelineConfig replaceMaterialForPipeline(String pipelinename, MaterialConfig materialConfig) {
return replaceMaterialConfigForPipeline(pipelinename, new MaterialConfigs(materialConfig));
}
/**
 * Replaces ALL materials of the pipeline with the single given material.
 * Behaviorally identical to {@link #replaceMaterialForPipeline}; delegates to it so
 * there is a single implementation instead of two copy-pasted bodies.
 */
public PipelineConfig replaceMaterialConfigForPipeline(String pipelinename, MaterialConfig materialConfig) {
    return replaceMaterialForPipeline(pipelinename, materialConfig);
}
// Replaces the pipeline's materials with the given set. NOTE(review): despite the
// delegate's "add" name, this SETS (overwrites) the material list — see below.
public PipelineConfig setMaterialConfigForPipeline(String pipelinename, MaterialConfig... materialConfigs) {
return addMaterialConfigForPipeline(pipelinename, materialConfigs);
}
// Overwrites the pipeline's material list with the given materials and persists.
private PipelineConfig addMaterialConfigForPipeline(String pipelinename, MaterialConfig... materialConfigs) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelinename));
pipelineConfig.setMaterialConfigs(new MaterialConfigs(materialConfigs));
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Overwrites the pipeline's material list with the given MaterialConfigs and persists.
private PipelineConfig replaceMaterialConfigForPipeline(String pipelinename, MaterialConfigs materialConfigs) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelinename));
pipelineConfig.setMaterialConfigs(materialConfigs);
writeConfigFile(cruiseConfig);
return pipelineConfig;
}
// Switches the named stage to manual approval.
public void requireApproval(String pipelineName, String stageName) {
CruiseConfig cruiseConfig = loadForEdit();
cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName)).findBy(new CaseInsensitiveString(stageName)).updateApproval(Approval.manualApproval());
writeConfigFile(cruiseConfig);
}
// Makes `product` depend on the given upstream pipeline/stage and persists.
public void setDependencyOn(PipelineConfig product, String pipelineName, String stageName) {
CruiseConfig cruiseConfig = loadForEdit();
goConfigMother.setDependencyOn(cruiseConfig, product, pipelineName, stageName);
writeConfigFile(cruiseConfig);
}
/**
 * Serializes the given config to XML and saves it through the cached config service.
 * Decodes the serialized bytes as UTF-8 explicitly — the previous
 * {@code new String(buffer.toByteArray())} used the platform default charset, which
 * corrupts non-ASCII config content on non-UTF-8 platforms.
 *
 * @throws RuntimeException (via bomb) if serialization or saving fails
 */
public void writeConfigFile(CruiseConfig cruiseConfig) {
    try {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        getXml(cruiseConfig, buffer);
        cachedGoConfig.save(buffer.toString("UTF-8"), false);
    } catch (Exception e) {
        throw bomb(e);
    }
}
// Serializes the config to XML into the given buffer (plugin-free registry).
public void getXml(CruiseConfig cruiseConfig, ByteArrayOutputStream buffer) throws Exception {
new MagicalGoConfigXmlWriter(new ConfigCache(), com.thoughtworks.go.util.ConfigElementImplementationRegistryMother.withNoPlugins()).write(cruiseConfig, buffer, false);
}
// Switches the stage to automatic approval.
public void configureStageAsAutoApproval(String pipelineName, String stage) {
updateApproval(pipelineName, stage, Approval.automaticApproval());
}
// Switches the stage to manual approval.
public void configureStageAsManualApproval(String pipelineName, String stage) {
updateApproval(pipelineName, stage, Approval.manualApproval());
}
// Makes the stage manual-approval and authorizes the given users to approve it.
public void addAuthorizedUserForStage(String pipelineName, String stageName, String... users) {
configureStageAsManualApproval(pipelineName, stageName);
CruiseConfig cruiseConfig = loadForEdit();
StageConfig stageConfig = cruiseConfig.stageConfigByName(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(stageName));
Approval approval = stageConfig.getApproval();
for (String user : users) {
approval.getAuthConfig().add(new AdminUser(new CaseInsensitiveString(user)));
}
writeConfigFile(cruiseConfig);
}
// Grants view permission on the FIRST pipeline group to the user.
public void addAuthorizedUserForPipelineGroup(String user) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfigs group = cruiseConfig.getGroups().first();
group.getAuthorization().getViewConfig().add(new AdminUser(new CaseInsensitiveString(user)));
writeConfigFile(cruiseConfig);
}
// Grants view permission on the named pipeline group to the user.
public void addAuthorizedUserForPipelineGroup(String user, String groupName) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfigs group = cruiseConfig.getGroups().findGroup(groupName);
group.getAuthorization().getViewConfig().add(new AdminUser(new CaseInsensitiveString(user)));
writeConfigFile(cruiseConfig);
}
// Shared helper: updates the approval type of the given stage and persists.
private void updateApproval(String pipelineName, String ftStage, Approval manualApproval) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
StageConfig config = pipelineConfig.findBy(new CaseInsensitiveString(ftStage));
config.updateApproval(manualApproval);
writeConfigFile(cruiseConfig);
}
// True when server security is currently enabled.
public boolean isSecurityEnabled() {
CruiseConfig cruiseConfig = loadForEdit();
return cruiseConfig.server().isSecurityEnabled();
}
// Parses the given XML content into a CruiseConfig via a throwaway helper instance.
public static CruiseConfig load(String content) {
try {
return new GoConfigFileHelper(content).currentConfig();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
 * Runs the given config content through the migration chain (by constructing a helper
 * around it) and returns the migrated file content.
 * Reads back with an explicit UTF-8 charset: the no-charset {@code readFileToString}
 * overload is deprecated and uses the platform default, while the rest of this class
 * (e.g. the File constructor) reads the config file as UTF-8.
 */
public static String loadAndMigrate(String originalContent) {
    GoConfigFileHelper helper = new GoConfigFileHelper(originalContent);
    try {
        return FileUtils.readFileToString(helper.getConfigFile(), "UTF-8");
    } catch (IOException e) {
        throw bomb(e);
    }
}
// Grants group-admin permission on the named pipeline group to the user.
public void setAdminPermissionForGroup(String groupName, String user) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfigs group = cruiseConfig.getGroups().findGroup(groupName);
group.getAuthorization().getAdminsConfig().add(new AdminUser(new CaseInsensitiveString(user)));
writeConfigFile(cruiseConfig);
}
// Grants view permission on the named pipeline group to the user.
public void setViewPermissionForGroup(String groupName, String username) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfigs group = cruiseConfig.getGroups().findGroup(groupName);
group.getAuthorization().getViewConfig().add(new AdminUser(new CaseInsensitiveString(username)));
writeConfigFile(cruiseConfig);
}
// Grants operate permission on the named pipeline group to each listed user.
public void setOperatePermissionForGroup(String groupName, String... userNames) {
CruiseConfig cruiseConfig = loadForEdit();
Admin[] admins = AdminUserMother.adminUsers(userNames);
for (Admin admin : admins) {
cruiseConfig.getGroups().findGroup(groupName).getAuthorization().getOperationConfig().add(admin);
}
writeConfigFile(cruiseConfig);
}
// Restricts stage approval to the given user (replaces the stage's approval config).
public void setOperatePermissionForStage(String pipelineName, String stageName, String username) {
CruiseConfig cruiseConfig = loadForEdit();
StageConfig stageConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName)).findBy(new CaseInsensitiveString(stageName));
stageConfig.updateApproval(new Approval(new AuthConfig(new AdminUser(new CaseInsensitiveString(username)))));
writeConfigFile(cruiseConfig);
}
// Sets the pipeline's label template and persists.
public void setPipelineLabelTemplate(String pipelineName, String labelTemplate) {
CruiseConfig config = loadForEdit();
config.pipelineConfigByName(new CaseInsensitiveString(pipelineName)).setLabelTemplate(labelTemplate);
writeConfigFile(config);
}
// Installs a fixed test mail host configuration.
public void setupMailHost() {
CruiseConfig config = loadForEdit();
config.server().setMailHost(
new MailHost("10.18.3.171", 25, "cruise2", "password", true, false, "cruise2@cruise.com", "admin@cruise.com"));
writeConfigFile(config);
}
// Associates the agent (by uuid) with the named environment.
public void addAgentToEnvironment(String env, String uuid) {
CruiseConfig config = loadForEdit();
config.getEnvironments().addAgentsToEnvironment(env, uuid);
writeConfigFile(config);
}
// Associates the pipeline with the named environment.
public void addPipelineToEnvironment(String env, String pipelineName) {
CruiseConfig config = loadForEdit();
config.getEnvironments().addPipelinesToEnvironment(env, pipelineName);
writeConfigFile(config);
}
// Toggles run-on-all-agents for the given job.
public void setRunOnAllAgents(String pipelineName, String stageName, String jobName, boolean runOnAllAgents) {
CruiseConfig config = loadForEdit();
PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
pipelineConfig.findBy(new CaseInsensitiveString(stageName)).jobConfigByInstanceName(jobName, true).setRunOnAllAgents(runOnAllAgents);
writeConfigFile(config);
}
// Sets the run-multiple-instances count for the given job.
public void setRunMultipleInstance(String pipelineName, String stageName, String jobName, Integer runInstanceCount) {
CruiseConfig config = loadForEdit();
PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
pipelineConfig.findBy(new CaseInsensitiveString(stageName)).jobConfigByInstanceName(jobName, true).setRunInstanceCount(runInstanceCount);
writeConfigFile(config);
}
// Adds each resource requirement to the given job.
public void addResourcesFor(String pipelineName, String stageName, String jobName, String... resources) {
CruiseConfig config = loadForEdit();
PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
for (String resource : resources) {
pipelineConfig.findBy(new CaseInsensitiveString(stageName)).jobConfigByConfigName(new CaseInsensitiveString(jobName)).addResource(resource);
}
writeConfigFile(config);
}
// Force-sets a job's resources/artifact plans/property generators via reflection
// (bypasses any setter-side logic).
public void addAssociatedEntitiesForAJob(String pipelineName, String stageName, String jobName, Resources resources,
ArtifactPlans artifactPlans, ArtifactPropertiesGenerators artifactPropertiesGenerators) {
CruiseConfig config = loadForEdit();
JobConfig jobConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName)).findBy(new CaseInsensitiveString(stageName)).jobConfigByConfigName(new CaseInsensitiveString(jobName));
ReflectionUtil.setField(jobConfig, "resources", resources);
ReflectionUtil.setField(jobConfig, "artifactPlans", artifactPlans);
ReflectionUtil.setField(jobConfig, "artifactPropertiesGenerators", artifactPropertiesGenerators);
writeConfigFile(config);
}
public PipelineConfig addMaterialToPipeline(String pipelineName, MaterialConfig materialConfig) {
CruiseConfig config = loadForEdit();
PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
for (MaterialConfig materialConfig1 : new MaterialConfig[]{materialConfig}) {
pipelineConfig.addMaterialConfig(materialConfig1);
}
writeConfigFile(config);
return pipelineConfig;
}
public PipelineConfig removeMaterialFromPipeline(String pipelineName, MaterialConfig materialConfig) {
CruiseConfig config = loadForEdit();
PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
pipelineConfig.removeMaterialConfig(materialConfig);
writeConfigFile(config);
return pipelineConfig;
}
public PipelineConfig changeStagenameForToPipeline(String pipelineName, String oldStageName, String newStageName) {
CruiseConfig config = loadForEdit();
PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
StageConfig stage = pipelineConfig.getStage(new CaseInsensitiveString(oldStageName));
int index = pipelineConfig.indexOf(stage);
stage = StageConfigMother.custom(newStageName, stage.isFetchMaterials(), stage.isCleanWorkingDir(), stage.getJobs(), stage.getApproval());
pipelineConfig.set(index, stage);
writeConfigFile(config);
return pipelineConfig;
}
public void blockPipelineGroupExceptFor(String pipelineGroupName, String roleName) {
CruiseConfig config = loadForEdit();
PipelineConfigs configs = config.getGroups().findGroup(pipelineGroupName);
Authorization authorization = new Authorization(new OperationConfig(new AdminRole(new CaseInsensitiveString(roleName))), new ViewConfig(new AdminRole(new CaseInsensitiveString(roleName))));
configs.setAuthorization(authorization);
writeConfigFile(config);
}
public void addAdmins(String... adminNames) {
CruiseConfig cruiseConfig = loadForEdit();
AdminsConfig adminsConfig = cruiseConfig.server().security().adminsConfig();
for (String adminName : adminNames) {
adminsConfig.add(new AdminUser(new CaseInsensitiveString(adminName)));
}
writeConfigFile(cruiseConfig);
}
public void addAdminRoles(String... roleNames) {
CruiseConfig cruiseConfig = loadForEdit();
AdminsConfig adminsConfig = cruiseConfig.server().security().adminsConfig();
for (String roleName : roleNames) {
adminsConfig.add(new AdminRole(new CaseInsensitiveString(roleName)));
}
writeConfigFile(cruiseConfig);
}
public void lockPipeline(String name) {
CruiseConfig config = loadForEdit();
PipelineConfig pipeline = config.pipelineConfigByName(new CaseInsensitiveString(name));
pipeline.lockExplicitly();
writeConfigFile(config);
}
public void addEnvironments(String... environmentNames) {
CruiseConfig config = loadForEdit();
for (String environmentName : environmentNames) {
config.addEnvironment(environmentName);
}
writeConfigFile(config);
}
public void addEnvironments(List<String> environmentNames) {
addEnvironments(environmentNames.toArray(new String[environmentNames.size()]));
}
public void addEnvironmentVariablesToEnvironment(String environmentName, String variableName, String variableValue) throws NoSuchEnvironmentException {
CruiseConfig config = loadForEdit();
EnvironmentConfig env = config.getEnvironments().named(new CaseInsensitiveString(environmentName));
env.addEnvironmentVariable(variableName, variableValue);
writeConfigFile(config);
}
    public void deleteConfigFile() {
        // Best-effort delete of the backing config file; File.delete()'s boolean
        // result is deliberately ignored (test cleanup).
        configFile.delete();
    }
    // Convenience factory for a single environment-variable config; delegates to the mother class.
    public static EnvironmentVariablesConfig env(String name, String value) {
        return EnvironmentVariablesConfigMother.env(name, value);
    }
    // Convenience factory taking parallel name/value arrays; delegates to the mother class.
    public static EnvironmentVariablesConfig env(String [] names, String [] values) {
        return EnvironmentVariablesConfigMother.env(names, values);
    }
public void addMingleConfigToPipeline(String pipelineName, MingleConfig mingleConfig) {
CruiseConfig config = loadForEdit();
PipelineConfig pipelineConfig = config.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
pipelineConfig.setMingleConfig(mingleConfig);
writeConfigFile(config);
}
public void setBaseUrls(ServerSiteUrlConfig siteUrl, ServerSiteUrlConfig secureSiteUrl) {
CruiseConfig config = loadForEdit();
config.setServerConfig(
new ServerConfig(config.server().security(), config.server().mailHost(), siteUrl, secureSiteUrl));
writeConfigFile(config);
}
public void removeJob(String pipelineName, String stageName, String jobName) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipelineName));
StageConfig stageConfig = pipelineConfig.findBy(new CaseInsensitiveString(stageName));
JobConfig job = stageConfig.getJobs().getJob(new CaseInsensitiveString(jobName));
stageConfig.getJobs().remove(job);
writeConfigFile(cruiseConfig);
}
public void addParamToPipeline(String pipeline, String paramName, String paramValue) {
CruiseConfig cruiseConfig = loadForEdit();
PipelineConfig pipelineConfig = cruiseConfig.pipelineConfigByName(new CaseInsensitiveString(pipeline));
pipelineConfig.addParam(new ParamConfig(paramName, paramValue));
writeConfigFile(cruiseConfig);
}
public void addPackageDefinition(PackageMaterialConfig packageMaterialConfig){
CruiseConfig config = loadForEdit();
PackageRepository repository = packageMaterialConfig.getPackageDefinition().getRepository();
config.getPackageRepositories().add(repository);
writeConfigFile(config);
}
public void addSCMConfig(SCM scmConfig) {
CruiseConfig config = loadForEdit();
config.getSCMs().add(scmConfig);
writeConfigFile(config);
}
    /**
     * Builds a config-update command that adds a pipeline with one custom stage/job.
     * NOTE: the pipeline group "g1" is hard-coded. The command is guarded by the
     * supplied MD5, so the update is rejected if the config changed since it was read.
     */
    public NoOverwriteUpdateConfigCommand addPipelineCommand(final String oldMd5, final String pipelineName, final String stageName, final String jobName) {
        return new NoOverwriteUpdateConfigCommand() {
            @Override
            public String unmodifiedMd5() {
                return oldMd5;
            }
            @Override
            public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
                cruiseConfig.addPipeline("g1", PipelineConfigMother.pipelineConfig(pipelineName, StageConfigMother.custom(stageName, jobName)));
                return cruiseConfig;
            }
        };
    }
    /**
     * Builds a config-update command that renames a job. The rename is done via
     * reflection on the private "jobName" field (JobConfig exposes no rename setter),
     * guarded by the supplied MD5 against concurrent config edits.
     */
    public UpdateConfigCommand changeJobNameCommand(final String md5, final String pipelineName, final String stageName, final String oldJobName, final String newJobName) {
        return new NoOverwriteUpdateConfigCommand() {
            @Override
            public String unmodifiedMd5() {
                return md5;
            }
            @Override
            public CruiseConfig update(CruiseConfig cruiseConfig) throws Exception {
                JobConfig job = cruiseConfig.findJob(pipelineName, stageName, oldJobName);
                ReflectionUtil.setField(job, "jobName", new CaseInsensitiveString(newJobName));
                return cruiseConfig;
            }
        };
    }
/*public void addPipelineGroup(String groupName) {
CruiseConfig config = loadForEdit();
config.addGroup(groupName);
writeConfigFile(config);
}*/
public static class AdminUserMother {
public static Admin[] adminUsers(String... userNames) {
Admin[] result = new Admin[userNames.length];
for (int i = 0; i < userNames.length; i++) {
String userName = userNames[i];
result[i] = new AdminUser(new CaseInsensitiveString(userName));
}
return result;
}
}
public static void clearConfigVersions() {
SystemEnvironment env = new SystemEnvironment();
FileUtils.deleteQuietly(env.getConfigRepoDir());
}
    /**
     * Runs the given procedure with server-id immutability enforced, restoring the
     * flag in a finally block so it is reset even if the procedure throws.
     */
    public static void withServerIdImmutability(Procedure fn) {
        try {
            SystemEnvironment.enforceServerIdImmutability.set(true);
            fn.call();
        } finally {
            SystemEnvironment.enforceServerIdImmutability.set(false);
        }
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.converter.crypto;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.util.IOHelper;
import org.bouncycastle.bcpg.BCPGOutputStream;
import org.bouncycastle.bcpg.CompressionAlgorithmTags;
import org.bouncycastle.bcpg.HashAlgorithmTags;
import org.bouncycastle.bcpg.SymmetricKeyAlgorithmTags;
import org.bouncycastle.bcpg.sig.KeyFlags;
import org.bouncycastle.openpgp.PGPCompressedDataGenerator;
import org.bouncycastle.openpgp.PGPEncryptedDataGenerator;
import org.bouncycastle.openpgp.PGPException;
import org.bouncycastle.openpgp.PGPLiteralData;
import org.bouncycastle.openpgp.PGPLiteralDataGenerator;
import org.bouncycastle.openpgp.PGPPrivateKey;
import org.bouncycastle.openpgp.PGPPublicKey;
import org.bouncycastle.openpgp.PGPPublicKeyRing;
import org.bouncycastle.openpgp.PGPPublicKeyRingCollection;
import org.bouncycastle.openpgp.PGPSecretKey;
import org.bouncycastle.openpgp.PGPSecretKeyRing;
import org.bouncycastle.openpgp.PGPSecretKeyRingCollection;
import org.bouncycastle.openpgp.PGPSignature;
import org.bouncycastle.openpgp.PGPSignatureGenerator;
import org.bouncycastle.openpgp.PGPUtil;
import org.bouncycastle.openpgp.operator.bc.BcKeyFingerprintCalculator;
import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder;
import org.bouncycastle.openpgp.operator.jcajce.JcePBEKeyEncryptionMethodGenerator;
import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder;
import org.bouncycastle.openpgp.operator.jcajce.JcePGPDataEncryptorBuilder;
import org.bouncycastle.openpgp.operator.jcajce.JcePublicKeyKeyEncryptionMethodGenerator;
import org.junit.Before;
import org.junit.Test;
public class PGPDataFormatTest extends AbstractPGPDataFormatTest {
private static final String PUB_KEY_RING_SUBKEYS_FILE_NAME = "org/apache/camel/component/crypto/pubringSubKeys.gpg";
private static final String SEC_KEY_RING_FILE_NAME = "org/apache/camel/component/crypto/secring.gpg";
private static final String PUB_KEY_RING_FILE_NAME = "org/apache/camel/component/crypto/pubring.gpg";
PGPDataFormat encryptor = new PGPDataFormat();
PGPDataFormat decryptor = new PGPDataFormat();
    @Before
    public void setUpEncryptorAndDecryptor() {
        // Configure the encrypt side: public keyring with subkeys, secret keyring for
        // signing, and the "keyflag" user id; integrity packets disabled.
        // the following keyring contains a primary key with KeyFlag "Certify" and a subkey for signing and a subkey for encryption
        encryptor.setKeyFileName(PUB_KEY_RING_SUBKEYS_FILE_NAME);
        encryptor.setSignatureKeyFileName("org/apache/camel/component/crypto/secringSubKeys.gpg");
        encryptor.setSignaturePassword("Abcd1234");
        encryptor.setKeyUserid("keyflag");
        encryptor.setSignatureKeyUserid("keyflag");
        encryptor.setIntegrity(false);
        encryptor.setFileName("fileNameABC");
        // Configure the decrypt side with the mirror-image keyrings/passphrase.
        // the following keyring contains a primary key with KeyFlag "Certify" and a subkey for signing and a subkey for encryption
        decryptor.setKeyFileName("org/apache/camel/component/crypto/secringSubKeys.gpg");
        decryptor.setSignatureKeyFileName(PUB_KEY_RING_SUBKEYS_FILE_NAME);
        decryptor.setPassword("Abcd1234");
        decryptor.setSignatureKeyUserid("keyflag");
    }
    // Classpath-relative path to the default public key ring used by the routes.
    protected String getKeyFileName() {
        return PUB_KEY_RING_FILE_NAME;
    }
    // Classpath-relative path to the secret key ring (decryption/signing).
    protected String getKeyFileNameSec() {
        return SEC_KEY_RING_FILE_NAME;
    }
    // Default user id used to select the encryption key from the ring.
    protected String getKeyUserId() {
        return "sdude@nowhere.net";
    }
protected List<String> getKeyUserIds() {
List<String> userids = new ArrayList<String>(2);
userids.add("second");
userids.add(getKeyUserId());
return userids;
}
protected List<String> getSignatureKeyUserIds() {
List<String> userids = new ArrayList<String>(2);
userids.add("second");
userids.add(getKeyUserId());
return userids;
}
    // Passphrase protecting the default secret key.
    protected String getKeyPassword() {
        return "sdude";
    }
    // JCE provider name: BouncyCastle.
    protected String getProvider() {
        return "BC";
    }
    // Symmetric cipher used for the round-trip tests.
    protected int getAlgorithm() {
        return SymmetricKeyAlgorithmTags.TRIPLE_DES;
    }
    // Hash algorithm used when signing.
    protected int getHashAlgorithm() {
        return HashAlgorithmTags.SHA256;
    }
    // Compression applied before encryption.
    protected int getCompressionAlgorithm() {
        return CompressionAlgorithmTags.BZIP2;
    }
    // --- Round-trip tests: each variant marshals (encrypts) via a different route
    // --- configuration and unmarshals (decrypts) back, asserting the payload survives.
    @Test
    public void testEncryption() throws Exception {
        doRoundTripEncryptionTests("direct:inline");
    }
    @Test
    public void testEncryption2() throws Exception {
        doRoundTripEncryptionTests("direct:inline2");
    }
    @Test
    public void testEncryptionArmor() throws Exception {
        // ASCII-armored output variant.
        doRoundTripEncryptionTests("direct:inline-armor");
    }
    @Test
    public void testEncryptionSigned() throws Exception {
        // Sign-then-encrypt variant.
        doRoundTripEncryptionTests("direct:inline-sign");
    }
    @Test
    public void testEncryptionKeyRingByteArray() throws Exception {
        // Keyrings supplied as in-memory byte arrays instead of file names.
        doRoundTripEncryptionTests("direct:key-ring-byte-array");
    }
    @Test
    public void testEncryptionSignedKeyRingByteArray() throws Exception {
        doRoundTripEncryptionTests("direct:sign-key-ring-byte-array");
    }
    @Test
    public void testSeveralSignerKeys() throws Exception {
        // Message signed by multiple signer keys.
        doRoundTripEncryptionTests("direct:several-signer-keys");
    }
    @Test
    public void testOneUserIdWithServeralKeys() throws Exception {
        // One user id resolving to several keys. (Method name typo "Serveral" kept for compatibility.)
        doRoundTripEncryptionTests("direct:one-userid-several-keys");
    }
    @Test
    public void testKeyAccess() throws Exception {
        // Keys supplied through a PGPPublicKeyAccess/PGPSecretKeyAccess-style accessor route.
        doRoundTripEncryptionTests("direct:key_access");
    }
@Test
public void testVerifyExceptionNoPublicKeyFoundCorrespondingToSignatureUserIds() throws Exception {
setupExpectations(context, 1, "mock:encrypted");
MockEndpoint exception = setupExpectations(context, 1, "mock:exception");
String payload = "Hi Alice, Be careful Eve is listening, signed Bob";
Map<String, Object> headers = getHeaders();
template.sendBodyAndHeaders("direct:verify_exception_sig_userids", payload, headers);
assertMockEndpointsSatisfied();
checkThrownException(exception, IllegalArgumentException.class, null, "No public key found for the key ID(s)");
}
@Test
public void testVerifyExceptionNoPassphraseSpecifiedForSignatureKeyUserId() throws Exception {
MockEndpoint exception = setupExpectations(context, 1, "mock:exception");
String payload = "Hi Alice, Be careful Eve is listening, signed Bob";
Map<String, Object> headers = new HashMap<String, Object>();
// add signature user id which does not have a passphrase
headers.put(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID, "userIDWithNoPassphrase");
// the following entry is necessary for the dynamic test
headers.put(PGPKeyAccessDataFormat.KEY_USERID, "second");
template.sendBodyAndHeaders("direct:several-signer-keys", payload, headers);
assertMockEndpointsSatisfied();
checkThrownException(exception, IllegalArgumentException.class, null, "No passphrase specified for signature key user ID");
}
    /**
     * You get three keys with the UserId "keyflag", a primary key and its two
     * sub-keys. The sub-key with KeyFlag {@link KeyFlags#SIGN_DATA} should be
     * used for signing and the sub-key with KeyFlag
     * {@link KeyFlags#ENCRYPT_COMMS} or {@link KeyFlags#ENCRYPT_STORAGE}
     * should be used for encryption/decryption.
     *
     * @throws Exception
     */
    @Test
    public void testKeyFlagSelectsCorrectKey() throws Exception {
        MockEndpoint mockKeyFlag = getMockEndpoint("mock:encrypted_keyflag");
        mockKeyFlag.setExpectedMessageCount(1);
        template.sendBody("direct:keyflag", "Test Message");
        assertMockEndpointsSatisfied();
        List<Exchange> exchanges = mockKeyFlag.getExchanges();
        assertEquals(1, exchanges.size());
        Exchange exchange = exchanges.get(0);
        Message inMess = exchange.getIn();
        assertNotNull(inMess);
        // must contain exactly one encryption key and one signature
        assertEquals(1, inMess.getHeader(PGPKeyAccessDataFormat.NUMBER_OF_ENCRYPTION_KEYS));
        assertEquals(1, inMess.getHeader(PGPKeyAccessDataFormat.NUMBER_OF_SIGNING_KEYS));
    }
    /**
     * You get three keys with the UserId "keyflag", a primary key and its two
     * sub-keys. The sub-key with KeyFlag {@link KeyFlags#SIGN_DATA} should be
     * used for signing and the sub-key with KeyFlag
     * {@link KeyFlags#ENCRYPT_COMMS} or {@link KeyFlags#ENCRYPT_STORAGE}
     * should be used for decryption.
     * <p>
     * Tests also the decryption and verifying part with the subkeys.
     *
     * @throws Exception
     */
    @Test
    public void testDecryptVerifyWithSubkey() throws Exception {
        // do not use doRoundTripEncryptionTests("direct:subkey"); because otherwise you get an error in the dynamic test
        String payload = "Test Message";
        MockEndpoint mockSubkey = getMockEndpoint("mock:unencrypted");
        mockSubkey.expectedBodiesReceived(payload);
        template.sendBody("direct:subkey", payload);
        assertMockEndpointsSatisfied();
    }
@Test
public void testEmptyBody() throws Exception {
String payload = "";
MockEndpoint mockSubkey = getMockEndpoint("mock:unencrypted");
mockSubkey.expectedBodiesReceived(payload);
template.sendBody("direct:subkey", payload);
assertMockEndpointsSatisfied();
}
@Test
public void testExceptionDecryptorIncorrectInputFormatNoPGPMessage() throws Exception {
String payload = "Not Correct Format";
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.expectedMessageCount(1);
template.sendBody("direct:subkeyUnmarshal", payload);
assertMockEndpointsSatisfied();
checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format.");
}
@Test
public void testExceptionDecryptorIncorrectInputFormatPGPSignedData() throws Exception {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
createSignature(bos);
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.expectedMessageCount(1);
template.sendBody("direct:subkeyUnmarshal", bos.toByteArray());
assertMockEndpointsSatisfied();
checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format.");
}
@Test
public void testExceptionDecryptorIncorrectInputNoCompression() throws Exception {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
createEncryptedNonCompressedData(bos, PUB_KEY_RING_SUBKEYS_FILE_NAME);
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.expectedMessageCount(1);
template.sendBody("direct:subkeyUnmarshal", bos.toByteArray());
assertMockEndpointsSatisfied();
checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format.");
}
@Test
public void testExceptionDecryptorNoKeyFound() throws Exception {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
createEncryptedNonCompressedData(bos, PUB_KEY_RING_FILE_NAME);
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.expectedMessageCount(1);
template.sendBody("direct:subkeyUnmarshal", bos.toByteArray());
assertMockEndpointsSatisfied();
checkThrownException(mock, PGPException.class, null,
"PGP message is encrypted with a key which could not be found in the Secret Keyring");
}
void createEncryptedNonCompressedData(ByteArrayOutputStream bos, String keyringPath) throws Exception, IOException, PGPException,
UnsupportedEncodingException {
PGPEncryptedDataGenerator encGen = new PGPEncryptedDataGenerator(new JcePGPDataEncryptorBuilder(SymmetricKeyAlgorithmTags.CAST5)
.setSecureRandom(new SecureRandom()).setProvider(getProvider()));
encGen.addMethod(new JcePublicKeyKeyEncryptionMethodGenerator(readPublicKey(keyringPath)));
OutputStream encOut = encGen.open(bos, new byte[512]);
PGPLiteralDataGenerator litData = new PGPLiteralDataGenerator();
OutputStream litOut = litData.open(encOut, PGPLiteralData.BINARY, PGPLiteralData.CONSOLE, new Date(), new byte[512]);
try {
litOut.write("Test Message Without Compression".getBytes("UTF-8"));
litOut.flush();
} finally {
IOHelper.close(litOut);
IOHelper.close(encOut, bos);
}
}
    /**
     * Writes a standalone PGP signature over the fixed text "Test Signature" to
     * {@code out}; used to feed the decryptor a well-formed PGP object that is
     * not an encrypted message.
     */
    private void createSignature(OutputStream out) throws Exception {
        PGPSecretKey pgpSec = readSecretKey();
        // Unlock the private key with the hard-coded test passphrase.
        PGPPrivateKey pgpPrivKey = pgpSec.extractPrivateKey(new JcePBESecretKeyDecryptorBuilder().setProvider(getProvider()).build(
                "sdude".toCharArray()));
        PGPSignatureGenerator sGen = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(pgpSec.getPublicKey().getAlgorithm(),
                HashAlgorithmTags.SHA1).setProvider(getProvider()));
        sGen.init(PGPSignature.BINARY_DOCUMENT, pgpPrivKey);
        BCPGOutputStream bOut = new BCPGOutputStream(out);
        InputStream fIn = new ByteArrayInputStream("Test Signature".getBytes("UTF-8"));
        // Feed the document byte-by-byte into the signature generator.
        int ch;
        while ((ch = fIn.read()) >= 0) {
            sGen.update((byte) ch);
        }
        fIn.close();
        // NOTE(review): bOut is not flushed/closed here — presumably the caller's
        // stream handling suffices for the in-memory test; confirm if reused elsewhere.
        sGen.generate().encode(bOut);
    }
    /**
     * Returns the first signing-capable secret key found in the test secret keyring
     * (provided by {@code getSecKeyRing()}).
     *
     * @throws IllegalArgumentException if the ring contains no signing key
     */
    static PGPSecretKey readSecretKey() throws Exception {
        InputStream input = new ByteArrayInputStream(getSecKeyRing());
        PGPSecretKeyRingCollection pgpSec = new PGPSecretKeyRingCollection(PGPUtil.getDecoderStream(input),
                new BcKeyFingerprintCalculator());
        // Raw iterators: this BouncyCastle API predates generified key-ring iteration.
        @SuppressWarnings("rawtypes")
        Iterator keyRingIter = pgpSec.getKeyRings();
        while (keyRingIter.hasNext()) {
            PGPSecretKeyRing keyRing = (PGPSecretKeyRing) keyRingIter.next();
            @SuppressWarnings("rawtypes")
            Iterator keyIter = keyRing.getSecretKeys();
            while (keyIter.hasNext()) {
                PGPSecretKey key = (PGPSecretKey) keyIter.next();
                if (key.isSigningKey()) {
                    return key;
                }
            }
        }
        throw new IllegalArgumentException("Can't find signing key in key ring.");
    }
    /**
     * Returns the first encryption-capable public key found in the keyring at
     * {@code keyringPath} (loaded via {@code getKeyRing}).
     *
     * @throws IllegalArgumentException if the ring contains no encryption key
     */
    static PGPPublicKey readPublicKey(String keyringPath) throws Exception {
        InputStream input = new ByteArrayInputStream(getKeyRing(keyringPath));
        PGPPublicKeyRingCollection pgpPub = new PGPPublicKeyRingCollection(PGPUtil.getDecoderStream(input),
                new BcKeyFingerprintCalculator());
        // Raw iterators: this BouncyCastle API predates generified key-ring iteration.
        @SuppressWarnings("rawtypes")
        Iterator keyRingIter = pgpPub.getKeyRings();
        while (keyRingIter.hasNext()) {
            PGPPublicKeyRing keyRing = (PGPPublicKeyRing) keyRingIter.next();
            @SuppressWarnings("rawtypes")
            Iterator keyIter = keyRing.getPublicKeys();
            while (keyIter.hasNext()) {
                PGPPublicKey key = (PGPPublicKey) keyIter.next();
                if (key.isEncryptionKey()) {
                    return key;
                }
            }
        }
        throw new IllegalArgumentException("Can't find encryption key in key ring.");
    }
@Test
public void testExceptionDecryptorIncorrectInputFormatSymmetricEncryptedData() throws Exception {
byte[] payload = "Not Correct Format".getBytes("UTF-8");
ByteArrayOutputStream bos = new ByteArrayOutputStream();
PGPEncryptedDataGenerator encGen = new PGPEncryptedDataGenerator(new JcePGPDataEncryptorBuilder(SymmetricKeyAlgorithmTags.CAST5)
.setSecureRandom(new SecureRandom()).setProvider(getProvider()));
encGen.addMethod(new JcePBEKeyEncryptionMethodGenerator("pw".toCharArray()));
OutputStream encOut = encGen.open(bos, new byte[1024]);
PGPCompressedDataGenerator comData = new PGPCompressedDataGenerator(CompressionAlgorithmTags.ZIP);
OutputStream comOut = new BufferedOutputStream(comData.open(encOut));
PGPLiteralDataGenerator litData = new PGPLiteralDataGenerator();
OutputStream litOut = litData.open(comOut, PGPLiteralData.BINARY, PGPLiteralData.CONSOLE, new Date(), new byte[1024]);
litOut.write(payload);
litOut.flush();
litOut.close();
comOut.close();
encOut.close();
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.expectedMessageCount(1);
template.sendBody("direct:subkeyUnmarshal", bos.toByteArray());
assertMockEndpointsSatisfied();
checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format.");
}
@Test
public void testExceptionForSignatureVerificationOptionNoSignatureAllowed() throws Exception {
decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_NO_SIGNATURE_ALLOWED);
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.expectedMessageCount(1);
template.sendBody("direct:subkey", "Test Message");
assertMockEndpointsSatisfied();
checkThrownException(mock, PGPException.class, null, "PGP message contains a signature although a signature is not expected");
}
@Test
public void testExceptionForSignatureVerificationOptionRequired() throws Exception {
encryptor.setSignatureKeyUserid(null); // no signature
decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED);
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.expectedMessageCount(1);
template.sendBody("direct:subkey", "Test Message");
assertMockEndpointsSatisfied();
checkThrownException(mock, PGPException.class, null, "PGP message does not contain any signatures although a signature is expected");
}
@Test
public void testSignatureVerificationOptionIgnore() throws Exception {
// encryptor is sending a PGP message with signature! Decryptor is ignoreing the signature
decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_IGNORE);
decryptor.setSignatureKeyUserids(null);
decryptor.setSignatureKeyFileName(null); // no public keyring! --> no signature validation possible
String payload = "Test Message";
MockEndpoint mock = getMockEndpoint("mock:unencrypted");
mock.expectedBodiesReceived(payload);
template.sendBody("direct:subkey", payload);
assertMockEndpointsSatisfied();
}
protected RouteBuilder[] createRouteBuilders() {
return new RouteBuilder[] {new RouteBuilder() {
public void configure() throws Exception {
onException(Exception.class).handled(true).to("mock:exception");
// START SNIPPET: pgp-format
// Public Key FileName
String keyFileName = getKeyFileName();
// Private Key FileName
String keyFileNameSec = getKeyFileNameSec();
// Keyring Userid Used to Encrypt
String keyUserid = getKeyUserId();
// Private key password
String keyPassword = getKeyPassword();
from("direct:inline").marshal().pgp(keyFileName, keyUserid).to("mock:encrypted").unmarshal()
.pgp(keyFileNameSec, null, keyPassword).to("mock:unencrypted");
// END SNIPPET: pgp-format
// START SNIPPET: pgp-format-header
PGPDataFormat pgpEncrypt = new PGPDataFormat();
pgpEncrypt.setKeyFileName(keyFileName);
pgpEncrypt.setKeyUserid(keyUserid);
pgpEncrypt.setProvider(getProvider());
pgpEncrypt.setAlgorithm(getAlgorithm());
pgpEncrypt.setCompressionAlgorithm(getCompressionAlgorithm());
PGPDataFormat pgpDecrypt = new PGPDataFormat();
pgpDecrypt.setKeyFileName(keyFileNameSec);
pgpDecrypt.setPassword(keyPassword);
pgpDecrypt.setProvider(getProvider());
pgpDecrypt.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_NO_SIGNATURE_ALLOWED);
from("direct:inline2").marshal(pgpEncrypt).to("mock:encrypted").unmarshal(pgpDecrypt).to("mock:unencrypted");
from("direct:inline-armor").marshal().pgp(keyFileName, keyUserid, null, true, true).to("mock:encrypted").unmarshal()
.pgp(keyFileNameSec, null, keyPassword, true, true).to("mock:unencrypted");
// END SNIPPET: pgp-format-header
// START SNIPPET: pgp-format-signature
PGPDataFormat pgpSignAndEncrypt = new PGPDataFormat();
pgpSignAndEncrypt.setKeyFileName(keyFileName);
pgpSignAndEncrypt.setKeyUserid(keyUserid);
pgpSignAndEncrypt.setSignatureKeyFileName(keyFileNameSec);
PGPPassphraseAccessor passphraseAccessor = getPassphraseAccessor();
pgpSignAndEncrypt.setSignatureKeyUserid("Super <sdude@nowhere.net>"); // must be the exact user Id because passphrase is searched in accessor
pgpSignAndEncrypt.setPassphraseAccessor(passphraseAccessor);
pgpSignAndEncrypt.setProvider(getProvider());
pgpSignAndEncrypt.setAlgorithm(getAlgorithm());
pgpSignAndEncrypt.setHashAlgorithm(getHashAlgorithm());
pgpSignAndEncrypt.setCompressionAlgorithm(getCompressionAlgorithm());
PGPDataFormat pgpVerifyAndDecrypt = new PGPDataFormat();
pgpVerifyAndDecrypt.setKeyFileName(keyFileNameSec);
pgpVerifyAndDecrypt.setPassword(keyPassword);
pgpVerifyAndDecrypt.setSignatureKeyFileName(keyFileName);
pgpVerifyAndDecrypt.setProvider(getProvider());
pgpVerifyAndDecrypt.setSignatureKeyUserid(keyUserid); // restrict verification to public keys with certain User ID
from("direct:inline-sign").marshal(pgpSignAndEncrypt).to("mock:encrypted").unmarshal(pgpVerifyAndDecrypt)
.to("mock:unencrypted");
// END SNIPPET: pgp-format-signature
// test verifying exception, no public key found corresponding to signature key userIds
from("direct:verify_exception_sig_userids").marshal(pgpSignAndEncrypt).to("mock:encrypted")
.setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERIDS).constant(Arrays.asList(new String[] {"wrong1", "wrong2" }))
.setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID).constant("wrongUserID").unmarshal(pgpVerifyAndDecrypt)
.to("mock:unencrypted");
/* ---- key ring as byte array -- */
// START SNIPPET: pgp-format-key-ring-byte-array
PGPDataFormat pgpEncryptByteArray = new PGPDataFormat();
pgpEncryptByteArray.setEncryptionKeyRing(getPublicKeyRing());
pgpEncryptByteArray.setKeyUserids(getKeyUserIds());
pgpEncryptByteArray.setProvider(getProvider());
pgpEncryptByteArray.setAlgorithm(SymmetricKeyAlgorithmTags.DES);
pgpEncryptByteArray.setCompressionAlgorithm(CompressionAlgorithmTags.UNCOMPRESSED);
PGPDataFormat pgpDecryptByteArray = new PGPDataFormat();
pgpDecryptByteArray.setEncryptionKeyRing(getSecKeyRing());
pgpDecryptByteArray.setPassphraseAccessor(passphraseAccessor);
pgpDecryptByteArray.setProvider(getProvider());
from("direct:key-ring-byte-array").streamCaching().marshal(pgpEncryptByteArray).to("mock:encrypted")
.unmarshal(pgpDecryptByteArray).to("mock:unencrypted");
// END SNIPPET: pgp-format-key-ring-byte-array
// START SNIPPET: pgp-format-signature-key-ring-byte-array
PGPDataFormat pgpSignAndEncryptByteArray = new PGPDataFormat();
pgpSignAndEncryptByteArray.setKeyUserid(keyUserid);
pgpSignAndEncryptByteArray.setSignatureKeyRing(getSecKeyRing());
pgpSignAndEncryptByteArray.setSignatureKeyUserid(keyUserid);
pgpSignAndEncryptByteArray.setSignaturePassword(keyPassword);
pgpSignAndEncryptByteArray.setProvider(getProvider());
pgpSignAndEncryptByteArray.setAlgorithm(SymmetricKeyAlgorithmTags.BLOWFISH);
pgpSignAndEncryptByteArray.setHashAlgorithm(HashAlgorithmTags.RIPEMD160);
pgpSignAndEncryptByteArray.setCompressionAlgorithm(CompressionAlgorithmTags.ZLIB);
PGPDataFormat pgpVerifyAndDecryptByteArray = new PGPDataFormat();
pgpVerifyAndDecryptByteArray.setPassphraseAccessor(passphraseAccessor);
pgpVerifyAndDecryptByteArray.setEncryptionKeyRing(getSecKeyRing());
pgpVerifyAndDecryptByteArray.setProvider(getProvider());
// restrict verification to public keys with certain User ID
pgpVerifyAndDecryptByteArray.setSignatureKeyUserids(getSignatureKeyUserIds());
pgpVerifyAndDecryptByteArray.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED);
from("direct:sign-key-ring-byte-array").streamCaching()
// encryption key ring can also be set as header
.setHeader(PGPDataFormat.ENCRYPTION_KEY_RING).constant(getPublicKeyRing()).marshal(pgpSignAndEncryptByteArray)
// it is recommended to remove the header immediately when it is no longer needed
.removeHeader(PGPDataFormat.ENCRYPTION_KEY_RING).to("mock:encrypted")
// signature key ring can also be set as header
.setHeader(PGPDataFormat.SIGNATURE_KEY_RING).constant(getPublicKeyRing()).unmarshal(pgpVerifyAndDecryptByteArray)
// it is recommended to remove the header immediately when it is no longer needed
.removeHeader(PGPDataFormat.SIGNATURE_KEY_RING).to("mock:unencrypted");
// END SNIPPET: pgp-format-signature-key-ring-byte-array
// START SNIPPET: pgp-format-several-signer-keys
PGPDataFormat pgpSignAndEncryptSeveralSignerKeys = new PGPDataFormat();
pgpSignAndEncryptSeveralSignerKeys.setKeyUserid(keyUserid);
pgpSignAndEncryptSeveralSignerKeys.setEncryptionKeyRing(getPublicKeyRing());
pgpSignAndEncryptSeveralSignerKeys.setSignatureKeyRing(getSecKeyRing());
List<String> signerUserIds = new ArrayList<String>();
signerUserIds.add("Third (comment third) <email@third.com>");
signerUserIds.add("Second <email@second.com>");
pgpSignAndEncryptSeveralSignerKeys.setSignatureKeyUserids(signerUserIds);
Map<String, String> userId2Passphrase = new HashMap<String, String>();
userId2Passphrase.put("Third (comment third) <email@third.com>", "sdude");
userId2Passphrase.put("Second <email@second.com>", "sdude");
PGPPassphraseAccessor passphraseAccessorSeveralKeys = new DefaultPGPPassphraseAccessor(userId2Passphrase);
pgpSignAndEncryptSeveralSignerKeys.setPassphraseAccessor(passphraseAccessorSeveralKeys);
PGPDataFormat pgpVerifyAndDecryptSeveralSignerKeys = new PGPDataFormat();
pgpVerifyAndDecryptSeveralSignerKeys.setPassphraseAccessor(passphraseAccessor);
pgpVerifyAndDecryptSeveralSignerKeys.setEncryptionKeyRing(getSecKeyRing());
pgpVerifyAndDecryptSeveralSignerKeys.setSignatureKeyRing(getPublicKeyRing());
pgpVerifyAndDecryptSeveralSignerKeys.setProvider(getProvider());
// only specify one expected signature
List<String> expectedSigUserIds = new ArrayList<String>();
expectedSigUserIds.add("Second <email@second.com>");
pgpVerifyAndDecryptSeveralSignerKeys.setSignatureKeyUserids(expectedSigUserIds);
from("direct:several-signer-keys").streamCaching().marshal(pgpSignAndEncryptSeveralSignerKeys).to("mock:encrypted")
.unmarshal(pgpVerifyAndDecryptSeveralSignerKeys).to("mock:unencrypted");
// END SNIPPET: pgp-format-several-signer-keys
// test encryption by several key and signing by serveral keys where the keys are specified by one User ID part
PGPDataFormat pgpSignAndEncryptOneUserIdWithServeralKeys = new PGPDataFormat();
pgpSignAndEncryptOneUserIdWithServeralKeys.setEncryptionKeyRing(getPublicKeyRing());
pgpSignAndEncryptOneUserIdWithServeralKeys.setSignatureKeyRing(getSecKeyRing());
// the two private keys have the same password therefore we do not need a passphrase accessor
pgpSignAndEncryptOneUserIdWithServeralKeys.setPassword(getKeyPassword());
PGPDataFormat pgpVerifyAndDecryptOneUserIdWithServeralKeys = new PGPDataFormat();
pgpVerifyAndDecryptOneUserIdWithServeralKeys.setPassword(getKeyPassword());
pgpVerifyAndDecryptOneUserIdWithServeralKeys.setEncryptionKeyRing(getSecKeyRing());
pgpVerifyAndDecryptOneUserIdWithServeralKeys.setSignatureKeyRing(getPublicKeyRing());
pgpVerifyAndDecryptOneUserIdWithServeralKeys.setProvider(getProvider());
pgpVerifyAndDecryptOneUserIdWithServeralKeys.setSignatureKeyUserids(expectedSigUserIds);
from("direct:one-userid-several-keys")
// there are two keys which have a User ID which contains the string "econd"
.setHeader(PGPKeyAccessDataFormat.KEY_USERID)
.constant("econd")
.setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID)
.constant("econd")
.marshal(pgpSignAndEncryptOneUserIdWithServeralKeys)
// it is recommended to remove the header immediately when it is no longer needed
.removeHeader(PGPKeyAccessDataFormat.KEY_USERID)
.removeHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID)
.to("mock:encrypted")
// only specify one expected signature key, to check the first signature
.setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID)
.constant("Second <email@second.com>")
.unmarshal(pgpVerifyAndDecryptOneUserIdWithServeralKeys)
// do it again but now check the second signature key
// there are two keys which have a User ID which contains the string "econd"
.setHeader(PGPKeyAccessDataFormat.KEY_USERID).constant("econd").setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID)
.constant("econd").marshal(pgpSignAndEncryptOneUserIdWithServeralKeys)
// it is recommended to remove the header immediately when it is no longer needed
.removeHeader(PGPKeyAccessDataFormat.KEY_USERID).removeHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID)
// only specify one expected signature key, to check the second signature
.setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID).constant("Third (comment third) <email@third.com>")
.unmarshal(pgpVerifyAndDecryptOneUserIdWithServeralKeys).to("mock:unencrypted");
}
}, new RouteBuilder() {
public void configure() throws Exception {
onException(Exception.class).handled(true).to("mock:exception");
from("direct:keyflag").marshal(encryptor).to("mock:encrypted_keyflag");
// test that the correct subkey is selected during decrypt and verify
from("direct:subkey").marshal(encryptor).to("mock:encrypted").unmarshal(decryptor).to("mock:unencrypted");
from("direct:subkeyUnmarshal").unmarshal(decryptor).to("mock:unencrypted");
}
}, new RouteBuilder() {
public void configure() throws Exception {
PGPPublicKeyAccessor publicKeyAccessor = new DefaultPGPPublicKeyAccessor(getPublicKeyRing());
//password cannot be set dynamically!
PGPSecretKeyAccessor secretKeyAccessor = new DefaultPGPSecretKeyAccessor(getSecKeyRing(), "sdude", getProvider());
PGPKeyAccessDataFormat dfEncryptSignKeyAccess = new PGPKeyAccessDataFormat();
dfEncryptSignKeyAccess.setPublicKeyAccessor(publicKeyAccessor);
dfEncryptSignKeyAccess.setSecretKeyAccessor(secretKeyAccessor);
dfEncryptSignKeyAccess.setKeyUserid(getKeyUserId());
dfEncryptSignKeyAccess.setSignatureKeyUserid(getKeyUserId());
PGPKeyAccessDataFormat dfDecryptVerifyKeyAccess = new PGPKeyAccessDataFormat();
dfDecryptVerifyKeyAccess.setPublicKeyAccessor(publicKeyAccessor);
dfDecryptVerifyKeyAccess.setSecretKeyAccessor(secretKeyAccessor);
dfDecryptVerifyKeyAccess.setSignatureKeyUserid(getKeyUserId());
from("direct:key_access").marshal(dfEncryptSignKeyAccess).to("mock:encrypted").unmarshal(dfDecryptVerifyKeyAccess)
.to("mock:unencrypted");
}
} };
}
/**
 * Returns the raw bytes of the public key ring test resource
 * ({@code PUB_KEY_RING_FILE_NAME}) loaded from the classpath.
 */
public static byte[] getPublicKeyRing() throws Exception {
    return getKeyRing(PUB_KEY_RING_FILE_NAME);
}
/**
 * Returns the raw bytes of the secret key ring test resource
 * ({@code SEC_KEY_RING_FILE_NAME}) loaded from the classpath.
 */
public static byte[] getSecKeyRing() throws Exception {
    return getKeyRing(SEC_KEY_RING_FILE_NAME);
}
/**
 * Loads a key ring resource from the classpath into a byte array.
 *
 * @param fileName classpath resource name of the key ring file
 * @return the raw key ring bytes
 * @throws IOException if the resource is missing or cannot be read
 */
private static byte[] getKeyRing(String fileName) throws IOException {
    InputStream is = PGPDataFormatTest.class.getClassLoader().getResourceAsStream(fileName);
    // getResourceAsStream returns null for a missing resource; fail fast with a
    // descriptive message instead of an NPE inside the copy helper
    if (is == null) {
        throw new IOException("Key ring resource not found on classpath: " + fileName);
    }
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    // copyAndCloseInput closes the input stream for us
    IOHelper.copyAndCloseInput(is, output);
    // closing a ByteArrayOutputStream is a no-op, kept for symmetry with the original
    output.close();
    return output.toByteArray();
}
/**
 * Builds a passphrase accessor that maps the test key's user ID to its
 * passphrase.
 */
public static PGPPassphraseAccessor getPassphraseAccessor() {
    return new DefaultPGPPassphraseAccessor(
            Collections.singletonMap("Super <sdude@nowhere.net>", "sdude"));
}
/**
 * Asserts that the first exchange received by the given mock endpoint carries
 * a caught exception of exactly the class {@code cl}, optionally checking
 * that its message contains {@code expectedMessagePart} and that its cause is
 * exactly {@code expectedCauseClass}.
 *
 * @param mock endpoint that received the failing exchange
 * @param cl exact expected exception class
 * @param expectedCauseClass exact expected cause class, or {@code null} to skip the cause check
 * @param expectedMessagePart substring expected in the message, or {@code null} to skip the message check
 */
public static void checkThrownException(MockEndpoint mock, Class<? extends Exception> cl,
        Class<? extends Exception> expectedCauseClass, String expectedMessagePart) throws Exception {
    // fail with a clear message instead of an IndexOutOfBoundsException when no exchange arrived
    if (mock.getExchanges().isEmpty()) {
        fail("No exchange received by mock endpoint, expected exception " + cl.getName());
    }
    Exception e = (Exception) mock.getExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT);
    assertNotNull("Expected exception " + cl.getName() + " missing", e);
    if (e.getClass() != cl) {
        String stackTrace = getStrackTrace(e);
        fail("Exception " + cl.getName() + " expected, but was " + e.getClass().getName() + ": " + stackTrace);
    }
    if (expectedMessagePart != null) {
        if (e.getMessage() == null) {
            fail("Expected exception does not contain a message. Stack trace: " + getStrackTrace(e));
        } else {
            if (!e.getMessage().contains(expectedMessagePart)) {
                fail("Expected exception message does not contain the expected message part " + expectedMessagePart
                        + ". Stack trace: " + getStrackTrace(e));
            }
        }
    }
    if (expectedCauseClass != null) {
        Throwable cause = e.getCause();
        assertNotNull("Expected cause exception " + expectedCauseClass.getName() + " missing", cause);
        if (expectedCauseClass != cause.getClass()) {
            fail("Cause exception " + expectedCauseClass.getName() + " expected, but was " + cause.getClass().getName() + ": "
                    + getStrackTrace(e));
        }
    }
}
/**
 * Renders the full stack trace of the given exception as a string.
 *
 * <p>Uses a {@link java.io.StringWriter} so no byte/charset conversion is
 * involved. The previous implementation printed through
 * {@code PrintWriter(OutputStream)}, which encodes with the platform default
 * charset, but then decoded the bytes as UTF-8 — garbling non-ASCII exception
 * messages on platforms whose default charset is not UTF-8.
 *
 * @param e exception to render
 * @return the stack trace text, including causes
 * @throws UnsupportedEncodingException never thrown anymore; kept for caller compatibility
 */
public static String getStrackTrace(Exception e) throws UnsupportedEncodingException {
    // fully qualified to avoid touching the file's import block
    java.io.StringWriter buffer = new java.io.StringWriter();
    PrintWriter w = new PrintWriter(buffer);
    e.printStackTrace(w);
    w.close();
    return buffer.toString();
}
}
|
|
package noo.promise;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.JavaScriptObject;
import java.util.ArrayList;
import java.util.List;
/**
* @author Tal Shani
*/
/**
 * {@link Promise} implementation usable both in GWT script mode (handler
 * lists backed by a native JS array) and in plain JVM mode (handler lists
 * backed by an {@link ArrayList}, e.g. for JRE unit tests).
 *
 * <p>A promise starts {@code PENDING} and settles at most once, to either
 * {@code FULFILLED} (with a value) or {@code REJECTED} (with a
 * {@link Throwable} reason). Handlers registered while pending are queued and
 * fired on the transition; handlers registered after settlement are scheduled
 * asynchronously via {@link Immediate}. Exceptions thrown by handlers are
 * routed to the uncaught-exception handler configured on {@link Promises}.
 *
 * @author Tal Shani
 */
final class PromiseEmul<T> implements Promise<T> {

    /**
     * Callback handed to a {@link PromiseResolver}; guarantees that only the
     * first resolve/reject call takes effect — later calls are ignored.
     */
    class PromiseCallbackImpl implements PromiseCallback<T> {
        // flipped on the first call so the promise settles at most once
        private boolean done = false;

        @Override
        public void reject(Throwable reason) {
            if (!done) {
                done = true;
                PromiseEmul.this.reject(reason);
            }
        }

        @Override
        public void resolveValue(T value) {
            if (!done) {
                done = true;
                PromiseEmul.this.resolveValue(value);
            }
        }

        @Override
        public void resolvePromise(Promise<T> promise) {
            if (!done) {
                done = true;
                PromiseEmul.this.resolvePromise(promise);
            }
        }
    }

    /** Picks the native (script mode) or JVM-backed handler list. */
    private static <T> HandlersCollection<T> createHandlersCollection() {
        return GWT.isScript() ? NativeHandlersCollection.<T>create() : new JVMHandlersCollection<T>();
    }

    // rejection reason; only meaningful once state == REJECTED
    private Throwable reason = null;
    private STATE state = STATE.PENDING;
    // handlers queued while PENDING; nulled out after the promise settles
    private HandlersCollection<T> successSubscribers = PromiseEmul.createHandlersCollection();
    private HandlersCollection<Throwable> errorSubscribers = PromiseEmul.createHandlersCollection();
    // fulfillment value; only meaningful once state == FULFILLED
    private T value = null;
    private final PromiseLoggingHelper logger = new PromiseLoggingHelper();

    public PromiseEmul(PromiseResolver<T> resolver) {
        doResolve(resolver);
    }

    @Override
    public Promise<T> then(final PromiseHandler<? super T> onFulfilled, final PromiseHandler<Throwable> onRejected) {
        logger.enterThen(onFulfilled, onRejected);
        return new PromiseEmul<T>(new PromiseResolver<T>() {
            @Override
            public void resolve(final PromiseCallback<T> callback) {
                PromiseHandler<T> successHandler = onFulfilled != null ? new PromiseHandler<T>() {
                    @Override
                    public void handle(T value) {
                        try {
                            logger.callingThenHandler(onFulfilled);
                            // the derived promise is resolved before the user handler runs
                            callback.resolveValue(value);
                            onFulfilled.handle(value);
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                } : new PromiseHandler<T>() {
                    @Override
                    public void handle(T value) {
                        try {
                            callback.resolveValue(value);
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                };
                PromiseHandler<Throwable> errorHandler = onRejected != null ? new PromiseHandler<Throwable>() {
                    @Override
                    public void handle(Throwable value) {
                        try {
                            logger.callingCatchHandler(onRejected);
                            onRejected.handle(value);
                            // the derived promise stays rejected even after the handler ran
                            callback.reject(value);
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                } : new PromiseHandler<Throwable>() {
                    @Override
                    public void handle(Throwable value) {
                        try {
                            callback.reject(value);
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                };
                handle(successHandler, errorHandler);
            }
        });
    }

    @Override
    public Promise<T> catchIt(PromiseHandler<Throwable> onRejected) {
        return then(null, onRejected);
    }

    @Override
    public Promise<T> then(PromiseHandler<? super T> onFulfilled) {
        return then(onFulfilled, null);
    }

    @Override
    public <R> Promise<R> then(final PromiseTransformingHandler<R, T> onFulfilled) {
        logger.enterThen(onFulfilled);
        return new PromiseEmul<R>(new PromiseResolver<R>() {
            @Override
            public void resolve(final PromiseCallback<R> callback) {
                handle(new PromiseHandler<T>() {
                    @Override
                    public void handle(T value) {
                        try {
                            logger.callingThenHandler(onFulfilled);
                            // chain: adopt the state of the promise returned by the handler
                            callback.resolvePromise(onFulfilled.handle(value));
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                }, new PromiseHandler<Throwable>() {
                    @Override
                    public void handle(Throwable value) {
                        try {
                            callback.reject(value);
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                });
            }
        });
    }

    @Override
    public Promise<T> catchIt(final PromiseTransformingHandler<T, Throwable> onRejected) {
        logger.enterCatch(onRejected);
        return new PromiseEmul<T>(new PromiseResolver<T>() {
            @Override
            public void resolve(final PromiseCallback<T> callback) {
                handle(new PromiseHandler<T>() {
                    @Override
                    public void handle(T value) {
                        try {
                            callback.resolveValue(value);
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                }, new PromiseHandler<Throwable>() {
                    @Override
                    public void handle(Throwable value) {
                        try {
                            logger.callingCatchHandler(onRejected);
                            // recovery: adopt the state of the promise returned by the handler
                            callback.resolvePromise(onRejected.handle(value));
                        } catch (Throwable t) {
                            catchException(t);
                            callback.reject(t);
                        }
                    }
                });
            }
        });
    }

    /**
     * Creates a resolver that will resolve its callback once the given promise
     * is resolved (i.e. this promise adopts the given promise's state).
     *
     * @param promise the promise whose outcome is forwarded
     * @return a resolver forwarding fulfillment and rejection to the callback
     */
    private PromiseResolver<T> createResolver(final Promise<T> promise) {
        return new PromiseResolver<T>() {
            @Override
            public void resolve(final PromiseCallback<T> callback) {
                promise.then(new PromiseHandler<T>() {
                    @Override
                    public void handle(T value) {
                        callback.resolveValue(value);
                    }
                }).catchIt(new PromiseHandler<Throwable>() {
                    @Override
                    public void handle(Throwable value) {
                        // BUG FIX: forward the adopted promise's rejection reason
                        // ("value"); the old code passed the enclosing promise's
                        // "reason" field, which is still null at this point.
                        callback.reject(value);
                    }
                });
            }
        };
    }

    /** Runs the resolver, converting a thrown exception into a rejection. */
    private void doResolve(PromiseResolver<T> fn) {
        PromiseCallbackImpl callback = new PromiseCallbackImpl();
        try {
            fn.resolve(callback);
        } catch (Throwable throwable) {
            callback.reject(throwable);
            catchException(throwable);
        }
    }

    /** Transitions to the given settled state and fires the queued handlers. */
    private void fireHandlers(STATE state) {
        // only a PENDING -> settled transition is allowed, and only once
        if (state == STATE.PENDING || this.state != STATE.PENDING) return;
        this.state = state;
        // because we changed the state no more subscriber will join the subscribers array
        if (state == STATE.FULFILLED) {
            int length = successSubscribers.length();
            for (int i = 0; i < length; i++) {
                handleFulfilled(successSubscribers.get(i));
            }
        } else {
            int length = errorSubscribers.length();
            for (int i = 0; i < length; i++) {
                handleRejected(errorSubscribers.get(i));
            }
        }
        // release the queues; handle() no longer touches them once settled
        successSubscribers = null;
        errorSubscribers = null;
    }

    private void fulfill(T value) {
        this.value = value;
        fireHandlers(STATE.FULFILLED);
    }

    /**
     * Registers the given handlers: queued while PENDING, otherwise the
     * matching handler is invoked asynchronously via {@link Immediate}.
     */
    private void handle(final PromiseHandler<T> onFulfilled, final PromiseHandler<Throwable> onRejected) {
        if (state == STATE.PENDING) {
            if (onFulfilled != null) successSubscribers.push(onFulfilled);
            if (onRejected != null) errorSubscribers.push(onRejected);
        } else if (state == STATE.FULFILLED) {
            if (onFulfilled != null) {
                Immediate.setImmediate(new ImmediateCommand() {
                    @Override
                    public void execute() {
                        try {
                            onFulfilled.handle(value);
                        } catch (Throwable e) {
                            catchException(e);
                        }
                    }
                });
            }
        } else if (state == STATE.REJECTED) {
            if (onRejected != null) {
                Immediate.setImmediate(new ImmediateCommand() {
                    @Override
                    public void execute() {
                        try {
                            onRejected.handle(reason);
                        } catch (Throwable e) {
                            catchException(e);
                        }
                    }
                });
            }
        }
    }

    /** Reports a handler exception to the global uncaught-exception handler, if any. */
    private void catchException(Throwable e) {
        try {
            GWT.UncaughtExceptionHandler uncaughtExceptionHandler = Promises.getUncaughtExceptionHandler();
            if (uncaughtExceptionHandler != null) {
                uncaughtExceptionHandler.onUncaughtException(e);
            }
        } catch (Throwable ignore) {
            // deliberately ignored: a failing uncaught-exception handler must not propagate
        }
    }

    private void handleFulfilled(PromiseHandler<T> handler) {
        try {
            handler.handle(value);
        } catch (Throwable e) {
            catchException(e);
        }
    }

    private void handleRejected(PromiseHandler<Throwable> handler) {
        try {
            handler.handle(reason);
        } catch (Throwable e) {
            catchException(e);
        }
    }

    private void reject(Throwable reason) {
        this.reason = reason;
        fireHandlers(STATE.REJECTED);
    }

    private void resolveValue(T value) {
        try {
            fulfill(value);
        } catch (Exception e) {
            reject(e);
        }
    }

    /** Adopts the state of the given promise. */
    private void resolvePromise(Promise<T> promise) {
        try {
            PromiseResolver<T> resolver = createResolver(promise);
            doResolve(resolver);
        } catch (Exception e) {
            reject(e);
        }
    }

    static enum STATE {
        PENDING, FULFILLED, REJECTED;
    }

    /** Minimal append-only list abstraction shared by the JS and JVM backends. */
    static interface HandlersCollection<T> {
        int length();

        PromiseHandler<T> get(int i);

        void push(PromiseHandler<T> handler);
    }

    /** Script-mode backend: a JavaScriptObject overlay over a native JS array. */
    static final class NativeHandlersCollection<T> extends JavaScriptObject implements HandlersCollection<T> {
        protected NativeHandlersCollection() {
        }

        public static <T> NativeHandlersCollection<T> create() {
            return JavaScriptObject.createArray().cast();
        }

        public native int length() /*-{
            return this.length;
        }-*/;

        public native PromiseHandler<T> get(int i) /*-{
            return this[i];
        }-*/;

        public native void push(PromiseHandler<T> handler) /*-{
            this[this.length] = handler;
        }-*/;
    }

    /** JVM backend over an ArrayList, used when not running as compiled script. */
    static final class JVMHandlersCollection<T> implements HandlersCollection<T> {
        private final List<PromiseHandler<T>> ar = new ArrayList<PromiseHandler<T>>();

        public int length() {
            return ar.size();
        }

        public PromiseHandler<T> get(int i) {
            return ar.get(i);
        }

        public void push(PromiseHandler<T> handler) {
            // BUG FIX: List.set(size(), x) always throws IndexOutOfBoundsException
            // (set requires index < size); append with add() like the native push.
            ar.add(handler);
        }
    }
}
|
|
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.features.project.intellij;
import static org.junit.Assert.assertThat;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.features.project.intellij.lang.android.AndroidResourceFolder;
import com.facebook.buck.features.project.intellij.model.folders.ExcludeFolder;
import com.facebook.buck.features.project.intellij.model.folders.IjFolder;
import com.facebook.buck.features.project.intellij.model.folders.JavaResourceFolder;
import com.facebook.buck.features.project.intellij.model.folders.SourceFolder;
import com.facebook.buck.features.project.intellij.model.folders.TestFolder;
import com.facebook.buck.jvm.core.JavaPackageFinder;
import com.facebook.buck.jvm.java.DefaultJavaPackageFinder;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import org.hamcrest.Matchers;
import org.junit.Test;
/**
 * Tests for {@code IjSourceRootSimplifier}: folders of the same type and
 * package hierarchy should be merged up to a common ancestor (bounded by the
 * simplification limit), while folders that differ in type, package mapping,
 * resources root, or coalescing behavior must be kept apart.
 */
public class IjSourceRootSimplifierTest {

    // ---- folder factory helpers -------------------------------------------

    /** Folder excluded from the project model. */
    private static IjFolder buildExcludeFolder(String path) {
        return new ExcludeFolder(Paths.get(path));
    }

    /** Source folder whose package prefix follows the wants-package-prefix convention. */
    private static IjFolder buildSourceFolder(String path) {
        return new SourceFolder(Paths.get(path), true);
    }

    /** Source folder without a package prefix. */
    private static IjFolder buildNoPrefixSourceFolder(String path) {
        return new SourceFolder(Paths.get(path));
    }

    /** Test-sources folder with a package prefix. */
    private static IjFolder buildTestFolder(String path) {
        return new TestFolder(Paths.get(path), true);
    }

    /** Android resource folder — a folder type that never coalesces with siblings. */
    private static IjFolder buildNonCoalescingFolder(String path) {
        return new AndroidResourceFolder(Paths.get(path));
    }

    /** Java resource folder rooted at the given resources root. */
    private static IjFolder buildJavaResourceFolder(String path, String resourcesRoot) {
        return new JavaResourceFolder(
                Paths.get(path), Paths.get(resourcesRoot), ImmutableSortedSet.of());
    }

    /** Package finder whose package path equals the folder path (identity mapping). */
    private static JavaPackageFinder fakePackageFinder() {
        return fakePackageFinder(ImmutableMap.of());
    }

    /**
     * Package finder that maps the folders listed in {@code packageMap} to the
     * given package paths and falls back to the identity mapping otherwise.
     */
    private static JavaPackageFinder fakePackageFinder(ImmutableMap<Path, Path> packageMap) {
        return new JavaPackageFinder() {
            @Override
            public Path findJavaPackageFolder(Path pathRelativeToProjectRoot) {
                // The Path given here is a path to a file, not a folder.
                pathRelativeToProjectRoot = Objects.requireNonNull(pathRelativeToProjectRoot.getParent());
                if (packageMap.containsKey(pathRelativeToProjectRoot)) {
                    return packageMap.get(pathRelativeToProjectRoot);
                }
                return pathRelativeToProjectRoot;
            }

            @Override
            public String findJavaPackage(Path pathRelativeToProjectRoot) {
                return DefaultJavaPackageFinder.findJavaPackageWithPackageFolder(
                        findJavaPackageFolder(pathRelativeToProjectRoot));
            }

            @Override
            public String findJavaPackage(BuildTarget buildTarget) {
                // "removed" is a dummy file segment; findJavaPackageFolder strips it again
                return findJavaPackage(buildTarget.getBasePath().resolve("removed"));
            }
        };
    }

    // ---- merge behavior ---------------------------------------------------

    @Test
    public void testSameTypeAndPackageAreMerged() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder left = buildSourceFolder("src/left");
        IjFolder right = buildSourceFolder("src/right");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(left, right), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.contains(buildSourceFolder("src")));
    }

    @Test
    public void testSameTypeAndPackageAreMergedWithParent() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder left = buildSourceFolder("src/left");
        IjFolder right = buildSourceFolder("src/right");
        IjFolder parent = buildSourceFolder("src");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(left, right, parent), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.contains(parent));
    }

    @Test
    public void testMergedResourceFoldersShareSameResourcesRoot() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder left = buildJavaResourceFolder("res/left", "res");
        IjFolder right = buildJavaResourceFolder("res/right", "res");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(left, right), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.contains(buildJavaResourceFolder("res", "res")));
    }

    @Test
    public void testSinglePathElement() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder src = buildSourceFolder("src");
        assertThat(
                simplifier.simplify(0, ImmutableSet.of(src), Paths.get(""), ImmutableSet.of()).values(),
                Matchers.contains(src));
    }

    @Test
    public void testSinglePathElementMergesIntoParent() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder parent = buildSourceFolder("src");
        IjFolder child = buildSourceFolder("src/a");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(parent, child), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.contains(parent));
    }

    // ---- simplification limit: merging never goes above the given depth ----

    @Test
    public void testSimplificationLimit0() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder folder = buildSourceFolder("a/b/c/d/e/f/g");
        assertThat(
                simplifier.simplify(0, ImmutableSet.of(folder), Paths.get(""), ImmutableSet.of()).values(),
                Matchers.contains(buildSourceFolder("a")));
    }

    @Test
    public void testSimplificationLimit4() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder folder = buildSourceFolder("a/b/c/d/e/f/g");
        assertThat(
                simplifier.simplify(4, ImmutableSet.of(folder), Paths.get(""), ImmutableSet.of()).values(),
                Matchers.contains(buildSourceFolder("a/b/c/d")));
    }

    @Test
    public void testSimplificationLimit10() {
        // limit deeper than the folder itself: nothing to simplify
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder folder = buildSourceFolder("a/b/c/d/e/f/g");
        assertThat(
                simplifier.simplify(10, ImmutableSet.of(folder), Paths.get(""), ImmutableSet.of()).values(),
                Matchers.contains(buildSourceFolder("a/b/c/d/e/f/g")));
    }

    // ---- non-merge behavior -----------------------------------------------

    @Test
    public void testDifferentTypeAreNotMerged() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder leftSource = buildSourceFolder("src/left");
        IjFolder rightTest = buildTestFolder("src/right");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(leftSource, rightTest), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(buildSourceFolder("src"), rightTest));
    }

    @Test
    public void testDifferentTypeAreNotMergedWithParent() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder parent = buildSourceFolder("src");
        IjFolder leftSource = buildNoPrefixSourceFolder("src/left");
        IjFolder rightTest = buildTestFolder("src/right");
        assertThat(
                simplifier
                    .simplify(
                        0, ImmutableSet.of(parent, leftSource, rightTest), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(parent, leftSource, rightTest));
    }

    @Test
    public void testDifferentTypeAreNotMergedWhileSameOnesAre() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder aaaSource = buildSourceFolder("a/a/a");
        IjFolder aaaaSource = buildSourceFolder("a/a/a/a");
        IjFolder aabSource = buildSourceFolder("a/a/b");
        IjFolder abSource = buildSourceFolder("a/b");
        IjFolder acTest = buildTestFolder("a/c");
        IjFolder adaTest = buildTestFolder("a/d/a");
        ImmutableCollection<IjFolder> mergedFolders =
            simplifier
                .simplify(
                    0,
                    ImmutableSet.of(aaaSource, aaaaSource, aabSource, abSource, acTest, adaTest),
                    Paths.get(""),
                    ImmutableSet.of())
                .values();
        // sources merge up to "a"; tests merge only up to "a/d" (and "a/c" stays)
        IjFolder aSource = buildSourceFolder("a");
        IjFolder adTest = buildTestFolder("a/d");
        assertThat(mergedFolders, Matchers.containsInAnyOrder(aSource, acTest, adTest));
    }

    @Test
    public void testDifferentResourcesRootsAreNotMerged() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder left = buildJavaResourceFolder("res/test/left", "res/test");
        IjFolder right = buildJavaResourceFolder("res/test/right", "res");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(left, right), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(left, right));
    }

    @Test
    public void testMergingIntoBiggerNumberOfSourceFolders() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder aaSource = buildSourceFolder("a/a");
        IjFolder abSource = buildSourceFolder("a/b");
        IjFolder acTest = buildTestFolder("a/c");
        ImmutableCollection<IjFolder> mergedFolders =
            simplifier
                .simplify(
                    0, ImmutableSet.of(aaSource, abSource, acTest), Paths.get(""), ImmutableSet.of())
                .values();
        // the majority type (source) wins the parent
        IjFolder aSource = buildSourceFolder("a");
        assertThat(mergedFolders, Matchers.containsInAnyOrder(aSource, acTest));
    }

    @Test
    public void testMergingIntoBiggerNumberOfTestFolders() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder aaSource = buildSourceFolder("a/a");
        IjFolder abSource = buildSourceFolder("a/b");
        IjFolder acTest = buildTestFolder("a/c");
        IjFolder adTest = buildTestFolder("a/d");
        IjFolder aeTest = buildTestFolder("a/e");
        ImmutableCollection<IjFolder> mergedFolders =
            simplifier
                .simplify(
                    0,
                    ImmutableSet.of(aaSource, abSource, acTest, adTest, aeTest),
                    Paths.get(""),
                    ImmutableSet.of())
                .values();
        // the majority type (test) wins the parent
        IjFolder aTest = buildTestFolder("a");
        assertThat(mergedFolders, Matchers.containsInAnyOrder(aaSource, abSource, aTest));
    }

    @Test
    public void testDifferentTypesAreNotMergedIntoParent() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder aSource = buildSourceFolder("a");
        IjFolder aaaSource = buildSourceFolder("a/a/a");
        IjFolder aaaaSource = buildSourceFolder("a/a/a/a");
        IjFolder aabSource = buildSourceFolder("a/a/b");
        IjFolder abSource = buildSourceFolder("a/b");
        IjFolder acTest = buildTestFolder("a/c");
        IjFolder adaTest = buildTestFolder("a/d/a");
        ImmutableCollection<IjFolder> mergedFolders =
            simplifier
                .simplify(
                    0,
                    ImmutableSet.of(
                        aSource, aaaSource, aaaaSource, aabSource, abSource, acTest, adaTest),
                    Paths.get(""),
                    ImmutableSet.of())
                .values();
        IjFolder adTest = buildTestFolder("a/d");
        assertThat(mergedFolders, Matchers.containsInAnyOrder(aSource, acTest, adTest));
    }

    @Test
    public void testDifferentPackageHierarchiesAreNotMerged() {
        IjSourceRootSimplifier simplifier =
            new IjSourceRootSimplifier(
                fakePackageFinder(
                    ImmutableMap.of(
                        Paths.get("src/left"), Paths.get("onething"),
                        Paths.get("src/right"), Paths.get("another"))));
        IjFolder leftSource = buildSourceFolder("src/left");
        IjFolder rightSource = buildTestFolder("src/right");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(leftSource, rightSource), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(leftSource, rightSource));
    }

    @Test
    public void testDifferentPackageHierarchiesAreNotMergedIntoParent() {
        IjSourceRootSimplifier simplifier =
            new IjSourceRootSimplifier(
                fakePackageFinder(
                    ImmutableMap.of(
                        Paths.get("src"), Paths.get("onething"),
                        Paths.get("src/left"), Paths.get("onething/left"),
                        Paths.get("src/right"), Paths.get("another"))));
        IjFolder parentSource = buildSourceFolder("src");
        IjFolder leftSource = buildSourceFolder("src/left");
        IjFolder rightSource = buildTestFolder("src/right");
        assertThat(
                simplifier
                    .simplify(
                        0,
                        ImmutableSet.of(parentSource, leftSource, rightSource),
                        Paths.get(""),
                        ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(parentSource, rightSource));
    }

    @Test
    public void testDifferentResourcesRootAreNotMergedIntoParent() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder parent = buildJavaResourceFolder("res/test", "res/test");
        IjFolder left = buildJavaResourceFolder("res/test/left", "res/test");
        IjFolder right = buildJavaResourceFolder("res/test/right", "res");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(parent, left, right), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(parent, right));
    }

    @Test
    public void testShortPackagesAreMerged() {
        IjSourceRootSimplifier simplifier =
            new IjSourceRootSimplifier(
                fakePackageFinder(
                    ImmutableMap.of(
                        Paths.get("r/x/a/a"), Paths.get("a/a"),
                        Paths.get("r/x/a/b"), Paths.get("a/b"))));
        IjFolder aSource = buildSourceFolder("r/x/a/a");
        IjFolder bSource = buildSourceFolder("r/x/a/b");
        assertThat(
                simplifier
                    .simplify(0, ImmutableSet.of(aSource, bSource), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.contains(buildSourceFolder("r/x")));
    }

    // ---- exclude folders --------------------------------------------------

    @Test
    public void testExcludeFoldersAreNotMerged() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder leftSource = buildSourceFolder("src/left");
        IjFolder aaExclude = buildExcludeFolder("src/a/a");
        IjFolder abExclude = buildExcludeFolder("src/a/b");
        assertThat(
                simplifier
                    .simplify(
                        0,
                        ImmutableSet.of(leftSource, abExclude, aaExclude),
                        Paths.get(""),
                        ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(buildSourceFolder("src"), aaExclude, abExclude));
    }

    @Test
    public void testExcludeFoldersAreMergedIntoParent() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder leftSource = buildSourceFolder("src/left");
        IjFolder aExclude = buildExcludeFolder("src/a");
        IjFolder aaExclude = buildExcludeFolder("src/a/a");
        assertThat(
                simplifier
                    .simplify(
                        0,
                        ImmutableSet.of(leftSource, aExclude, aaExclude),
                        Paths.get(""),
                        ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(buildSourceFolder("src"), aExclude));
    }

    @Test
    public void testExcludeFoldersAreNotMergedIntoParentWhenNonExcludedFoldersExist() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder leftSource = buildSourceFolder("src/left");
        IjFolder aaExclude = buildExcludeFolder("src/a/a");
        IjFolder abExclude = buildExcludeFolder("src/a/b");
        IjFolder acNonCoalescing = buildNonCoalescingFolder("src/a/c");
        assertThat(
                simplifier
                    .simplify(
                        0,
                        ImmutableSet.of(leftSource, abExclude, aaExclude, acNonCoalescing),
                        Paths.get(""),
                        ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(
                    buildSourceFolder("src"), abExclude, aaExclude, acNonCoalescing));
    }

    // ---- prefixless and non-coalescing folders ----------------------------

    @Test
    public void testPrefixlessSourcesAreMergedToHighestRoot() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder aFolder = buildNoPrefixSourceFolder("src/a/b");
        IjFolder aaFolder = buildNoPrefixSourceFolder("src/a/a");
        IjFolder bFolder = buildNoPrefixSourceFolder("src/b");
        assertThat(
                simplifier
                    .simplify(
                        0, ImmutableSet.of(aFolder, aaFolder, bFolder), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.contains(buildNoPrefixSourceFolder("")));
    }

    // NOTE(review): method name has a typo ("text" should be "test"); harmless,
    // since JUnit discovers tests via @Test, not by name prefix.
    @Test
    public void textPrefixAndPrefixlessSourcesDontMerge() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder aFolder = buildNoPrefixSourceFolder("src/a/b");
        IjFolder aaFolder = buildSourceFolder("src/a/a");
        IjFolder bFolder = buildNoPrefixSourceFolder("src/b");
        assertThat(
                simplifier
                    .simplify(
                        0, ImmutableSet.of(aFolder, aaFolder, bFolder), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(buildSourceFolder("src"), aFolder, bFolder));
    }

    @Test
    public void testNonCoalescingChildrenDontMerge() {
        IjSourceRootSimplifier simplifier = new IjSourceRootSimplifier(fakePackageFinder());
        IjFolder abFolder = buildSourceFolder("src/a/b");
        IjFolder abrFolder = buildNonCoalescingFolder("src/a/b/r");
        IjFolder acFolder = buildSourceFolder("src/a/c");
        assertThat(
                simplifier
                    .simplify(
                        0, ImmutableSet.of(abFolder, abrFolder, acFolder), Paths.get(""), ImmutableSet.of())
                    .values(),
                Matchers.containsInAnyOrder(abrFolder, buildSourceFolder("src")));
    }
}
|
|
/*
* Copyright (c) 2010-2013 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.repo.sql;
import java.util.Set;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.path.ItemName;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.schema.SchemaRegistry;
import com.evolveum.midpoint.repo.sql.data.common.any.RAnyConverter;
import com.evolveum.midpoint.repo.sql.data.common.any.RAnyValue;
import com.evolveum.midpoint.repo.sql.data.common.type.RObjectExtensionType;
import com.evolveum.midpoint.repo.sql.util.DtoTranslationException;
import com.evolveum.midpoint.schema.DeltaConvertor;
import com.evolveum.midpoint.schema.SchemaConstantsGenerated;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.BeforeAfterType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.GenericObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.hibernate.Session;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.testng.AssertJUnit;
import org.testng.annotations.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.XMLConstants;
import javax.xml.namespace.QName;
/**
 * Tests static conversion of extension/attribute values (supplied as DOM
 * elements) into their repository representation via {@link RAnyConverter}.
 * Integral XSD types are normalized to {@code Long}; floating-point and
 * decimal values are stored as their string form; structured content is
 * stored as a serialized (CLOB) string.
 *
 * @author lazyman
 */
@ContextConfiguration(locations = {"../../../../../ctx-test.xml"})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
public class RAnyConverterStaticTest extends BaseSQLRepoTest {

    private static final Trace LOGGER = TraceManager.getTrace(RAnyConverterStaticTest.class);

    private static final String NS_P = "http://example.com/p";
    private static final String NS_T = PrismConstants.NS_TYPES;
    private static final String NS_FOO_RESOURCE = "http://example.com/foo";

    @Test
    public void testExtensionPolyString() throws Exception {
        // try/finally ensures the session is closed even when an assertion
        // throws (the original code leaked the session on test failure).
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "polyType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            // Build <polyType><orig>Foo_Bar</orig><norm>foo bar</norm></polyType>.
            Element poly = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            Element orig = DOMUtil.createElement(poly.getOwnerDocument(), new QName(NS_T, "orig"));
            orig.setTextContent("Foo_Bar");
            Element norm = DOMUtil.createElement(poly.getOwnerDocument(), new QName(NS_T, "norm"));
            norm.setTextContent("foo bar");
            poly.appendChild(orig);
            poly.appendChild(norm);
            Object realValue = RAnyConverter.getRealRepoValue(def, poly, prismContext);
            AssertJUnit.assertEquals(new PolyString("Foo_Bar", "foo bar"), realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionInteger() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "intType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            value.setTextContent("123");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            // Integral extension values are normalized to Long in the repo.
            AssertJUnit.assertEquals(123L, realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionLong() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "longType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            value.setTextContent("123");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            AssertJUnit.assertEquals(123L, realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionShort() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "shortType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            value.setTextContent("123");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            // Shorts are widened to Long as well.
            AssertJUnit.assertEquals(123L, realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionDouble() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "doubleType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            value.setTextContent("123.1");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            // Floating-point values are stored in their string form.
            AssertJUnit.assertEquals("123.1", realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionFloat() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "floatType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            value.setTextContent("123.1");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            AssertJUnit.assertEquals("123.1", realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionString() throws Exception {
        Session session = getFactory().openSession();
        try {
            // NOTE(review): reuses "floatType" — probably a copy-paste from
            // testExtensionFloat; presumably a string-typed item was intended.
            // Left unchanged because the assertion (string stored as-is) would
            // differ with another definition — confirm against the test schema.
            QName valueName = new QName(NS_P, "floatType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            value.setTextContent("example");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            AssertJUnit.assertEquals("example", realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionEnum() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "enumType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            PrismProperty item = (PrismProperty) def.instantiate();
            item.setRealValue(BeforeAfterType.AFTER);
            def.toMutable().setItemName(valueName);
            RAnyConverter converter = new RAnyConverter(prismContext, extItemDictionary);
            Set<RAnyValue<?>> values;
            // First conversion is expected to fail: the ext item is created on
            // the fly and the converter requests an operation restart.
            try {
                values = converter.convertToRValue(item, false, RObjectExtensionType.EXTENSION);
                AssertJUnit.fail("Should have throw serialization related exception after creating ext item");
            } catch (RestartOperationRequestedException ex) { // this is a new way
                System.out.println("Got expected exception: " + ex);
            } catch (DtoTranslationException ex) { // this was an old way
                AssertJUnit.assertEquals("Wrong exception class", RestartOperationRequestedException.class, ex.getCause().getClass());
            }
            // Second attempt succeeds: the ext item definition now exists.
            values = converter.convertToRValue(item, false, RObjectExtensionType.EXTENSION);
            AssertJUnit.assertEquals("Expected only one enum value, but was " + values.size(), 1, values.size());
            RAnyValue value = values.iterator().next();
            AssertJUnit.assertEquals("after", value.getValue());
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionDecimal() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "decimalType");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
            value.setTextContent("1234");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            // Decimals are stored as strings to avoid precision loss.
            AssertJUnit.assertEquals("1234", realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testExtensionClob() throws Exception {
        Session session = getFactory().openSession();
        try {
            QName valueName = new QName(NS_P, "locations");
            ItemDefinition def = getDefinition(GenericObjectType.class, ItemPath.create(ObjectType.F_EXTENSION, valueName));
            AssertJUnit.assertNotNull(def);
            // Structured content: <locations><location key="heaven">somewhere above</location></locations>
            Document document = DOMUtil.getDocument();
            Element value = DOMUtil.createElement(document, valueName);
            Element location = DOMUtil.createElement(document, new QName(NS_P, "location"));
            value.appendChild(location);
            location.setAttribute("key", "heaven");
            location.setTextContent("somewhere above");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            // The repo value is a serialized string; parse it back and verify
            // that element names, namespaces, attributes and text survived.
            document = DOMUtil.parseDocument((String) realValue);
            Element root = document.getDocumentElement();
            AssertJUnit.assertNotNull(root);
            AssertJUnit.assertEquals("locations", root.getLocalName());
            AssertJUnit.assertEquals(NS_P, root.getNamespaceURI());
            AssertJUnit.assertEquals(1, DOMUtil.listChildElements(root).size());
            location = DOMUtil.listChildElements(root).get(0);
            AssertJUnit.assertNotNull(location);
            AssertJUnit.assertEquals("location", location.getLocalName());
            AssertJUnit.assertEquals(NS_P, location.getNamespaceURI());
            AssertJUnit.assertEquals(0, DOMUtil.listChildElements(location).size());
            AssertJUnit.assertEquals("heaven", location.getAttribute("key"));
            AssertJUnit.assertEquals("somewhere above", location.getTextContent());
        } finally {
            session.close();
        }
    }

    @Test
    public void testAttributesString() throws Exception {
        Session session = getFactory().openSession();
        try {
            // Resource attributes have no static definition (def == null);
            // the type comes from the xsi:type attribute on the element.
            ItemName valueName = new ItemName(NS_FOO_RESOURCE, "uid");
            ItemDefinition def = getDefinition(GenericObjectType.class, valueName);
            AssertJUnit.assertNull(def);
            Element value = createAttributeValue(valueName, "xsd:string", "some uid");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            AssertJUnit.assertEquals("some uid", realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testAttributesDouble() throws Exception {
        Session session = getFactory().openSession();
        try {
            ItemName valueName = new ItemName(NS_FOO_RESOURCE, "uid");
            ItemDefinition def = getDefinition(GenericObjectType.class, valueName);
            AssertJUnit.assertNull(def);
            Element value = createAttributeValue(valueName, "xsd:double", "123.1");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            AssertJUnit.assertEquals("123.1", realValue);
        } finally {
            session.close();
        }
    }

    /**
     * Builds a DOM element named {@code valueName} carrying an explicit
     * {@code xsi:type} attribute and the given text content. Declares the
     * {@code xsd} prefix locally so the xsi:type QName resolves.
     */
    private Element createAttributeValue(QName valueName, String xsdType, String textContent) {
        Element value = DOMUtil.createElement(DOMUtil.getDocument(), valueName);
        value.setAttributeNS(XMLConstants.XMLNS_ATTRIBUTE_NS_URI, "xmlns:xsd", XMLConstants.W3C_XML_SCHEMA_NS_URI);
        value.setAttributeNS(XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI, "xsi:type", xsdType);
        value.setTextContent(textContent);
        return value;
    }

    @Test
    public void testAttributesLong() throws Exception {
        Session session = getFactory().openSession();
        try {
            ItemName valueName = new ItemName(NS_FOO_RESOURCE, "uid");
            ItemDefinition def = getDefinition(GenericObjectType.class, valueName);
            AssertJUnit.assertNull(def);
            Element value = createAttributeValue(valueName, "xsd:long", "123");
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            AssertJUnit.assertEquals(123L, realValue);
        } finally {
            session.close();
        }
    }

    @Test
    public void testUserFullnamePolyString() throws Exception {
        Session session = getFactory().openSession();
        try {
            ItemDefinition def = getDefinition(UserType.class, UserType.F_FULL_NAME);
            AssertJUnit.assertNotNull("Definition not found for " + UserType.F_FULL_NAME, def);
            Element value = DOMUtil.createElement(DOMUtil.getDocument(), UserType.F_FULL_NAME);
            Element orig = DOMUtil.createElement(value.getOwnerDocument(), new QName(NS_T, "orig"));
            orig.setTextContent("john example");
            Element norm = DOMUtil.createElement(value.getOwnerDocument(), new QName(NS_T, "norm"));
            norm.setTextContent("john example");
            value.appendChild(orig);
            value.appendChild(norm);
            Object realValue = RAnyConverter.getRealRepoValue(def, value, prismContext);
            AssertJUnit.assertEquals(new PolyString("john example", "john example"), realValue);
        } finally {
            session.close();
        }
    }

    /** Looks up the item definition for {@code path} in the compile-time schema of {@code type}. */
    private <T extends ObjectType> ItemDefinition getDefinition(Class<T> type, ItemPath path) {
        SchemaRegistry registry = prismContext.getSchemaRegistry();
        PrismObjectDefinition objectDef = registry.findObjectDefinitionByCompileTimeClass(type);
        return objectDef.findItemDefinition(path);
    }

    // NOTE(review): currently unused within this class; kept in case external
    // or future tests rely on it — confirm before removing.
    private Element createExtensionPath() {
        Document document = DOMUtil.getDocument();
        Element extension = DOMUtil.createElement(document, DeltaConvertor.PATH_ELEMENT_NAME);
        extension.setAttributeNS(XMLConstants.XMLNS_ATTRIBUTE_NS_URI, "xmlns:c", SchemaConstantsGenerated.NS_COMMON);
        extension.setTextContent("c:extension");
        return extension;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tests.api.java.util;
import dalvik.annotation.TestTargetNew;
import dalvik.annotation.TestTargets;
import dalvik.annotation.TestLevel;
import dalvik.annotation.TestTargetClass;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.ConcurrentModificationException;
import java.util.Vector;
import tests.support.Support_ListTest;
@TestTargetClass(ArrayList.class)
public class ArrayListTest extends junit.framework.TestCase {
List alist;
Object[] objArray;
/**
 * @tests java.util.ArrayList#ArrayList()
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "ArrayList",
    args = {}
)
public void test_Constructor() {
    // The default-constructed fixture must satisfy the generic List contract.
    new Support_ListTest("", alist).runTest();
    // A subList view over a freshly populated list must satisfy it as well.
    ArrayList backing = new ArrayList();
    for (int value = -50; value < 150; value++) {
        backing.add(new Integer(value));
    }
    new Support_ListTest("", backing.subList(50, 150)).runTest();
}
/**
 * @tests java.util.ArrayList#ArrayList(int)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "ArrayList",
    args = {int.class}
)
public void test_ConstructorI() {
    // A list built with an initial capacity is still empty.
    assertEquals("Incorrect arrayList created", 0, new ArrayList(5).size());
    // A negative capacity must be rejected.
    try {
        new ArrayList(-10);
        fail("IllegalArgumentException expected");
    } catch (IllegalArgumentException e) {
        // expected
    }
}
/**
 * @tests java.util.ArrayList#ArrayList(java.util.Collection)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "Doesn't verify NullPointerException.",
    method = "ArrayList",
    args = {java.util.Collection.class}
)
public void test_ConstructorLjava_util_Collection() {
    // Copy-constructing from a collection preserves size and element identity/order.
    ArrayList copy = new ArrayList(Arrays.asList(objArray));
    assertTrue("arrayList created from collection has incorrect size",
            copy.size() == objArray.length);
    for (int index = 0; index < objArray.length; index++) {
        assertTrue("arrayList created from collection has incorrect elements",
                copy.get(index) == objArray[index]);
    }
    // A null source collection must be rejected.
    try {
        new ArrayList(null);
        fail("NullPointerException expected");
    } catch (NullPointerException e) {
        // expected
    }
}
/**
 * @tests java.util.ArrayList#add(int, java.lang.Object)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "add",
    args = {int.class, java.lang.Object.class}
)
public void test_addILjava_lang_Object() {
    // Insert in the middle and verify the tail shifted by exactly one slot.
    Object inserted = new Object();
    alist.add(50, inserted);
    assertTrue("Failed to add Object", alist.get(50) == inserted);
    assertTrue("Failed to fix up list after insert",
            alist.get(51) == objArray[50] && (alist.get(52) == objArray[51]));
    // null is a legal element; the displaced item moves one slot down.
    Object oldItem = alist.get(25);
    alist.add(25, null);
    assertNull("Should have returned null", alist.get(25));
    assertTrue("Should have returned the old item from slot 25",
            alist.get(26) == oldItem);
    // Out-of-range indices must be rejected on both ends.
    try {
        alist.add(-1, null);
        fail("IndexOutOfBoundsException expected");
    } catch (IndexOutOfBoundsException e) {
        // expected
    }
    try {
        alist.add(alist.size() + 1, null);
        fail("IndexOutOfBoundsException expected");
    } catch (IndexOutOfBoundsException e) {
        // expected
    }
}
/**
 * @tests java.util.ArrayList#add(java.lang.Object)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "add",
    args = {java.lang.Object.class}
)
public void test_addLjava_lang_Object() {
    // Appending places the element at the end of the list.
    Object appended = new Object();
    alist.add(appended);
    assertTrue("Failed to add Object", alist.get(alist.size() - 1) == appended);
    // null is an allowed element.
    alist.add(null);
    assertNull("Failed to add null", alist.get(alist.size() - 1));
}
/**
 * @tests java.util.ArrayList#addAll(int, java.util.Collection)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "addAll",
    args = {int.class, java.util.Collection.class}
)
public void test_addAllILjava_util_Collection() {
    // Insert the list into itself at index 50. The result must be
    // objArray[0..49] + objArray[0..99] + objArray[50..99].
    alist.addAll(50, alist);
    assertEquals("Returned incorrect size after adding to existing list",
            200, alist.size());
    for (int i = 0; i < 50; i++)
        assertTrue("Manipulated elements < index",
                alist.get(i) == objArray[i]);
    // FIX: the original loops started at i = 0 with conditions
    // "i >= 50 && i < 150" (resp. "i >= 150 && i < 200"), which were
    // vacuously false — the inserted and shifted regions were never checked.
    for (int i = 50; i < 150; i++)
        assertTrue("Failed to add elements properly",
                alist.get(i) == objArray[i - 50]);
    for (int i = 150; i < 200; i++)
        assertTrue("Failed to add elements properly",
                alist.get(i) == objArray[i - 100]);
    // Lists keep duplicate nulls; verify null handling on bulk insert.
    ArrayList listWithNulls = new ArrayList();
    listWithNulls.add(null);
    listWithNulls.add(null);
    listWithNulls.add("yoink");
    listWithNulls.add("kazoo");
    listWithNulls.add(null);
    alist.addAll(100, listWithNulls);
    assertTrue("Incorrect size: " + alist.size(), alist.size() == 205);
    assertNull("Item at slot 100 should be null", alist.get(100));
    assertNull("Item at slot 101 should be null", alist.get(101));
    assertEquals("Item at slot 102 should be 'yoink'",
            "yoink", alist.get(102));
    assertEquals("Item at slot 103 should be 'kazoo'",
            "kazoo", alist.get(103));
    assertNull("Item at slot 104 should be null", alist.get(104));
    // Inserting at index == size() appends.
    alist.addAll(205, listWithNulls);
    assertTrue("Incorrect size2: " + alist.size(), alist.size() == 210);
    // Out-of-range indices and a null collection must be rejected.
    try {
        alist.addAll(-1, listWithNulls);
        fail("IndexOutOfBoundsException expected");
    } catch (IndexOutOfBoundsException e) {
        // expected
    }
    try {
        alist.addAll(alist.size() + 1, listWithNulls);
        fail("IndexOutOfBoundsException expected");
    } catch (IndexOutOfBoundsException e) {
        // expected
    }
    try {
        alist.addAll(0, null);
        fail("NullPointerException expected");
    } catch (NullPointerException e) {
        // expected
    }
}
// BEGIN android-removed
// The spec does not mandate that IndexOutOfBoundsException be thrown in
// preference to NullPointerException when the caller desserves both.
//
// /**
// * @tests java.util.ArrayList#addAll(int, java.util.Collection)
// */
// @TestTargetNew(
// level = TestLevel.PARTIAL_COMPLETE,
// notes = "Verifies IndexOutOfBoundsException.",
// method = "addAll",
// args = {int.class, java.util.Collection.class}
// )
// public void test_addAllILjava_util_Collection_2() {
// // Regression for HARMONY-467
// ArrayList obj = new ArrayList();
// try {
// obj.addAll((int) -1, (Collection) null);
// fail("IndexOutOfBoundsException expected");
// } catch (IndexOutOfBoundsException e) {
// }
// }
// END android-removed
/**
 * @tests java.util.ArrayList#addAll(java.util.Collection)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "Doesn't verify NullPointerException.",
    method = "addAll",
    args = {java.util.Collection.class}
)
public void test_addAllLjava_util_Collection() {
    // Test for method boolean
    // java.util.ArrayList.addAll(java.util.Collection)
    // Appending the fixture to itself doubles it while preserving order.
    List l = new ArrayList();
    l.addAll(alist);
    for (int i = 0; i < alist.size(); i++)
        assertTrue("Failed to add elements properly", l.get(i).equals(
                alist.get(i)));
    alist.addAll(alist);
    assertEquals("Returned incorrect size after adding to existing list",
            200, alist.size());
    for (int i = 0; i < 100; i++) {
        assertTrue("Added to list in incorrect order", alist.get(i)
                .equals(l.get(i)));
        assertTrue("Failed to add to existing list", alist.get(i + 100)
                .equals(l.get(i)));
    }
    // NOTE: HashSet deduplicates, so setWithNulls holds only
    // {null, "yoink", "kazoo"}. addAll inserts in the set's iteration
    // order, which is why the same iterator drives the assertions below.
    Set setWithNulls = new HashSet();
    setWithNulls.add(null);
    setWithNulls.add(null);
    setWithNulls.add("yoink");
    setWithNulls.add("kazoo");
    setWithNulls.add(null);
    alist.addAll(100, setWithNulls);
    Iterator i = setWithNulls.iterator();
    assertTrue("Item at slot 100 is wrong: " + alist.get(100), alist
            .get(100) == i.next());
    assertTrue("Item at slot 101 is wrong: " + alist.get(101), alist
            .get(101) == i.next());
    assertTrue("Item at slot 103 is wrong: " + alist.get(102), alist
            .get(102) == i.next());
    // Regression test for Harmony-3481: addAll after removals must grow
    // the backing array correctly.
    ArrayList<Integer> originalList = new ArrayList<Integer>(12);
    for (int j = 0; j < 12; j++) {
        originalList.add(j);
    }
    originalList.remove(0);
    originalList.remove(0);
    ArrayList<Integer> additionalList = new ArrayList<Integer>(11);
    for (int j = 0; j < 11; j++) {
        additionalList.add(j);
    }
    assertTrue(originalList.addAll(additionalList));
    assertEquals(21, originalList.size());
    // A null collection must be rejected.
    try {
        alist.addAll(null);
        fail("NullPointerException expected");
    } catch (NullPointerException e) {
        //expected
    }
}
/**
 * @tests java.util.ArrayList#clear()
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "clear",
    args = {}
)
public void test_clear() {
    // Clearing drops every element.
    alist.clear();
    assertEquals("List did not clear", 0, alist.size());
    // Clearing must also cope with null elements.
    alist.add(null);
    alist.add(null);
    alist.add(null);
    alist.add("bam");
    alist.clear();
    assertEquals("List with nulls did not clear", 0, alist.size());
}
/**
 * @tests java.util.ArrayList#clone()
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "clone",
    args = {}
)
public void test_clone() {
    // Test for method java.lang.Object java.util.ArrayList.clone()
    // clone() is shallow: the copy is equal and shares element references.
    ArrayList x = (ArrayList) (((ArrayList) (alist)).clone());
    assertTrue("Cloned list was inequal to original", x.equals(alist));
    for (int i = 0; i < alist.size(); i++)
        assertTrue("Cloned list contains incorrect elements",
                alist.get(i) == x.get(i));
    // Repeat with nulls present to ensure they survive cloning.
    alist.add(null);
    alist.add(25, null);
    x = (ArrayList) (((ArrayList) (alist)).clone());
    assertTrue("nulls test - Cloned list was inequal to original", x
            .equals(alist));
    for (int i = 0; i < alist.size(); i++)
        assertTrue("nulls test - Cloned list contains incorrect elements",
                alist.get(i) == x.get(i));
}
/**
 * @tests java.util.ArrayList#contains(java.lang.Object)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "contains",
    args = {java.lang.Object.class}
)
public void test_containsLjava_lang_Object() {
    // contains() matches via equals(), so an equal-but-distinct Integer is found.
    assertTrue("Returned false for valid element",
            alist.contains(objArray[99]));
    assertTrue("Returned false for equal element",
            alist.contains(new Integer(8)));
    assertTrue("Returned true for invalid element",
            !alist.contains(new Object()));
    // null is only reported once it has actually been added.
    assertTrue("Returned true for null but should have returned false",
            !alist.contains(null));
    alist.add(null);
    assertTrue("Returned false for null but should have returned true",
            alist.contains(null));
}
/**
 * @tests java.util.ArrayList#ensureCapacity(int)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "ensureCapacity",
    args = {int.class}
)
public void test_ensureCapacityI() {
    // Test for method void java.util.ArrayList.ensureCapacity(int)
    // TODO : There is no good way to test this as it only really impacts on
    // the private implementation.
    // Strategy: track a marker object's index; growing capacity must never
    // reorder the visible elements.
    Object testObject = new Object();
    int capacity = 20;
    ArrayList al = new ArrayList(capacity);
    int i;
    for (i = 0; i < capacity / 2; i++) {
        al.add(i, new Object());
    }
    al.add(i, testObject);
    int location = al.indexOf(testObject);
    try {
        // Growing to the same capacity must be a no-op for element order.
        al.ensureCapacity(capacity);
        assertTrue("EnsureCapacity moved objects around in array1.",
                location == al.indexOf(testObject));
        // After removing the head, the marker shifts down by one — and
        // ensureCapacity must still not reorder anything.
        al.remove(0);
        al.ensureCapacity(capacity);
        assertTrue("EnsureCapacity moved objects around in array2.",
                --location == al.indexOf(testObject));
        // Growing past the current capacity must preserve order too.
        al.ensureCapacity(capacity + 2);
        assertTrue("EnsureCapacity did not change location.",
                location == al.indexOf(testObject));
    } catch (Exception e) {
        fail("Exception during test : " + e.getMessage());
    }
}
/**
 * @tests java.util.ArrayList#get(int)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "get",
    args = {int.class}
)
public void test_getI() {
    // get() must return the exact element reference at the index.
    assertTrue("Returned incorrect element", alist.get(22) == objArray[22]);
    // An index far past the end must be rejected.
    try {
        alist.get(8765);
        fail("Failed to throw expected exception for index > size");
    } catch (IndexOutOfBoundsException e) {
        // expected
    }
}
/**
 * @tests java.util.ArrayList#indexOf(java.lang.Object)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "indexOf",
    args = {java.lang.Object.class}
)
public void test_indexOfLjava_lang_Object() {
    // indexOf() reports the first match, searching from the front.
    assertEquals("Returned incorrect index",
            87, alist.indexOf(objArray[87]));
    assertEquals("Returned index for invalid Object", -1,
            alist.indexOf(new Object()));
    // With nulls at 25 and 50, the earlier one wins.
    alist.add(25, null);
    alist.add(50, null);
    assertTrue("Wrong indexOf for null. Wanted 25 got: "
            + alist.indexOf(null), alist.indexOf(null) == 25);
}
/**
 * @tests java.util.ArrayList#isEmpty()
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "isEmpty",
    args = {}
)
public void test_isEmpty() {
    // A fresh list is empty; the populated fixture is not.
    assertTrue("isEmpty returned false for new list",
            new ArrayList().isEmpty());
    assertTrue("Returned true for existing list with elements",
            !alist.isEmpty());
}
/**
 * @tests java.util.ArrayList#lastIndexOf(java.lang.Object)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "lastIndexOf",
    args = {java.lang.Object.class}
)
public void test_lastIndexOfLjava_lang_Object() {
    // Append a duplicate of objArray[99]; lastIndexOf must find the later copy.
    alist.add(new Integer(99));
    assertEquals("Returned incorrect index",
            100, alist.lastIndexOf(objArray[99]));
    assertEquals("Returned index for invalid Object", -1,
            alist.lastIndexOf(new Object()));
    // With nulls at 25 and 50, the later one wins.
    alist.add(25, null);
    alist.add(50, null);
    assertTrue("Wrong lastIndexOf for null. Wanted 50 got: "
            + alist.lastIndexOf(null), alist.lastIndexOf(null) == 50);
}
/**
 * @tests java.util.ArrayList#remove(int)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "remove",
    args = {int.class}
)
public void test_removeI() {
    // Test for method java.lang.Object java.util.ArrayList.remove(int)
    alist.remove(10);
    assertEquals("Failed to remove element",
            -1, alist.indexOf(objArray[10]));
    try {
        alist.remove(999);
        fail("Failed to throw exception when index out of range");
    } catch (IndexOutOfBoundsException e) {
    }
    // Insert two nulls and remove them again; the list must equal the
    // pre-insertion snapshot (remove later index first so positions hold).
    ArrayList myList = (ArrayList) (((ArrayList) (alist)).clone());
    alist.add(25, null);
    alist.add(50, null);
    alist.remove(50);
    alist.remove(25);
    assertTrue("Removing nulls did not work", alist.equals(myList));
    // remove(int) returns the removed element; note index 4 refers to "f"
    // only because "a" was already removed.
    List list = new ArrayList(Arrays.asList(new String[] { "a", "b", "c",
            "d", "e", "f", "g" }));
    assertTrue("Removed wrong element 1", list.remove(0) == "a");
    assertTrue("Removed wrong element 2", list.remove(4) == "f");
    String[] result = new String[5];
    list.toArray(result);
    assertTrue("Removed wrong element 3", Arrays.equals(result,
            new String[] { "b", "c", "d", "e", "g" }));
    // Removing from an emptied list must fail for both -1 and 0.
    List l = new ArrayList(0);
    l.add(new Object());
    l.add(new Object());
    l.remove(0);
    l.remove(0);
    try {
        l.remove(-1);
        fail("-1 should cause exception");
    } catch (IndexOutOfBoundsException e) {
    }
    try {
        l.remove(0);
        // (sic: message should read "cause", kept byte-identical)
        fail("0 should case exception");
    } catch (IndexOutOfBoundsException e) {
    }
}
/**
 * @tests java.util.ArrayList#set(int, java.lang.Object)
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "set",
    args = {int.class, java.lang.Object.class}
)
public void test_setILjava_lang_Object() {
    // set() replaces in place without changing the size.
    Object replacement = new Object();
    alist.set(65, replacement);
    assertTrue("Failed to set object", alist.get(65) == replacement);
    // null is a legal replacement value.
    alist.set(50, null);
    assertNull("Setting to null did not work", alist.get(50));
    assertTrue("Setting increased the list's size to: " + alist.size(),
            alist.size() == 100);
    // Out-of-range indices must be rejected on both ends.
    try {
        alist.set(-1, null);
        fail("IndexOutOfBoundsException expected");
    } catch (IndexOutOfBoundsException e) {
        // expected
    }
    try {
        alist.set(alist.size() + 1, null);
        fail("IndexOutOfBoundsException expected");
    } catch (IndexOutOfBoundsException e) {
        // expected
    }
}
/**
 * @tests java.util.ArrayList#size()
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "size",
    args = {}
)
public void test_size() {
    // The fixture holds 100 elements; a fresh list holds none.
    assertEquals("Returned incorrect size for exiting list",
            100, alist.size());
    assertEquals("Returned incorrect size for new list", 0,
            new ArrayList().size());
}
/**
 * @tests java.util.ArrayList#toArray()
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "toArray",
    args = {}
)
public void test_toArray() {
    // toArray() must mirror the list exactly, nulls included.
    alist.set(25, null);
    alist.set(75, null);
    Object[] snapshot = alist.toArray();
    assertEquals("Returned array of incorrect size", objArray.length,
            snapshot.length);
    for (int i = 0; i < snapshot.length; i++) {
        if ((i == 25) || (i == 75)) {
            assertNull("Should be null at: " + i + " but instead got: "
                    + snapshot[i], snapshot[i]);
        } else {
            assertTrue("Returned incorrect array: " + i,
                    snapshot[i] == objArray[i]);
        }
    }
}
/**
 * @tests java.util.ArrayList#toArray(java.lang.Object[])
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "toArray",
    args = {java.lang.Object[].class}
)
public void test_toArray$Ljava_lang_Object() {
    // Test for method java.lang.Object []
    // java.util.ArrayList.toArray(java.lang.Object [])
    alist.set(25, null);
    alist.set(75, null);
    // An exactly-sized argument array must be returned as-is (same reference).
    Integer[] argArray = new Integer[100];
    Object[] retArray;
    retArray = alist.toArray(argArray);
    assertTrue("Returned different array than passed", retArray == argArray);
    // An oversized array gets a null terminator at index size().
    argArray = new Integer[1000];
    retArray = alist.toArray(argArray);
    assertNull("Failed to set first extra element to null", argArray[alist
            .size()]);
    for (int i = 0; i < 100; i++) {
        if ((i == 25) || (i == 75))
            assertNull("Should be null: " + i, retArray[i]);
        else
            assertTrue("Returned incorrect array: " + i,
                    retArray[i] == objArray[i]);
    }
    // An incompatible component type (String[] for Integer elements) must
    // trigger ArrayStoreException.
    String[] strArray = new String[100];
    try {
        alist.toArray(strArray);
        fail("ArrayStoreException expected");
    } catch (ArrayStoreException e) {
        //expected
    }
}
/**
 * @tests java.util.ArrayList#trimToSize()
 */
@TestTargetNew(
    level = TestLevel.COMPLETE,
    notes = "",
    method = "trimToSize",
    args = {}
)
public void test_trimToSize_01() {
    // Test for method void java.util.ArrayList.trimToSize()
    // Shrink the fixture to 25 elements, trim, and verify content survives.
    for (int i = 99; i > 24; i--)
        alist.remove(i);
    ((ArrayList) alist).trimToSize();
    assertEquals("Returned incorrect size after trim", 25, alist.size());
    for (int i = 0; i < alist.size(); i++)
        assertTrue("Trimmed list contained incorrect elements", alist
                .get(i) == objArray[i]);
    // A live iterator must fail fast after a structural modification
    // (remove + trimToSize) — expect ConcurrentModificationException.
    Vector v = new Vector();
    v.add("a");
    v.add("b");
    ArrayList al = new ArrayList(v);
    Iterator it = al.iterator();
    al.remove(0);
    al.trimToSize();
    try {
        it.next();
        fail("should throw a ConcurrentModificationException");
    } catch (ConcurrentModificationException ioobe) {
        // expected
    }
}
@TestTargetNew(
level = TestLevel.COMPLETE,
notes = "",
method = "trimToSize",
args = {}
)
public void test_trimToSize_02() {
ArrayList list = new ArrayList(Arrays.asList(new String[] { "a", "b", "c",
"d", "e", "f", "g" }));
list.remove("a");
list.remove("f");
list.trimToSize();
}
/**
* @test java.util.ArrayList#addAll(int, Collection)
*/
@TestTargetNew(
level = TestLevel.PARTIAL_COMPLETE,
notes = "Doesn't verify NullPointerException.",
method = "addAll",
args = {int.class, java.util.Collection.class}
)
public void test_addAll() {
ArrayList list = new ArrayList();
list.add("one");
list.add("two");
assertEquals(2, list.size());
list.remove(0);
assertEquals(1, list.size());
ArrayList collection = new ArrayList();
collection.add("1");
collection.add("2");
collection.add("3");
assertEquals(3, collection.size());
list.addAll(0, collection);
assertEquals(4, list.size());
list.remove(0);
list.remove(0);
assertEquals(2, list.size());
collection.add("4");
collection.add("5");
collection.add("6");
collection.add("7");
collection.add("8");
collection.add("9");
collection.add("10");
collection.add("11");
collection.add("12");
assertEquals(12, collection.size());
list.addAll(0, collection);
assertEquals(14, list.size());
}
@TestTargetNew(
level = TestLevel.COMPLETE,
notes = "",
method = "remove",
args = {java.lang.Object.class}
)
public void test_removeLjava_lang_Object() {
List list = new ArrayList(Arrays.asList(new String[] { "a", "b", "c",
"d", "e", "f", "g" }));
assertTrue("Removed wrong element 1", list.remove("a"));
assertTrue("Removed wrong element 2", list.remove("f"));
String[] result = new String[5];
list.toArray(result);
assertTrue("Removed wrong element 3", Arrays.equals(result,
new String[] { "b", "c", "d", "e", "g" }));
}
class Mock_ArrayList extends ArrayList {
public Mock_ArrayList() {
}
public void removeRange(int begin, int end) {
super.removeRange(begin, end);
}
}
@TestTargetNew(
level = TestLevel.COMPLETE,
notes = "",
method = "removeRange",
args = {int.class, int.class}
)
public void test_removeRangeII() {
Mock_ArrayList mal = new Mock_ArrayList();
mal.add("a");
mal.add("b");
mal.add("c");
mal.add("d");
mal.add("e");
mal.add("f");
mal.add("g");
mal.add("h");
mal.removeRange(2, 4);
String[] result = new String[6];
mal.toArray(result);
assertTrue("Removed wrong element 3", Arrays.equals(result,
new String[] { "a", "b", "e", "f", "g", "h"}));
}
/**
* Sets up the fixture, for example, open a network connection. This method
* is called before a test is executed.
*/
protected void setUp() throws Exception {
super.setUp();
objArray = new Object[100];
for (int i = 0; i < objArray.length; i++) {
objArray[i] = new Integer(i);
}
alist = new ArrayList();
for (int i = 0; i < objArray.length; i++) {
alist.add(objArray[i]);
}
}
    @Override
    protected void tearDown() throws Exception {
        // Drop fixture references so they can be reclaimed between tests.
        objArray = null;
        alist = null;
        super.tearDown();
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.jute.BinaryInputArchive;
import org.apache.jute.BinaryOutputArchive;
import org.apache.jute.Record;
import org.apache.log4j.Logger;
import org.apache.zookeeper.AsyncCallback.ACLCallback;
import org.apache.zookeeper.AsyncCallback.Children2Callback;
import org.apache.zookeeper.AsyncCallback.ChildrenCallback;
import org.apache.zookeeper.AsyncCallback.DataCallback;
import org.apache.zookeeper.AsyncCallback.StatCallback;
import org.apache.zookeeper.AsyncCallback.StringCallback;
import org.apache.zookeeper.AsyncCallback.VoidCallback;
import org.apache.zookeeper.Watcher.Event;
import org.apache.zookeeper.Watcher.Event.EventType;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.apache.zookeeper.ZooDefs.OpCode;
import org.apache.zookeeper.ZooKeeper.States;
import org.apache.zookeeper.ZooKeeper.WatchRegistration;
import org.apache.zookeeper.client.HostProvider;
import org.apache.zookeeper.proto.AuthPacket;
import org.apache.zookeeper.proto.ConnectRequest;
import org.apache.zookeeper.proto.CreateResponse;
import org.apache.zookeeper.proto.ExistsResponse;
import org.apache.zookeeper.proto.GetACLResponse;
import org.apache.zookeeper.proto.GetChildren2Response;
import org.apache.zookeeper.proto.GetChildrenResponse;
import org.apache.zookeeper.proto.GetDataResponse;
import org.apache.zookeeper.proto.ReplyHeader;
import org.apache.zookeeper.proto.RequestHeader;
import org.apache.zookeeper.proto.SetACLResponse;
import org.apache.zookeeper.proto.SetDataResponse;
import org.apache.zookeeper.proto.SetWatches;
import org.apache.zookeeper.proto.WatcherEvent;
import org.apache.zookeeper.server.ByteBufferInputStream;
import org.apache.zookeeper.server.ZooTrace;
/**
* This class manages the socket i/o for the client. ClientCnxn maintains a list
* of available servers to connect to and "transparently" switches servers it is
* connected to as needed.
*
*/
public class ClientCnxn {
private static final Logger LOG = Logger.getLogger(ClientCnxn.class);
    /** This controls whether automatic watch resetting is enabled.
     * Clients automatically reset watches during session reconnect; this
     * option allows the client to turn off this behavior by setting
     * the Java system property "zookeeper.disableAutoWatchReset" to "true"
     * (read once at class-load time via {@link Boolean#getBoolean}). */
    private static boolean disableAutoWatchReset;
    static {
        // this var should not be public, but otw there is no easy way
        // to test
        disableAutoWatchReset =
            Boolean.getBoolean("zookeeper.disableAutoWatchReset");
        if (LOG.isDebugEnabled()) {
            LOG.debug("zookeeper.disableAutoWatchReset is "
                + disableAutoWatchReset);
        }
    }
    /**
     * Pair of an authentication scheme name and its opaque credential bytes.
     * Stored in {@code authInfo} and replayed to the server when a connection
     * is (re)established (see primeConnection()).
     */
    static class AuthData {
        AuthData(String scheme, byte data[]) {
            this.scheme = scheme;
            this.data = data;
        }
        String scheme;
        byte data[];
    }
    // Credentials to replay on every (re)connect; CopyOnWriteArraySet makes
    // iteration safe while addAuthInfo() runs concurrently.
    private final CopyOnWriteArraySet<AuthData> authInfo = new CopyOnWriteArraySet<AuthData>();
    /**
     * These are the packets that have been sent and are waiting for a response.
     * Guarded by synchronized(pendingQueue).
     */
    private final LinkedList<Packet> pendingQueue = new LinkedList<Packet>();
    /**
     * These are the packets that need to be sent.
     * Guarded by synchronized(outgoingQueue).
     */
    private final LinkedList<Packet> outgoingQueue = new LinkedList<Packet>();
    // Per-server connect budget; derived as sessionTimeout / hostProvider.size().
    private int connectTimeout;
    /**
     * The timeout in ms the client negotiated with the server. This is the
     * "real" timeout, not the timeout request by the client (which may have
     * been increased/decreased by the server which applies bounds to this
     * value.
     */
    private volatile int negotiatedSessionTimeout;
    // How long we may go without hearing from the server (2/3 of the session timeout).
    private int readTimeout;
    // Timeout the client originally asked for, in ms.
    private final int sessionTimeout;
    private final ZooKeeper zooKeeper;
    private final ClientWatchManager watcher;
    private long sessionId;
    private byte sessionPasswd[] = new byte[16];
    final String chrootPath;
    final SendThread sendThread;
    final EventThread eventThread;
    /**
     * Set to true when close is called. Latches the connection such that we
     * don't attempt to re-connect to the server if in the middle of closing the
     * connection (client sends session disconnect to server as part of close
     * operation)
     */
    private volatile boolean closing = false;
    /**
     * A set of ZooKeeper hosts this client could connect to.
     */
    private final HostProvider hostProvider;
    /** @return the current session id. */
    public long getSessionId() {
        return sessionId;
    }
    /** @return the session password; note this is the internal array, not a copy. */
    public byte[] getSessionPasswd() {
        return sessionPasswd;
    }
    /** @return the session timeout negotiated with the server, in milliseconds. */
    public int getSessionTimeout() {
        return negotiatedSessionTimeout;
    }
    /**
     * One-line diagnostic summary of this connection: session id, local and
     * remote socket addresses, last seen zxid, xid counter, send/receive
     * counters and the sizes of the internal queues.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        SocketAddress local = sendThread.getClientCnxnSocket().getLocalSocketAddress();
        SocketAddress remote = sendThread.getClientCnxnSocket().getRemoteSocketAddress();
        sb
            .append("sessionid:0x").append(Long.toHexString(getSessionId()))
            .append(" local:").append(local)
            .append(" remoteserver:").append(remote)
            .append(" lastZxid:").append(lastZxid)
            .append(" xid:").append(xid)
            .append(" sent:").append(sendThread.getClientCnxnSocket().getSentCount())
            .append(" recv:").append(sendThread.getClientCnxnSocket().getRecvCount())
            .append(" queuedpkts:").append(outgoingQueue.size())
            .append(" pendingresp:").append(pendingQueue.size())
            .append(" queuedevents:").append(eventThread.waitingEvents.size());
        return sb.toString();
    }
    /**
     * This class allows us to pass the headers and the relevant records around.
     * A Packet bundles one request/reply exchange: the wire headers, the jute
     * request/response records, the serialized bytes to send, any async
     * callback plus context, and an optional watch registration applied when
     * the reply arrives.
     */
    static class Packet {
        RequestHeader requestHeader;
        ReplyHeader replyHeader;
        Record request;
        Record response;
        // Fully serialized request (length prefix + header + body), ready to send.
        ByteBuffer bb;
        /** Client's view of the path (may differ due to chroot) **/
        String clientPath;
        /** Servers's view of the path (may differ due to chroot) **/
        String serverPath;
        // Set once the reply is processed; synchronous callers wait on this
        // Packet's monitor until it flips to true (see submitRequest()).
        boolean finished;
        AsyncCallback cb;
        Object ctx;
        WatchRegistration watchRegistration;
        Packet(RequestHeader requestHeader, ReplyHeader replyHeader, Record request,
                Record response, WatchRegistration watchRegistration) {
            this.requestHeader = requestHeader;
            this.replyHeader = replyHeader;
            this.request = request;
            this.response = response;
            try {
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                BinaryOutputArchive boa = BinaryOutputArchive.getArchive(baos);
                boa.writeInt(-1, "len"); // We'll fill this in later
                if (requestHeader != null) {
                    requestHeader.serialize(boa, "header");
                }
                if (request instanceof ConnectRequest) {
                    request.serialize(boa, "connect");
                } else if (request != null) {
                    request.serialize(boa, "request");
                }
                baos.close();
                this.bb = ByteBuffer.wrap(baos.toByteArray());
                // Back-patch the real payload length into the leading 4-byte
                // prefix (wrap() leaves position at 0).
                this.bb.putInt(this.bb.capacity() - 4);
                this.bb.rewind();
            } catch (IOException e) {
                LOG.warn("Ignoring unexpected exception", e);
            }
            this.watchRegistration = watchRegistration;
        }
        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("clientPath:" + clientPath);
            sb.append(" serverPath:" + serverPath);
            sb.append(" finished:" + finished);
            sb.append(" header:: " + requestHeader);
            sb.append(" replyHeader:: " + replyHeader);
            sb.append(" request:: " + request);
            sb.append(" response:: " + response);
            // jute toString is horrible, remove unnecessary newlines
            return sb.toString().replaceAll("\r*\n+", " ");
        }
    }
/**
* Creates a connection object. The actual network connect doesn't get
* established until needed. The start() instance method must be called
* subsequent to construction.
*
* @param chrootPath - the chroot of this client. Should be removed from this Class in ZOOKEEPER-838
* @param hostProvider
* the list of ZooKeeper servers to connect to
* @param sessionTimeout
* the timeout for connections.
* @param zooKeeper
* the zookeeper object that this connection is related to.
* @param watcher watcher for this connection
* @param clientCnxnSocket
* the socket implementation used (e.g. NIO/Netty)
* @throws IOException
*/
    public ClientCnxn(String chrootPath, HostProvider hostProvider, int sessionTimeout, ZooKeeper zooKeeper,
            ClientWatchManager watcher, ClientCnxnSocket clientCnxnSocket)
            throws IOException {
        // sessionId 0 + blank 16-byte password means "establish a brand-new session".
        this(chrootPath, hostProvider, sessionTimeout, zooKeeper, watcher, clientCnxnSocket, 0, new byte[16]);
    }
/**
* Creates a connection object. The actual network connect doesn't get
* established until needed. The start() instance method must be called
* subsequent to construction.
*
* @param chrootPath - the chroot of this client. Should be removed from this Class in ZOOKEEPER-838
* @param hostProvider
* the list of ZooKeeper servers to connect to
* @param sessionTimeout
* the timeout for connections.
* @param zooKeeper
* the zookeeper object that this connection is related to.
* @param watcher watcher for this connection
* @param clientCnxnSocket
* the socket implementation used (e.g. NIO/Netty)
* @param sessionId session id if re-establishing session
* @param sessionPasswd session passwd if re-establishing session
* @throws IOException
*/
    public ClientCnxn(String chrootPath, HostProvider hostProvider, int sessionTimeout, ZooKeeper zooKeeper,
            ClientWatchManager watcher, ClientCnxnSocket clientCnxnSocket,
            long sessionId, byte[] sessionPasswd) {
        this.zooKeeper = zooKeeper;
        this.watcher = watcher;
        this.sessionId = sessionId;
        // NOTE(review): the caller's password array is stored without a
        // defensive copy, so mutable state is shared with the caller.
        this.sessionPasswd = sessionPasswd;
        this.sessionTimeout = sessionTimeout;
        this.hostProvider = hostProvider;
        this.chrootPath = chrootPath;
        // Spread the connect budget across all candidate servers; allow up to
        // 2/3 of the session timeout without hearing from the server. Both
        // values are re-derived after negotiation in onConnected().
        connectTimeout = sessionTimeout / hostProvider.size();
        readTimeout = sessionTimeout * 2 / 3;
        sendThread = new SendThread(clientCnxnSocket);
        eventThread = new EventThread();
    }
    /**
     * Tests use this to check on reset of watches.
     * @return true if the automatic reset of watches on reconnect is disabled
     */
    public static boolean getDisableAutoResetWatch() {
        return disableAutoWatchReset;
    }
    /**
     * Tests use this to set the auto reset.
     * @param b the value to set disable watches to
     */
    public static void setDisableAutoResetWatch(boolean b) {
        disableAutoWatchReset = b;
    }
    /** Starts the send and event daemon threads; must be called once after construction. */
    public void start() {
        sendThread.start();
        eventThread.start();
    }
    // Sentinel queued to tell the EventThread to drain its backlog and exit.
    private Object eventOfDeath = new Object();
    // Last-resort logging for exceptions that escape the send/event threads.
    private final static UncaughtExceptionHandler uncaughtExceptionHandler = new UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(Thread t, Throwable e) {
            LOG.error("from " + t.getName(), e);
        }
    };
    /**
     * Pairs a materialized set of watchers with the event every one of them
     * should receive; queued for later dispatch on the event thread.
     */
    private static class WatcherSetEventPair {
        private final Set<Watcher> watchers;
        private final WatchedEvent event;
        public WatcherSetEventPair(Set<Watcher> watchers, WatchedEvent event) {
            this.watchers = watchers;
            this.event = event;
        }
    }
/**
* Guard against creating "-EventThread-EventThread-EventThread-..." thread
* names when ZooKeeper object is being created from within a watcher.
* See ZOOKEEPER-795 for details.
*/
private static String makeThreadName(String suffix) {
String name = Thread.currentThread().getName().
replaceAll("-EventThread", "");
return name + suffix;
}
class EventThread extends Thread {
        // FIFO of WatcherSetEventPair, Packet and eventOfDeath entries.
        private final LinkedBlockingQueue<Object> waitingEvents =
            new LinkedBlockingQueue<Object>();
        /** This is really the queued session state until the event
         * thread actually processes the event and hands it to the watcher.
         * But for all intents and purposes this is the state.
         */
        private volatile KeeperState sessionState = KeeperState.Disconnected;
        // Set once the eventOfDeath marker is consumed; after that packets may
        // be processed inline by the queueing thread (see queuePacket()).
        private volatile boolean wasKilled = false;
        // True while run() is draining events; cleared just before exit.
        private volatile boolean isRunning = false;
        EventThread() {
            // Daemon thread so a client that forgets to close() cannot keep
            // the JVM alive; name built to avoid suffix pile-up (ZOOKEEPER-795).
            super(makeThreadName("-EventThread"));
            setUncaughtExceptionHandler(uncaughtExceptionHandler);
            setDaemon(true);
        }
        /**
         * Resolves the watchers interested in the given event and queues the
         * (watchers, event) pair for dispatch on this thread.
         */
        public void queueEvent(WatchedEvent event) {
            // Suppress duplicate session-state notifications: a None-type
            // event that doesn't change the current state is dropped.
            if (event.getType() == EventType.None
                    && sessionState == event.getState()) {
                return;
            }
            sessionState = event.getState();
            // materialize the watchers based on the event
            WatcherSetEventPair pair = new WatcherSetEventPair(
                    watcher.materialize(event.getState(), event.getType(),
                            event.getPath()),
                            event);
            // queue the pair (watch set & event) for later processing
            waitingEvents.add(pair);
        }
        /**
         * Queues a completed packet for async callback dispatch. After the
         * death marker has been seen, the packet is either appended to the
         * still-draining queue or its callback is run inline on the caller's
         * thread (the lock coordinates with the isRunning check in run()).
         */
        public void queuePacket(Packet packet) {
            if (wasKilled) {
                synchronized (waitingEvents) {
                    if (isRunning) waitingEvents.add(packet);
                    else processEvent(packet);
                }
            } else {
                waitingEvents.add(packet);
            }
        }
        /** Queues the sentinel telling run() to finish the backlog and exit. */
        public void queueEventOfDeath() {
            waitingEvents.add(eventOfDeath);
        }
        @Override
        public void run() {
            try {
                isRunning = true;
                while (true) {
                    Object event = waitingEvents.take();
                    if (event == eventOfDeath) {
                        // Remember the shutdown request but keep draining
                        // whatever is already queued.
                        wasKilled = true;
                    } else {
                        processEvent(event);
                    }
                    if (wasKilled)
                        synchronized (waitingEvents) {
                            // Exit only once the backlog is empty; the lock
                            // coordinates with queuePacket()'s isRunning check.
                            if (waitingEvents.isEmpty()) {
                                isRunning = false;
                                break;
                            }
                        }
                }
            } catch (InterruptedException e) {
                LOG.error("Event thread exiting due to interruption", e);
            }
            LOG.info("EventThread shut down");
        }
        /**
         * Dispatches one queued item: either fans a WatchedEvent out to each
         * of its materialized watchers, or invokes the async callback whose
         * type matches the packet's response record. All throwables are
         * logged and swallowed so the event thread never dies mid-stream.
         */
        private void processEvent(Object event) {
            try {
                if (event instanceof WatcherSetEventPair) {
                    // each watcher will process the event
                    WatcherSetEventPair pair = (WatcherSetEventPair) event;
                    for (Watcher watcher : pair.watchers) {
                        try {
                            watcher.process(pair.event);
                        } catch (Throwable t) {
                            LOG.error("Error while calling watcher ", t);
                        }
                    }
                } else {
                    // Completed request: pick the callback flavor by the
                    // concrete response type; rc != 0 paths pass nulls.
                    Packet p = (Packet) event;
                    int rc = 0;
                    String clientPath = p.clientPath;
                    if (p.replyHeader.getErr() != 0) {
                        rc = p.replyHeader.getErr();
                    }
                    if (p.cb == null) {
                        LOG.warn("Somehow a null cb got to EventThread!");
                    } else if (p.response instanceof ExistsResponse
                            || p.response instanceof SetDataResponse
                            || p.response instanceof SetACLResponse) {
                        StatCallback cb = (StatCallback) p.cb;
                        if (rc == 0) {
                            if (p.response instanceof ExistsResponse) {
                                cb.processResult(rc, clientPath, p.ctx,
                                        ((ExistsResponse) p.response)
                                                .getStat());
                            } else if (p.response instanceof SetDataResponse) {
                                cb.processResult(rc, clientPath, p.ctx,
                                        ((SetDataResponse) p.response)
                                                .getStat());
                            } else if (p.response instanceof SetACLResponse) {
                                cb.processResult(rc, clientPath, p.ctx,
                                        ((SetACLResponse) p.response)
                                                .getStat());
                            }
                        } else {
                            cb.processResult(rc, clientPath, p.ctx, null);
                        }
                    } else if (p.response instanceof GetDataResponse) {
                        DataCallback cb = (DataCallback) p.cb;
                        GetDataResponse rsp = (GetDataResponse) p.response;
                        if (rc == 0) {
                            cb.processResult(rc, clientPath, p.ctx, rsp
                                    .getData(), rsp.getStat());
                        } else {
                            cb.processResult(rc, clientPath, p.ctx, null,
                                    null);
                        }
                    } else if (p.response instanceof GetACLResponse) {
                        ACLCallback cb = (ACLCallback) p.cb;
                        GetACLResponse rsp = (GetACLResponse) p.response;
                        if (rc == 0) {
                            cb.processResult(rc, clientPath, p.ctx, rsp
                                    .getAcl(), rsp.getStat());
                        } else {
                            cb.processResult(rc, clientPath, p.ctx, null,
                                    null);
                        }
                    } else if (p.response instanceof GetChildrenResponse) {
                        ChildrenCallback cb = (ChildrenCallback) p.cb;
                        GetChildrenResponse rsp = (GetChildrenResponse) p.response;
                        if (rc == 0) {
                            cb.processResult(rc, clientPath, p.ctx, rsp
                                    .getChildren());
                        } else {
                            cb.processResult(rc, clientPath, p.ctx, null);
                        }
                    } else if (p.response instanceof GetChildren2Response) {
                        Children2Callback cb = (Children2Callback) p.cb;
                        GetChildren2Response rsp = (GetChildren2Response) p.response;
                        if (rc == 0) {
                            cb.processResult(rc, clientPath, p.ctx, rsp
                                    .getChildren(), rsp.getStat());
                        } else {
                            cb.processResult(rc, clientPath, p.ctx, null, null);
                        }
                    } else if (p.response instanceof CreateResponse) {
                        StringCallback cb = (StringCallback) p.cb;
                        CreateResponse rsp = (CreateResponse) p.response;
                        if (rc == 0) {
                            // Strip the chroot prefix so the callback sees the
                            // client's view of the created path.
                            cb.processResult(rc, clientPath, p.ctx,
                                    (chrootPath == null
                                            ? rsp.getPath()
                                            : rsp.getPath()
                                              .substring(chrootPath.length())));
                        } else {
                            cb.processResult(rc, clientPath, p.ctx, null);
                        }
                    } else if (p.cb instanceof VoidCallback) {
                        VoidCallback cb = (VoidCallback) p.cb;
                        cb.processResult(rc, clientPath, p.ctx);
                    }
                }
            } catch (Throwable t) {
                LOG.error("Caught unexpected throwable", t);
            }
        }
}
    /**
     * Completes a packet: applies any watch registration with the reply's
     * error code, then either wakes the synchronous waiter (no callback) or
     * hands the packet to the event thread for async callback dispatch.
     */
    private void finishPacket(Packet p) {
        if (p.watchRegistration != null) {
            p.watchRegistration.register(p.replyHeader.getErr());
        }
        if (p.cb == null) {
            // Synchronous caller is blocked in submitRequest() on this packet.
            synchronized (p) {
                p.finished = true;
                p.notifyAll();
            }
        } else {
            p.finished = true;
            eventThread.queuePacket(p);
        }
    }
    /**
     * Fails a packet whose request can no longer be serviced, mapping the
     * current client state to the appropriate error code (auth failure,
     * session expiration, or generic connection loss) before completing it.
     */
    private void conLossPacket(Packet p) {
        if (p.replyHeader == null) {
            return;
        }
        switch (state) {
        case AUTH_FAILED:
            p.replyHeader.setErr(KeeperException.Code.AUTHFAILED.intValue());
            break;
        case CLOSED:
            p.replyHeader.setErr(KeeperException.Code.SESSIONEXPIRED.intValue());
            break;
        default:
            p.replyHeader.setErr(KeeperException.Code.CONNECTIONLOSS.intValue());
        }
        finishPacket(p);
    }
    // Highest zxid seen from the server; sent back in ConnectRequest and
    // SetWatches when (re)establishing a session.
    private volatile long lastZxid;
    /** Signals that the connection's input stream ended unexpectedly. */
    static class EndOfStreamException extends IOException {
        private static final long serialVersionUID = -5438877188796231422L;
        public EndOfStreamException(String msg) {
            super(msg);
        }
        @Override
        public String toString() {
            return "EndOfStreamException: " + getMessage();
        }
    }
    /** Signals that nothing was heard from the server within the timeout (see SendThread.run()). */
    private static class SessionTimeoutException extends IOException {
        private static final long serialVersionUID = 824482094072071178L;
        public SessionTimeoutException(String msg) {
            super(msg);
        }
    }
    /** Signals that the server considers this session expired (see onConnected()). */
    private static class SessionExpiredException extends IOException {
        private static final long serialVersionUID = -1388816932076193249L;
        public SessionExpiredException(String msg) {
            super(msg);
        }
    }
    // Maximum wire packet size in bytes, configurable via the "jute.maxbuffer"
    // system property; defaults to 4 MB.
    public static final int packetLen = Integer.getInteger("jute.maxbuffer",
            4096 * 1024);
/**
* This class services the outgoing request queue and generates the heart
* beats. It also spawns the ReadThread.
*/
class SendThread extends Thread {
        // Timestamp (System.nanoTime) of the last ping sent; used to log RTT.
        private long lastPingSentNs;
        private final ClientCnxnSocket clientCnxnSocket;
        // Jitter source for the pre-reconnect sleep in startConnect().
        private Random r = new Random(System.nanoTime());
        private boolean isFirstConnect = true;
        /**
         * Parses one server reply. Replies with the reserved xids -2 (ping),
         * -4 (auth) and -1 (watch notification) are handled specially;
         * anything else must match the head of pendingQueue, whose packet is
         * then filled in and completed.
         */
        void readResponse(ByteBuffer incomingBuffer) throws IOException {
            ByteBufferInputStream bbis = new ByteBufferInputStream(
                    incomingBuffer);
            BinaryInputArchive bbia = BinaryInputArchive.getArchive(bbis);
            ReplyHeader replyHdr = new ReplyHeader();
            replyHdr.deserialize(bbia, "header");
            if (replyHdr.getXid() == -2) {
                // -2 is the xid for pings
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Got ping response for sessionid: 0x"
                            + Long.toHexString(sessionId)
                            + " after "
                            + ((System.nanoTime() - lastPingSentNs) / 1000000)
                            + "ms");
                }
                return;
            }
            if (replyHdr.getXid() == -4) {
                // -4 is the xid for AuthPacket
                if(replyHdr.getErr() == KeeperException.Code.AUTHFAILED.intValue()) {
                    state = States.AUTH_FAILED;
                    eventThread.queueEvent( new WatchedEvent(Watcher.Event.EventType.None,
                            Watcher.Event.KeeperState.AuthFailed, null) );
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Got auth sessionid:0x"
                            + Long.toHexString(sessionId));
                }
                return;
            }
            if (replyHdr.getXid() == -1) {
                // -1 means notification
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Got notification sessionid:0x"
                            + Long.toHexString(sessionId));
                }
                WatcherEvent event = new WatcherEvent();
                event.deserialize(bbia, "response");
                // convert from a server path to a client path
                if (chrootPath != null) {
                    String serverPath = event.getPath();
                    if(serverPath.compareTo(chrootPath)==0)
                        event.setPath("/");
                    else
                        event.setPath(serverPath.substring(chrootPath.length()));
                }
                WatchedEvent we = new WatchedEvent(event);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Got " + we + " for sessionid 0x"
                            + Long.toHexString(sessionId));
                }
                eventThread.queueEvent( we );
                return;
            }
            Packet packet;
            synchronized (pendingQueue) {
                if (pendingQueue.size() == 0) {
                    throw new IOException("Nothing in the queue, but got "
                            + replyHdr.getXid());
                }
                packet = pendingQueue.remove();
            }
            /*
             * Since requests are processed in order, we better get a response
             * to the first request!
             */
            try {
                if (packet.requestHeader.getXid() != replyHdr.getXid()) {
                    packet.replyHeader.setErr(
                            KeeperException.Code.CONNECTIONLOSS.intValue());
                    // note: the stray '+' on the next line is a unary plus
                    // applied to getErr(); it is harmless.
                    throw new IOException("Xid out of order. Got Xid "
                            + replyHdr.getXid() + " with err " +
                            + replyHdr.getErr() +
                            " expected Xid "
                            + packet.requestHeader.getXid()
                            + " for a packet with details: "
                            + packet );
                }
                packet.replyHeader.setXid(replyHdr.getXid());
                packet.replyHeader.setErr(replyHdr.getErr());
                packet.replyHeader.setZxid(replyHdr.getZxid());
                if (replyHdr.getZxid() > 0) {
                    lastZxid = replyHdr.getZxid();
                }
                if (packet.response != null && replyHdr.getErr() == 0) {
                    packet.response.deserialize(bbia, "response");
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Reading reply sessionid:0x"
                            + Long.toHexString(sessionId) + ", packet:: " + packet);
                }
            } finally {
                // Always complete the packet, even on xid mismatch, so the
                // caller is unblocked / the callback fires.
                finishPacket(packet);
            }
        }
        SendThread(ClientCnxnSocket clientCnxnSocket) {
            super(makeThreadName("-SendThread()"));
            state = States.CONNECTING;
            this.clientCnxnSocket = clientCnxnSocket;
            setUncaughtExceptionHandler(uncaughtExceptionHandler);
            // Daemon thread: must not keep the JVM alive on its own.
            setDaemon(true);
        }
        // TODO: can not name this method getState since Thread.getState()
        // already exists
        // It would be cleaner to make class SendThread an implementation of
        // Runnable
        /**
         * Used by ClientCnxnSocket
         *
         * @return the connection's current {@link ZooKeeper.States}
         */
        ZooKeeper.States getZkState() {
            return state;
        }
        /** @return the socket implementation this thread drives. */
        ClientCnxnSocket getClientCnxnSocket() {
            return clientCnxnSocket;
        }
        /**
         * Queues the session handshake once the socket has connected. Because
         * each packet is pushed with addFirst(), the final send order is:
         * ConnectRequest, then auth packets, then (optionally) SetWatches to
         * restore outstanding watches.
         */
        void primeConnection() throws IOException {
            LOG.info("Socket connection established to "
                    + clientCnxnSocket.getRemoteSocketAddress() + ", initiating session");
            isFirstConnect = false;
            ConnectRequest conReq = new ConnectRequest(0, lastZxid,
                    sessionTimeout, sessionId, sessionPasswd);
            synchronized (outgoingQueue) {
                // We add backwards since we are pushing into the front
                // Only send if there's a pending watch
                // TODO: here we have the only remaining use of zooKeeper in
                // this class. It's to be eliminated!
                if (!disableAutoWatchReset
                        && (!zooKeeper.getDataWatches().isEmpty()
                                || !zooKeeper.getExistWatches().isEmpty() || !zooKeeper
                                .getChildWatches().isEmpty())) {
                    SetWatches sw = new SetWatches(lastZxid,
                            zooKeeper.getDataWatches(),
                            zooKeeper.getExistWatches(),
                            zooKeeper.getChildWatches());
                    RequestHeader h = new RequestHeader();
                    h.setType(ZooDefs.OpCode.setWatches);
                    h.setXid(-8);
                    Packet packet = new Packet(h, new ReplyHeader(), sw, null, null);
                    outgoingQueue.addFirst(packet);
                }
                for (AuthData id : authInfo) {
                    outgoingQueue.addFirst(new Packet(new RequestHeader(-4,
                            OpCode.auth), null, new AuthPacket(0, id.scheme,
                            id.data), null, null));
                }
                outgoingQueue.addFirst((new Packet(null, null, conReq, null,
                        null)));
            }
            clientCnxnSocket.enableReadWriteOnly();
            if (LOG.isDebugEnabled()) {
                LOG.debug("Session establishment request sent on "
                        + clientCnxnSocket.getRemoteSocketAddress());
            }
        }
        /** Queues a heartbeat ping (reserved xid -2) and records the send time for RTT logging. */
        private void sendPing() {
            lastPingSentNs = System.nanoTime();
            RequestHeader h = new RequestHeader(-2, OpCode.ping);
            queuePacket(h, null, null, null, null, null, null, null, null);
        }
        /**
         * Opens a connection to the next host from the HostProvider. Adds up
         * to 1s of random jitter before any reconnect (but not the very first
         * connect) to avoid reconnect stampedes.
         */
        private void startConnect() throws IOException {
            if(!isFirstConnect){
                try {
                    Thread.sleep(r.nextInt(1000));
                } catch (InterruptedException e1) {
                    // NOTE(review): interrupt status is not restored here —
                    // confirm whether swallowing the interrupt is intended.
                    LOG.warn("Unexpected exception", e1);
                }
            }
            state = States.CONNECTING;
            InetSocketAddress addr = hostProvider.next(1000);
            LOG.info("Opening socket connection to server " + addr);
            // Embed the target host:port in the thread name for diagnostics.
            setName(getName().replaceAll("\\(.*\\)",
                    "(" + addr.getHostName() + ":" + addr.getPort() + ")"));
            clientCnxnSocket.connect(addr);
        }
        private static final String RETRY_CONN_MSG =
            ", closing socket connection and attempting reconnect";
        /**
         * Main I/O loop: (re)connects as needed, enforces the read/connect
         * timeouts, schedules pings at half the read timeout, and delegates
         * actual socket work to clientCnxnSocket.doTransport(). On error the
         * connection is cleaned up and, unless the client is closing, a
         * Disconnected event is queued and the loop retries.
         */
        @Override
        public void run() {
            clientCnxnSocket.introduce(this,sessionId);
            clientCnxnSocket.updateNow();
            clientCnxnSocket.updateLastSendAndHeard();
            int to;
            while (state.isAlive()) {
                try {
                    if (!clientCnxnSocket.isConnected()) {
                        // don't re-establish connection if we are closing
                        if (closing) {
                            break;
                        }
                        startConnect();
                        clientCnxnSocket.updateLastSendAndHeard();
                    }
                    // Remaining time before the applicable timeout expires.
                    if (state == States.CONNECTED) {
                        to = readTimeout - clientCnxnSocket.getIdleRecv();
                    } else {
                        to = connectTimeout - clientCnxnSocket.getIdleRecv();
                    }
                    if (to <= 0) {
                        throw new SessionTimeoutException(
                                "Client session timed out, have not heard from server in "
                                        + clientCnxnSocket.getIdleRecv() + "ms"
                                        + " for sessionid 0x"
                                        + Long.toHexString(sessionId));
                    }
                    if (state == States.CONNECTED) {
                        // Ping when half the read timeout has elapsed since
                        // the last send; otherwise shorten the select timeout
                        // so we wake in time for the next ping.
                        int timeToNextPing = readTimeout / 2
                                - clientCnxnSocket.getIdleSend();
                        if (timeToNextPing <= 0) {
                            sendPing();
                            clientCnxnSocket.updateLastSend();
                            clientCnxnSocket.enableWrite();
                        } else {
                            if (timeToNextPing < to) {
                                to = timeToNextPing;
                            }
                        }
                    }
                    clientCnxnSocket.doTransport(to, pendingQueue, outgoingQueue);
                } catch (Exception e) {
                    if (closing) {
                        if (LOG.isDebugEnabled()) {
                            // closing so this is expected
                            LOG.debug("An exception was thrown while closing send thread for session 0x"
                                    + Long.toHexString(getSessionId())
                                    + " : " + e.getMessage());
                        }
                        break;
                    } else {
                        // this is ugly, you have a better way speak up
                        if (e instanceof SessionExpiredException) {
                            LOG.info(e.getMessage() + ", closing socket connection");
                        } else if (e instanceof SessionTimeoutException) {
                            LOG.info(e.getMessage() + RETRY_CONN_MSG);
                        } else if (e instanceof EndOfStreamException) {
                            LOG.info(e.getMessage() + RETRY_CONN_MSG);
                        } else {
                            LOG.warn(
                                    "Session 0x"
                                            + Long.toHexString(getSessionId())
                                            + " for server "
                                            + clientCnxnSocket.getRemoteSocketAddress()
                                            + ", unexpected error"
                                            + RETRY_CONN_MSG, e);
                        }
                        // Fail all in-flight packets, tell watchers we are
                        // disconnected, and loop around to reconnect.
                        cleanup();
                        if (state.isAlive()) {
                            eventThread.queueEvent(new WatchedEvent(
                                    Event.EventType.None,
                                    Event.KeeperState.Disconnected,
                                    null));
                        }
                        clientCnxnSocket.updateNow();
                        clientCnxnSocket.updateLastSendAndHeard();
                    }
                }
            }
            // Loop exit: final cleanup and a last Disconnected event if the
            // state still counts as alive.
            cleanup();
            clientCnxnSocket.close();
            if (state.isAlive()) {
                eventThread.queueEvent(new WatchedEvent(Event.EventType.None,
                        Event.KeeperState.Disconnected, null));
            }
            ZooTrace.logTraceMessage(LOG, ZooTrace.getTextTraceLevel(),
                    "SendThread exitedloop.");
        }
        /**
         * Tears down the socket and fails every pending and outgoing packet
         * with a state-appropriate connection-loss error (see conLossPacket()).
         */
        private void cleanup() {
            clientCnxnSocket.cleanup();
            synchronized (pendingQueue) {
                for (Packet p : pendingQueue) {
                    conLossPacket(p);
                }
                pendingQueue.clear();
            }
            synchronized (outgoingQueue) {
                for (Packet p : outgoingQueue) {
                    conLossPacket(p);
                }
                outgoingQueue.clear();
            }
        }
        /**
         * Callback invoked by the ClientCnxnSocket once a connection has been
         * established and the server's ConnectResponse has been read.
         *
         * @param _negotiatedSessionTimeout session timeout granted by the
         *        server; a non-positive value means the session has expired
         * @param _sessionId server-assigned session id
         * @param _sessionPasswd server-assigned session password
         * @throws IOException if the session has expired
         */
        void onConnected(int _negotiatedSessionTimeout, long _sessionId,
                byte[] _sessionPasswd) throws IOException {
            negotiatedSessionTimeout = _negotiatedSessionTimeout;
            if (negotiatedSessionTimeout <= 0) {
                // Server refused the session: mark closed, notify watchers
                // of expiry, and shut the event thread down.
                state = States.CLOSED;
                eventThread.queueEvent(new WatchedEvent(
                        Watcher.Event.EventType.None,
                        Watcher.Event.KeeperState.Expired, null));
                eventThread.queueEventOfDeath();
                throw new SessionExpiredException(
                        "Unable to reconnect to ZooKeeper service, session 0x"
                                + Long.toHexString(sessionId) + " has expired");
            }
            // Re-derive the timeouts from the value the server actually granted.
            readTimeout = negotiatedSessionTimeout * 2 / 3;
            connectTimeout = negotiatedSessionTimeout / hostProvider.size();
            hostProvider.onConnected();
            sessionId = _sessionId;
            sessionPasswd = _sessionPasswd;
            state = States.CONNECTED;
            LOG.info("Session establishment complete on server "
                    + clientCnxnSocket.getRemoteSocketAddress() + ", sessionid = 0x"
                    + Long.toHexString(sessionId) + ", negotiated timeout = "
                    + negotiatedSessionTimeout);
            eventThread.queueEvent(new WatchedEvent(
                    Watcher.Event.EventType.None,
                    Watcher.Event.KeeperState.SyncConnected, null));
        }
        /** Marks the connection CLOSED and wakes the socket so run() can exit. */
        void close() {
            state = States.CLOSED;
            clientCnxnSocket.wakeupCnxn();
        }
        /** Test hook: forcibly closes the underlying socket. */
        void testableCloseSocket() throws IOException {
            clientCnxnSocket.testableCloseSocket();
        }
}
    /**
     * Shutdown the send/event threads. This method should not be called
     * directly - rather it should be called as part of close operation. This
     * method is primarily here to allow the tests to verify disconnection
     * behavior.
     */
    public void disconnect() {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Disconnecting client for session: 0x"
                    + Long.toHexString(getSessionId()));
        }
        // Stop the I/O loop first, then tell the event thread to drain and exit.
        sendThread.close();
        eventThread.queueEventOfDeath();
    }
    /**
     * Close the connection, which includes; send session disconnect to the
     * server, shutdown the send/event threads.
     *
     * @throws IOException
     */
    public void close() throws IOException {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Closing client for session: 0x"
                    + Long.toHexString(getSessionId()));
        }
        try {
            // Best effort: ask the server to close the session cleanly.
            RequestHeader h = new RequestHeader();
            h.setType(ZooDefs.OpCode.closeSession);
            submitRequest(h, null, null, null);
        } catch (InterruptedException e) {
            // ignore, close the send/event threads
        } finally {
            disconnect();
        }
    }
    // Next request xid, starting at 1; negative xids are reserved for pings
    // (-2), auth (-4) and notifications (-1) — see readResponse().
    private int xid = 1;
    private volatile States state;
    // Synchronized so concurrent callers receive unique, increasing xids.
    synchronized private int getXid() {
        return xid++;
    }
/**
 * Submits a request and blocks the calling thread until its reply has been
 * processed, i.e. until {@code packet.finished} becomes true.
 *
 * @param h request header; its xid is assigned inside {@code queuePacket}
 * @param request serialized request body, may be null
 * @param response record the reply is deserialized into, may be null
 * @param watchRegistration watcher registration to apply, may be null
 * @return the reply header of the completed request
 * @throws InterruptedException if interrupted while waiting for the reply
 */
public ReplyHeader submitRequest(RequestHeader h, Record request,
        Record response, WatchRegistration watchRegistration)
        throws InterruptedException {
    ReplyHeader r = new ReplyHeader();
    Packet packet = queuePacket(h, r, request, response, null, null, null,
            null, watchRegistration);
    // Wait on the packet's monitor; the connection machinery is expected
    // to set packet.finished and notify once the reply (or a
    // connection-loss result) has been delivered.
    synchronized (packet) {
        while (!packet.finished) {
            packet.wait();
        }
    }
    return r;
}
/**
 * Builds a {@link Packet} for the given request, enqueues it for the send
 * thread (or fails it immediately on a dead/closing connection), and wakes
 * the connection socket.
 *
 * @param h request header; receives a fresh xid unless it is a ping/auth
 * @param r reply header that will be filled in when the reply arrives
 * @param request serialized request body, may be null
 * @param response record the reply is deserialized into, may be null
 * @param cb async callback to invoke on completion, may be null
 * @param clientPath path as given by the client, for callbacks
 * @param serverPath path as sent to the server (chroot-resolved)
 * @param ctx opaque callback context
 * @param watchRegistration watcher registration to apply, may be null
 * @return the queued (or already-failed) packet
 */
Packet queuePacket(RequestHeader h, ReplyHeader r, Record request,
        Record response, AsyncCallback cb, String clientPath,
        String serverPath, Object ctx, WatchRegistration watchRegistration)
{
    Packet packet = null;
    synchronized (outgoingQueue) {
        // Ping and auth requests do not get a generated xid here; all
        // other requests do. Assignment happens inside the lock so queue
        // order matches xid order.
        if (h.getType() != OpCode.ping && h.getType() != OpCode.auth) {
            h.setXid(getXid());
        }
        packet = new Packet(h, r, request, response, watchRegistration);
        packet.cb = cb;
        packet.ctx = ctx;
        packet.clientPath = clientPath;
        packet.serverPath = serverPath;
        if (!state.isAlive() || closing) {
            // Connection is dead or already shutting down: complete the
            // packet immediately with a connection-loss result.
            conLossPacket(packet);
        } else {
            // If the client is asking to close the session then
            // mark as closing
            if (h.getType() == OpCode.closeSession) {
                closing = true;
            }
            outgoingQueue.add(packet);
        }
    }
    sendThread.getClientCnxnSocket().wakeupCnxn();
    return packet;
}
/**
 * Records an auth credential locally and, if the connection is alive,
 * queues an auth packet (fixed xid -4) to send it to the server.
 * Silently does nothing when the connection is no longer alive.
 *
 * @param scheme authentication scheme name
 * @param auth credential bytes for the scheme
 */
public void addAuthInfo(String scheme, byte auth[]) {
    if (!state.isAlive()) {
        return;
    }
    authInfo.add(new AuthData(scheme, auth));
    queuePacket(new RequestHeader(-4, OpCode.auth), null,
            new AuthPacket(0, scheme, auth), null, null, null, null,
            null, null);
}
/**
 * Returns the current connection state (volatile read; the value may
 * change immediately after this call returns).
 */
States getState() {
    return state;
}
}
|
|
package ca.qc.bergeron.marcantoine.crammeur.librairy.utils.utils;
import org.jetbrains.annotations.Nullable;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import ca.qc.bergeron.marcantoine.crammeur.librairy.models.i.Data;
import ca.qc.bergeron.marcantoine.crammeur.librairy.utils.LongListIterator;
import ca.qc.bergeron.marcantoine.crammeur.librairy.utils.Parallel;
import ca.qc.bergeron.marcantoine.crammeur.librairy.utils.i.ListIterator;
/**
 * Created by Marc-Antoine on 2017-09-19.
 *
 * Exercises the list-iterator utilities and the {@link Parallel} helpers.
 * The many identical anonymous {@code Data<Long>} literals of the original
 * version are factored into the {@link #newData(Long)} helper; the test's
 * own assertions (removal of freshly-built instances) show equality is
 * id-based rather than class-based, so this is behavior-preserving.
 */
public class UtilsTests {

    /** Builds a fresh {@link Data} instance with a {@code null} id. */
    private static Data<Long> newData() {
        return newData(null);
    }

    /**
     * Builds a fresh {@link Data} instance carrying {@code pId}.
     * Extracted to remove the duplicated anonymous-class literals the
     * original test repeated inline.
     *
     * @param pId id to set on the new instance, may be null
     * @return a new Data instance whose id is {@code pId}
     */
    private static Data<Long> newData(@Nullable Long pId) {
        final Data<Long> result = new ca.qc.bergeron.marcantoine.crammeur.librairy.models.Data<Long>() {
            Long Id = null;

            @Nullable
            @Override
            public Long getId() {
                return Id;
            }

            @Override
            public void setId(@Nullable Long pId) {
                this.Id = pId;
            }
        };
        result.setId(pId);
        return result;
    }

    @Test
    public void testDataIntegerListIterator() {
        // TODO: not yet implemented.
    }

    /**
     * End-to-end exercise of {@link LongListIterator}: add/contains/next/
     * previous navigation, concurrent appends through an executor,
     * Parallel.For copying, and collection-view mutation semantics.
     */
    @Test
    public void testDataLongListIterator() throws InterruptedException {
        final long count = 10000;
        final ListIterator<Data<Long>, Long> dli = new LongListIterator<Data<Long>>();
        final ListIterator<Data<Long>, Long> dli2 = new LongListIterator<>();
        Assert.assertTrue(dli.equals(dli2));
        Assert.assertTrue(dli.isEmpty());
        Data<Long> data = newData();
        dli.add(data);
        Assert.assertTrue(!dli.isEmpty());
        Assert.assertTrue(dli.contains(data));
        Assert.assertTrue(dli.hasNext());
        Assert.assertTrue(!dli.hasPrevious());
        Assert.assertTrue(dli.next().equals(data));
        Assert.assertTrue(dli.hasActual());
        Assert.assertTrue(dli.get().equals(data));
        Assert.assertTrue(!dli.hasPrevious());
        Assert.assertTrue(!dli.hasNext());
        Data<Long> data2 = newData(1L);
        dli.add(data2);
        Assert.assertTrue(dli.contains(data) && dli.contains(data2));
        Assert.assertTrue(dli.hasNext());
        Assert.assertTrue(dli.next().getId() == 1L);
        Assert.assertTrue(dli.hasPrevious());
        Assert.assertTrue(dli.previous().getId() == null);
        Assert.assertTrue(!dli.hasPrevious());
        Assert.assertTrue(dli.hasNext());
        // Ten more null-id elements appended at the end.
        for (int index = 0; index < 10; index++) {
            Assert.assertTrue(dli.addAtEnd(newData()));
        }
        Assert.assertTrue(!dli.hasPrevious());
        dli.next();
        for (int index = 0; index < 10; index++) {
            Assert.assertTrue(dli.next().getId() == null);
        }
        final ExecutorService exec = Executors.newSingleThreadExecutor();
        final Runnable runnable = new Runnable() {
            // NOTE(review): this field is shared by every execution of this
            // single Runnable instance; after the first run finishes,
            // index == count and subsequent runs are no-ops. Confirm this
            // is the intended behavior.
            long index = 0;

            @Override
            public void run() {
                for (; index < count; index++) {
                    Data<Long> data3 = newData(index);
                    synchronized (dli) {
                        dli.addAtEnd(data3);
                    }
                    synchronized (dli2) {
                        dli2.addAtEnd(data3);
                    }
                    System.out.println("Index : " + String.valueOf(index));
                }
            }
        };
        try {
            for (int threadIndex = 0; threadIndex < Runtime.getRuntime().availableProcessors() * 2; threadIndex++) {
                exec.execute(runnable);
            }
        } finally {
            exec.shutdown();
            // Busy-wait (with a minimal sleep) until all queued runs finish.
            while (!exec.isTerminated()) {
                Thread.sleep(0, 1);
            }
        }
        // Copy dli2 into dli3 element-by-element via Parallel.For.
        final LongListIterator<Data<Long>> dli3 = new LongListIterator<>();
        Parallel.For(dli2, new Parallel.Operation<Data<Long>>() {
            @Override
            public void perform(Data<Long> pParameter) {
                System.out.println(pParameter);
                synchronized (dli3) {
                    dli3.addAtEnd(pParameter);
                }
            }

            @Override
            public boolean follow() {
                return true;
            }
        });
        Collection<Data<Long>> collection = dli.getCollection();
        Assert.assertTrue(collection.remove(data));
        // Freshly-built null-id instances must match the ten added earlier,
        // which shows Data equality is id-based.
        for (int index = 0; index < 10; index++) {
            Assert.assertTrue(collection.remove(newData()));
        }
        Assert.assertTrue(!collection.contains(data));
        Assert.assertTrue(collection.remove(data2));
        Assert.assertTrue(collection.size() == dli2.nextCollection().size());
        Assert.assertTrue(dli.equals(dli2));
        System.out.println("dli equals dli2");
        final long i = count - 1;
        Data<Long> data3 = newData(i);
        Assert.assertTrue(collection.remove(data3));
        System.out.println("Remove last data");
        Assert.assertTrue(!collection.contains(data3));
        Assert.assertTrue(!dli.equals(dli2));
        // Duplicate both dli2 and dli3 in the middle; they must stay equal.
        Assert.assertTrue(dli2.getCollection().addAll(dli2.getCollection().size() / 2, new ArrayList<>(dli2.getCollection())));
        Assert.assertTrue(dli3.nextCollection().addAll(dli3.getCollection().size() / 2, new ArrayList<>(dli3.getCollection())));
        Assert.assertTrue(dli2.equals(dli3));
        final LongListIterator<Data<Long>> dli4 = new LongListIterator<>(dli3);
        Assert.assertTrue(dli4.equals(dli3));
        System.out.println("dli4 equals dli3");
        dli4.clear();
        Assert.assertTrue(dli4.size() == 0L);
        Assert.assertTrue(!dli4.equals(dli3));
        dli3.getCollection().clear();
        Assert.assertTrue(!dli3.hasActual());
        Assert.assertTrue(dli4.equals(dli3));
        System.out.println("dli4 equals dli3");
        collection.clear();
        Assert.assertTrue(collection.isEmpty());
        Assert.assertTrue(dli.equals(dli3));
        Assert.assertTrue(collection.add(data));
        Assert.assertTrue(dli3.addAtEnd(data));
        Assert.assertTrue(collection.equals(dli3.nextCollection()));
        Assert.assertTrue(dli.equals(dli3));
    }

    @Test
    public void testKeyLongSetIterator() {
        /*final SetIterator<Long> ksi = new LongSetIterator();
        SetIterator<Long> ksi2 = new LongSetIterator();
        Assert.assertTrue(ksi.equals(ksi2));
        Assert.assertTrue(ksi.isEmpty() && ksi2.isEmpty());
        ksi.add(0L);
        Assert.assertTrue(ksi.contains(0L));
        Assert.assertTrue(!ksi.equals(ksi2));
        ksi2.add(1L);
        Assert.assertTrue(ksi2.contains(1L));
        Assert.assertTrue(!ksi2.equals(ksi));
        Assert.assertTrue(ksi.size().equals(ksi2.size()));
        Assert.assertTrue(ksi2.remove(1L));
        Assert.assertTrue(ksi2.isEmpty());
        ksi2.add(0L);
        Assert.assertTrue(ksi.equals(ksi2));
        try {
            ksi.add(0L);
            Assert.fail();
        } catch (ContainsException e) {
            //Is ok
        }
        ksi.clear();
        Assert.assertTrue(ksi.isEmpty());
        final long count = 3500000;
        ExecutorService executorService = Executors.newSingleThreadExecutor();
        final Runnable runnable = new Runnable() {
            @Override
            public void run() {
                for (long getIndex = 0; getIndex < count; getIndex++) {
                    synchronized (ksi) {
                        ksi.add(getIndex);
                    }
                    System.out.println("Index : " + String.valueOf(getIndex));
                }
            }
        };
        try {
            executorService.submit(runnable);
        } finally {
            executorService.shutdown();
            while (!executorService.isTerminated()) {
                try {
                    Thread.sleep(0,1);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }
            }
        }
        Assert.assertTrue(ksi.size().equals(count));*/
    }

    @Test
    public void testDataLongMap() {
        // TODO: not yet implemented.
    }

    /**
     * Drives {@link Parallel#Execute} with a Running whose restart/start
     * decisions are randomized (looping until the draw is not 1, then
     * returning whether it was 0) and whose previous/next run is itself.
     */
    @Test
    public void testParallelExecute() {
        Parallel.Running running = new Parallel.Running() {
            @Override
            public boolean restartPrevious() {
                int random;
                do {
                    random = new Random().nextInt(3);
                    System.out.println(String.valueOf(random) + " Restart");
                } while (random == 1);
                return random == 0;
            }

            @Nullable
            @Override
            public Parallel.Running<?> previousRun() {
                return this;
            }

            @Override
            public Object actualParam() {
                return null;
            }

            @Override
            public boolean startNext() {
                int random;
                do {
                    random = new Random().nextInt(3);
                    System.out.println(String.valueOf(random) + " Start");
                } while (random == 1);
                return random == 0;
            }

            @Nullable
            @Override
            public Parallel.Running<?> nextRun() {
                return this;
            }

            @Override
            public void perform(Object pParameter) {
            }

            @Override
            public boolean follow() {
                return true;
            }
        };
        Parallel.Execute(running, null);
    }
}
|
|
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.adapters.saml;
import org.apache.catalina.Manager;
import org.apache.catalina.Session;
import org.apache.catalina.connector.Request;
import org.apache.catalina.realm.GenericPrincipal;
import org.jboss.logging.Logger;
import org.keycloak.adapters.spi.HttpFacade;
import org.keycloak.adapters.spi.SessionIdMapper;
import org.keycloak.adapters.tomcat.CatalinaUserSessionManagement;
import org.keycloak.adapters.tomcat.GenericPrincipalFactory;
import org.keycloak.common.util.KeycloakUriBuilder;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
 * Catalina/Tomcat-backed implementation of {@link SamlSessionStore}: keeps
 * the authenticated {@link SamlSession} in the HTTP session, maintains the
 * SAML-session-index to container-session-id mapping via the injected
 * {@link SessionIdMapper}, and drives logout across those sessions.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class CatalinaSamlSessionStore implements SamlSessionStore {
    protected static Logger log = Logger.getLogger(SamlSessionStore.class);
    public static final String SAML_REDIRECT_URI = "SAML_REDIRECT_URI";

    private final CatalinaUserSessionManagement sessionManagement;
    protected final GenericPrincipalFactory principalFactory;
    private final SessionIdMapper idMapper;
    protected final Request request;
    protected final AbstractSamlAuthenticatorValve valve;
    protected final HttpFacade facade;
    protected final SamlDeployment deployment;

    public CatalinaSamlSessionStore(CatalinaUserSessionManagement sessionManagement, GenericPrincipalFactory principalFactory,
                                    SessionIdMapper idMapper, Request request, AbstractSamlAuthenticatorValve valve, HttpFacade facade,
                                    SamlDeployment deployment) {
        this.sessionManagement = sessionManagement;
        this.principalFactory = principalFactory;
        this.idMapper = idMapper;
        this.request = request;
        this.valve = valve;
        this.facade = facade;
        this.deployment = deployment;
    }

    /**
     * Records the current authentication action in the HTTP session.
     * Skips session creation when resetting to NONE and no session exists.
     */
    @Override
    public void setCurrentAction(CurrentAction action) {
        if (action == CurrentAction.NONE && request.getSession(false) == null) return;
        request.getSession().setAttribute(CURRENT_ACTION, action);
    }

    /** Returns true when the session's recorded action is LOGGING_IN. */
    @Override
    public boolean isLoggingIn() {
        HttpSession session = request.getSession(false);
        if (session == null) return false;
        CurrentAction action = (CurrentAction) session.getAttribute(CURRENT_ACTION);
        return action == CurrentAction.LOGGING_IN;
    }

    /** Returns true when the session's recorded action is LOGGING_OUT. */
    @Override
    public boolean isLoggingOut() {
        HttpSession session = request.getSession(false);
        if (session == null) return false;
        CurrentAction action = (CurrentAction) session.getAttribute(CURRENT_ACTION);
        return action == CurrentAction.LOGGING_OUT;
    }

    /**
     * Removes the SAML session state from the current container session
     * (including its id-mapper entry when a session index was tracked) and
     * clears the session's principal and auth type.
     */
    @Override
    public void logoutAccount() {
        Session sessionInternal = request.getSessionInternal(false);
        if (sessionInternal == null) return;
        HttpSession session = sessionInternal.getSession();
        if (session != null) {
            SamlSession samlSession = (SamlSession) session.getAttribute(SamlSession.class.getName());
            if (samlSession != null) {
                // Only sessions with a SAML session index were registered
                // with the id mapper.
                if (samlSession.getSessionIndex() != null) {
                    idMapper.removeSession(session.getId());
                }
                session.removeAttribute(SamlSession.class.getName());
            }
            session.removeAttribute(SAML_REDIRECT_URI);
        }
        sessionInternal.setPrincipal(null);
        sessionInternal.setAuthType(null);
    }

    /**
     * Logs out every container session mapped to the given principal and
     * drops those sessions from the id mapper.
     */
    @Override
    public void logoutByPrincipal(String principal) {
        Set<String> sessions = idMapper.getUserSessions(principal);
        if (sessions != null) {
            // Copy first: logoutSessionIds/removeSession mutate mapper state.
            List<String> ids = new LinkedList<String>();
            ids.addAll(sessions);
            logoutSessionIds(ids);
            for (String id : ids) {
                idMapper.removeSession(id);
            }
        }
    }

    /**
     * Logs out the container sessions associated with the given SAML SSO
     * ids, removing each resolved session from the id mapper.
     */
    @Override
    public void logoutBySsoId(List<String> ssoIds) {
        if (ssoIds == null) return;
        List<String> sessionIds = new LinkedList<String>();
        for (String id : ssoIds) {
            String sessionId = idMapper.getSessionFromSSO(id);
            if (sessionId != null) {
                sessionIds.add(sessionId);
                idMapper.removeSession(sessionId);
            }
        }
        logoutSessionIds(sessionIds);
    }

    /** Invalidates the given HTTP sessions through the context's session manager. */
    protected void logoutSessionIds(List<String> sessionIds) {
        if (sessionIds == null || sessionIds.isEmpty()) return;
        Manager sessionManager = request.getContext().getManager();
        sessionManagement.logoutHttpSessions(sessionManager, sessionIds);
    }

    /**
     * Returns {@code true} when the current container session holds a
     * {@link SamlSession}, re-attaching the principal to the session and the
     * request on the way out.
     *
     * <p>Bug fix vs. the original: a stray brace-less
     * {@code if (samlSession.getPrincipal().getName().equals(principal.getName()))}
     * guarded the null-principal recovery block, which (a) dereferenced
     * {@code principal} before its null check — an NPE whenever the container
     * had not preserved the principal — and (b) silently skipped principal
     * re-attachment when the names differed. A duplicate dead
     * {@code session == null} check has also been merged.
     */
    @Override
    public boolean isLoggedIn() {
        Session session = request.getSessionInternal(false);
        if (session == null) {
            log.debug("session was null, returning null");
            return false;
        }
        final SamlSession samlSession = (SamlSession) session.getSession().getAttribute(SamlSession.class.getName());
        if (samlSession == null) {
            log.debug("SamlSession was not in session, returning null");
            return false;
        }
        GenericPrincipal principal = (GenericPrincipal) session.getPrincipal();
        // in clustered environment in JBossWeb, principal is not serialized or saved
        if (principal == null) {
            principal = principalFactory.createPrincipal(request.getContext().getRealm(), samlSession.getPrincipal(), samlSession.getRoles());
            session.setPrincipal(principal);
            session.setAuthType("KEYCLOAK-SAML");
        } else {
            // Principal survived; it must belong to the same user as the
            // stored SAML session.
            if (!principal.getUserPrincipal().getName().equals(samlSession.getPrincipal().getName())) {
                throw new RuntimeException("Unknown State");
            }
            log.debug("************principal already in");
            if (log.isDebugEnabled()) {
                for (String role : principal.getRoles()) {
                    log.debug("principal role: " + role);
                }
            }
        }
        request.setUserPrincipal(principal);
        request.setAuthType("KEYCLOAK-SAML");
        restoreRequest();
        return true;
    }

    /**
     * Stores the authenticated SAML session in the container session,
     * (re-)creating the principal when the container did not preserve it,
     * and registers the session with the id mapper.
     */
    @Override
    public void saveAccount(SamlSession account) {
        Session session = request.getSessionInternal(true);
        session.getSession().setAttribute(SamlSession.class.getName(), account);
        GenericPrincipal principal = (GenericPrincipal) session.getPrincipal();
        // in clustered environment in JBossWeb, principal is not serialized or saved
        if (principal == null) {
            principal = principalFactory.createPrincipal(request.getContext().getRealm(), account.getPrincipal(), account.getRoles());
            session.setPrincipal(principal);
            session.setAuthType("KEYCLOAK-SAML");
        }
        request.setUserPrincipal(principal);
        request.setAuthType("KEYCLOAK-SAML");
        String newId = changeSessionId(session);
        idMapper.map(account.getSessionIndex(), account.getPrincipal().getSamlSubject(), newId);
    }

    /** Hook for subclasses to rotate the session id; default keeps the current id. */
    protected String changeSessionId(Session session) {
        return session.getId();
    }

    /** Returns the SAML session stored in the (created-if-absent) HTTP session. */
    @Override
    public SamlSession getAccount() {
        HttpSession session = getSession(true);
        return (SamlSession) session.getAttribute(SamlSession.class.getName());
    }

    /**
     * Returns the saved redirect URI, or computes a deployment-relative
     * default from the current request when none was saved.
     */
    @Override
    public String getRedirectUri() {
        String redirect = (String) getSession(true).getAttribute(SAML_REDIRECT_URI);
        if (redirect == null) {
            String contextPath = request.getContextPath();
            String baseUri = KeycloakUriBuilder.fromUri(request.getRequestURL().toString()).replacePath(contextPath).build().toString();
            return SamlUtil.getRedirectTo(facade, contextPath, baseUri);
        }
        return redirect;
    }

    /**
     * Saves the current request via the valve and records its URI so the
     * user can be redirected back after authentication.
     */
    @Override
    public void saveRequest() {
        try {
            valve.keycloakSaveRequest(request);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        getSession(true).setAttribute(SAML_REDIRECT_URI, facade.getRequest().getURI());
    }

    /** Clears the saved redirect URI and asks the valve to restore the original request. */
    @Override
    public boolean restoreRequest() {
        getSession(true).removeAttribute(SAML_REDIRECT_URI);
        return valve.keycloakRestoreRequest(request);
    }

    /** Returns the HTTP session backing the internal Catalina session, or null. */
    protected HttpSession getSession(boolean create) {
        Session session = request.getSessionInternal(create);
        if (session == null) return null;
        return session.getSession();
    }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexGraveyard;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLockObtainFailedException;
import org.elasticsearch.gateway.GatewayMetaState;
import org.elasticsearch.gateway.LocalAllocateDangledIndices;
import org.elasticsearch.gateway.MetaStateService;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.StringFieldMapper;
import org.elasticsearch.index.shard.IllegalIndexShardStateException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;
import org.elasticsearch.indices.IndicesService.ShardDeletionCheckResult;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class IndicesServiceTests extends ESSingleNodeTestCase {
/** Returns the test node's {@link IndicesService} instance. */
public IndicesService getIndicesService() {
    return getInstanceFromNode(IndicesService.class);
}
/** Returns the test node's {@link NodeEnvironment} instance. */
public NodeEnvironment getNodeEnvironment() {
    return getInstanceFromNode(NodeEnvironment.class);
}
/** Extends the base node's plugin set with this test's {@link TestPlugin}. */
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
    final List<Class<? extends Plugin>> pluginClasses = new ArrayList<>(super.getPlugins());
    pluginClasses.add(TestPlugin.class);
    return pluginClasses;
}
/**
 * Test plugin that registers a custom mapper type ("fake-mapper") and a
 * custom similarity ("fake-similarity") so tests can verify that
 * plugin-provided extensions are visible to the indices service.
 */
public static class TestPlugin extends Plugin implements MapperPlugin {

    public TestPlugin() {}

    /** Registers "fake-mapper", parsed with the string-field type parser. */
    @Override
    public Map<String, Mapper.TypeParser> getMappers() {
        return Collections.singletonMap("fake-mapper", new StringFieldMapper.TypeParser());
    }

    /** Registers "fake-similarity", backed by the BM25 similarity provider. */
    @Override
    public void onIndexModule(IndexModule indexModule) {
        super.onIndexModule(indexModule);
        indexModule.addSimilarity("fake-similarity", BM25SimilarityProvider::new);
    }
}
/** Request a fresh node for every test in this class. */
@Override
protected boolean resetNodeAfterTest() {
    return true;
}
/**
 * An open index configured for a shared filesystem (shadow replicas with a
 * custom data path) must not be deletable; once closed, deletion of its
 * contents becomes legal.
 */
public void testCanDeleteIndexContent() throws IOException {
    final IndicesService indicesService = getIndicesService();
    // Shared-filesystem-style settings: shadow replicas + external data path.
    IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder()
            .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true)
            .put(IndexMetaData.SETTING_DATA_PATH, "/foo/bar")
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 4))
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 3))
            .build());
    assertFalse("shard on shared filesystem", indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings));
    // Closing the index flips the verdict.
    final IndexMetaData.Builder newIndexMetaData = IndexMetaData.builder(idxSettings.getIndexMetaData());
    newIndexMetaData.state(IndexMetaData.State.CLOSE);
    idxSettings = IndexSettingsModule.newIndexSettings(newIndexMetaData.build());
    assertTrue("shard on shared filesystem, but closed, so it should be deletable",
            indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings));
}
/**
 * Walks a shard through the deletion-check states: no folder on disk,
 * still allocated, removed (folder deletable), and never-allocated.
 *
 * Fix vs. the original: every {@code assertEquals} passed its operands as
 * (message, actual, expected); JUnit's contract is (message, expected,
 * actual), so failures would have reported the values backwards. The
 * operands are now in conventional order (test outcome is unchanged since
 * equality is symmetric).
 */
public void testCanDeleteShardContent() {
    IndicesService indicesService = getIndicesService();
    IndexMetaData meta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(
            1).build();
    IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings());
    ShardId shardId = new ShardId(meta.getIndex(), 0);
    // Nothing on disk yet for this shard.
    assertEquals("no shard location", ShardDeletionCheckResult.NO_FOLDER_FOUND,
            indicesService.canDeleteShardContent(shardId, indexSettings));
    IndexService test = createIndex("test");
    shardId = new ShardId(test.index(), 0);
    assertTrue(test.hasShard(0));
    // Live shard: still allocated, so not deletable.
    assertEquals("shard is allocated", ShardDeletionCheckResult.STILL_ALLOCATED,
            indicesService.canDeleteShardContent(shardId, test.getIndexSettings()));
    test.removeShard(0, "boom");
    // Shard removed but folder remains: deletable.
    assertEquals("shard is removed", ShardDeletionCheckResult.FOLDER_FOUND_CAN_DELETE,
            indicesService.canDeleteShardContent(shardId, test.getIndexSettings()));
    ShardId notAllocated = new ShardId(test.index(), 100);
    assertEquals("shard that was never on this node should NOT be deletable",
            ShardDeletionCheckResult.NO_FOLDER_FOUND,
            indicesService.canDeleteShardContent(notAllocated, test.getIndexSettings()));
}
/*
public void testDeleteIndexStore() throws Exception {
IndicesService indicesService = getIndicesService();
IndexService test = createIndex("test");
ClusterService clusterService = getInstanceFromNode(ClusterService.class);
IndexMetaData firstMetaData = clusterService.state().metaData().index("test");
assertTrue(test.hasShard(0));
try {
indicesService.deleteIndexStore("boom", firstMetaData, clusterService.state());
fail();
} catch (IllegalStateException ex) {
// all good
}
GatewayMetaState gwMetaState = getInstanceFromNode(GatewayMetaState.class);
MetaData meta = gwMetaState.loadMetaState();
assertNotNull(meta);
assertNotNull(meta.index("test"));
assertAcked(client().admin().indices().prepareDelete("test"));
meta = gwMetaState.loadMetaState();
assertNotNull(meta);
assertNull(meta.index("test"));
test = createIndex("test");
client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
client().admin().indices().prepareFlush("test").get();
assertHitCount(client().prepareSearch("test").get(), 1);
IndexMetaData secondMetaData = clusterService.state().metaData().index("test");
assertAcked(client().admin().indices().prepareClose("test"));
ShardPath path = ShardPath.loadShardPath(logger, getNodeEnvironment(), new ShardId(test.index(), 0), test.getIndexSettings());
assertTrue(path.exists());
try {
indicesService.deleteIndexStore("boom", secondMetaData, clusterService.state());
fail();
} catch (IllegalStateException ex) {
// all good
}
assertTrue(path.exists());
// now delete the old one and make sure we resolve against the name
try {
indicesService.deleteIndexStore("boom", firstMetaData, clusterService.state());
fail();
} catch (IllegalStateException ex) {
// all good
}
assertAcked(client().admin().indices().prepareOpen("test"));
ensureGreen("test");
}
*/
/*
public void testPendingTasks() throws Exception {
IndicesService indicesService = getIndicesService();
IndexService test = createIndex("test");
assertTrue(test.hasShard(0));
ShardPath path = test.getShardOrNull(0).shardPath();
assertTrue(test.getShardOrNull(0).routingEntry().started());
ShardPath shardPath = ShardPath.loadShardPath(logger, getNodeEnvironment(), new ShardId(test.index(), 0), test.getIndexSettings());
assertEquals(shardPath, path);
try {
indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
fail("can't get lock");
} catch (ShardLockObtainFailedException ex) {
}
assertTrue(path.exists());
int numPending = 1;
if (randomBoolean()) {
indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
} else {
if (randomBoolean()) {
numPending++;
indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
}
indicesService.addPendingDelete(test.index(), test.getIndexSettings());
}
assertAcked(client().admin().indices().prepareClose("test"));
assertTrue(path.exists());
assertEquals(indicesService.numPendingDeletes(test.index()), numPending);
assertTrue(indicesService.hasUncompletedPendingDeletes());
// shard lock released... we can now delete
indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
assertEquals(indicesService.numPendingDeletes(test.index()), 0);
assertFalse(indicesService.hasUncompletedPendingDeletes());
assertFalse(path.exists());
if (randomBoolean()) {
indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
indicesService.addPendingDelete(new ShardId(test.index(), 1), test.getIndexSettings());
indicesService.addPendingDelete(new ShardId("bogus", "_na_", 1), test.getIndexSettings());
assertEquals(indicesService.numPendingDeletes(test.index()), 2);
assertTrue(indicesService.hasUncompletedPendingDeletes());
// shard lock released... we can now delete
indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
assertEquals(indicesService.numPendingDeletes(test.index()), 0);
assertTrue(indicesService.hasUncompletedPendingDeletes()); // "bogus" index has not been removed
}
assertAcked(client().admin().indices().prepareOpen("test"));
}
*/
/**
 * verifyIndexIsDeleted must refuse to act while the index is still part of
 * the cluster state (IllegalStateException), and must remove the on-disk
 * index state once the index is absent from the cluster state.
 */
public void testVerifyIfIndexContentDeleted() throws Exception {
    final Index index = new Index("test", UUIDs.randomBase64UUID());
    final IndicesService indicesService = getIndicesService();
    final NodeEnvironment nodeEnv = getNodeEnvironment();
    final MetaStateService metaStateService = getInstanceFromNode(MetaStateService.class);
    final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
    final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
            .build();
    final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName())
            .settings(idxSettings)
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();
    // Write index metadata to disk, then publish the index in cluster state.
    metaStateService.writeIndex("test index being created", indexMetaData);
    final MetaData metaData = MetaData.builder(clusterService.state().metaData()).put(indexMetaData, true).build();
    final ClusterState csWithIndex = new ClusterState.Builder(clusterService.state()).metaData(metaData).build();
    try {
        indicesService.verifyIndexIsDeleted(index, csWithIndex);
        fail("Should not be able to delete index contents when the index is part of the cluster state.");
    } catch (IllegalStateException e) {
        assertThat(e.getMessage(), containsString("Cannot delete index"));
    }
    // With the index removed from cluster state, deletion must proceed and
    // wipe the on-disk index paths.
    final ClusterState withoutIndex = new ClusterState.Builder(csWithIndex)
            .metaData(MetaData.builder(csWithIndex.metaData()).remove(index.getName()))
            .build();
    indicesService.verifyIndexIsDeleted(index, withoutIndex);
    assertFalse("index files should be deleted", FileSystemUtils.exists(nodeEnv.indexPaths(index)));
}
/*
public void testDanglingIndicesWithAliasConflict() throws Exception {
final String indexName = "test-idx1";
final String alias = "test-alias";
final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
createIndex(indexName);
// create the alias for the index
client().admin().indices().prepareAliases().addAlias(indexName, alias).get();
final ClusterState originalState = clusterService.state();
// try to import a dangling index with the same name as the alias, it should fail
final LocalAllocateDangledIndices dangling = getInstanceFromNode(LocalAllocateDangledIndices.class);
final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())
.build();
final IndexMetaData indexMetaData = new IndexMetaData.Builder(alias)
.settings(idxSettings)
.numberOfShards(1)
.numberOfReplicas(0)
.build();
DanglingListener listener = new DanglingListener();
dangling.allocateDangled(Arrays.asList(indexMetaData), listener);
listener.latch.await();
assertThat(clusterService.state(), equalTo(originalState));
// remove the alias
client().admin().indices().prepareAliases().removeAlias(indexName, alias).get();
// now try importing a dangling index with the same name as the alias, it should succeed.
listener = new DanglingListener();
dangling.allocateDangled(Arrays.asList(indexMetaData), listener);
listener.latch.await();
assertThat(clusterService.state(), not(originalState));
assertNotNull(clusterService.state().getMetaData().index(alias));
}
*/
/**
 * This test checks an edge case where, if a node had an index (lets call it A with UUID 1), then
 * deleted it (so a tombstone entry for A will exist in the cluster state), then created
 * a new index A with UUID 2, then shutdown, when the node comes back online, it will look at the
 * tombstones for deletions, and it should proceed with trying to delete A with UUID 1 and not
 * throw any errors that the index still exists in the cluster state. This is a case of ensuring
 * that tombstones that have the same name as current valid indices don't cause confusion by
 * trying to delete an index that exists.
 * See https://github.com/elastic/elasticsearch/issues/18054
 */
public void testIndexAndTombstoneWithSameNameOnStartup() throws Exception {
    final String indexName = "test";
    // The "live" index: same name as the tombstoned one, different UUID.
    final Index index = new Index(indexName, UUIDs.randomBase64UUID());
    final IndicesService indicesService = getIndicesService();
    final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
            .build();
    final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName())
            .settings(idxSettings)
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();
    // Tombstone an index with the SAME name but a different UUID.
    final Index tombstonedIndex = new Index(indexName, UUIDs.randomBase64UUID());
    final IndexGraveyard graveyard = IndexGraveyard.builder().addTombstone(tombstonedIndex).build();
    final MetaData metaData = MetaData.builder().put(indexMetaData, true).indexGraveyard(graveyard).build();
    final ClusterState clusterState = new ClusterState.Builder(new ClusterName("testCluster")).metaData(metaData).build();
    // if all goes well, this won't throw an exception, otherwise, it will throw an IllegalStateException
    indicesService.verifyIndexIsDeleted(tombstonedIndex, clusterState);
}
/**
 * Listener that lets test code block until a dangling-index allocation request has been
 * fully processed; the latch is released on completion regardless of outcome.
 */
private static class DanglingListener implements LocalAllocateDangledIndices.Listener {
// single-shot latch: counted down exactly once, on either success or failure
final CountDownLatch latch = new CountDownLatch(1);
@Override
public void onResponse(LocalAllocateDangledIndices.AllocateDangledResponse response) {
latch.countDown();
}
@Override
public void onFailure(Throwable e) {
// failures also release the latch — waiting tests only care that processing finished
latch.countDown();
}
}
/**
 * Tests that the {@link MapperService} created by {@link IndicesService#createIndexMapperService(IndexMetaData)}
 * contains custom types and similarities registered by plugins.
 */
public void testStandAloneMapperServiceWithPlugins() throws IOException {
    final Index index = new Index("test", UUIDs.randomBase64UUID());
    final IndicesService indicesService = getIndicesService();
    final Settings settings = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
        .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
        // register the plugin-provided similarity under the name "test"
        .put(IndexModule.SIMILARITY_SETTINGS_PREFIX + ".test.type", "fake-similarity")
        .build();
    final IndexMetaData meta = new IndexMetaData.Builder(index.getName())
        .settings(settings)
        .numberOfShards(1)
        .numberOfReplicas(0)
        .build();
    final MapperService mapperService = indicesService.createIndexMapperService(meta);
    // the plugin-registered mapper type must be resolvable by the stand-alone mapper service
    assertNotNull(mapperService.documentMapperParser().parserContext("type").typeParser("fake-mapper"));
    // and the "fake-similarity" setting must resolve to the plugin's similarity provider
    assertThat(mapperService.documentMapperParser().parserContext("type").getSimilarity("test"),
        instanceOf(BM25SimilarityProvider.class));
}
/**
 * statsByShard must swallow the "shard is closing/closed" family of exceptions for a single
 * shard and still report stats for the remaining, healthy shards.
 */
public void testStatsByShardDoesNotDieFromExpectedExceptions() {
    final int shardCount = randomIntBetween(2, 5);
    final int failingShard = randomIntBetween(0, shardCount - 1);
    final Index index = new Index("test-index", "abc123");
    // identity of the one shard whose stats call will throw
    final ShardId failingShardId = new ShardId(index, failingShard);
    final IndexShardState state = randomFrom(IndexShardState.values());
    final String message = "TEST - expected";
    final RuntimeException expectedException =
        randomFrom(new IllegalIndexShardStateException(failingShardId, state, message), new AlreadyClosedException(message));
    // mocked indices service lets us control exactly which indices/shards exist
    final IndicesService mockIndicesService = mock(IndicesService.class);
    final IndexService indexService = mock(IndexService.class);
    // fabricate the shards and stub their per-shard stats responses
    final List<IndexShard> shards = new ArrayList<>(shardCount);
    final List<IndexShardStats> shardStats = new ArrayList<>(shardCount - 1);
    for (int i = 0; i < shardCount; ++i) {
        final IndexShard shard = mock(IndexShard.class);
        shards.add(shard);
        if (i == failingShard) {
            when(mockIndicesService.indexShardStats(mockIndicesService, shard, CommonStatsFlags.ALL)).thenThrow(expectedException);
        } else {
            final IndexShardStats stats = mock(IndexShardStats.class);
            shardStats.add(stats);
            when(mockIndicesService.indexShardStats(mockIndicesService, shard, CommonStatsFlags.ALL)).thenReturn(stats);
        }
    }
    when(mockIndicesService.iterator()).thenReturn(Collections.singleton(indexService).iterator());
    when(indexService.iterator()).thenReturn(shards.iterator());
    when(indexService.index()).thenReturn(index);
    // use a real service here because it carries the logger that records the swallowed exception
    final IndicesService indicesService = getIndicesService();
    final Map<Index, List<IndexShardStats>> statsByShard = indicesService.statsByShard(mockIndicesService, CommonStatsFlags.ALL);
    assertThat(statsByShard.isEmpty(), equalTo(false));
    assertThat("index not defined", statsByShard.containsKey(index), equalTo(true));
    // only the healthy shards' stats are reported; the failing shard is silently skipped
    assertThat("unexpected shard stats", statsByShard.get(index), equalTo(shardStats));
}
}
|
|
package org.testcontainers.utility;
import com.google.common.base.Charsets;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.lang.SystemUtils;
import org.jetbrains.annotations.NotNull;
import org.testcontainers.images.builder.Transferable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.regex.Pattern;
import static lombok.AccessLevel.PACKAGE;
import static org.testcontainers.utility.PathUtils.recursiveDeleteDir;
/**
 * An abstraction over files and classpath resources aimed at encapsulating all the complexity of generating
 * a path that the Docker daemon is about to create a volume mount for.
 */
@RequiredArgsConstructor(access = PACKAGE)
@Slf4j
public class MountableFile implements Transferable {

    private static final String TESTCONTAINERS_TMP_DIR_PREFIX = ".testcontainers-tmp-";

    private static final String OS_MAC_TMP_DIR = "/tmp";

    // POSIX file-type bits: S_IFREG (regular file) and S_IFDIR (directory). OR-ed with the
    // caller-supplied permission bits when a forced file mode is in effect.
    private static final int BASE_FILE_MODE = 0100000;

    private static final int BASE_DIR_MODE = 0040000;

    // The original path/URI this instance was constructed from (a file URI or classpath resource URL string).
    private final String path;

    // Octal permission bits (000..777) forced by the caller, or null to derive the mode from the filesystem.
    private final Integer forcedFileMode;

    // Lazily resolved, Docker-daemon-usable path (lombok caches the first result).
    @Getter(lazy = true)
    private final String resolvedPath = resolvePath();

    // Lazily resolved local-filesystem path variant (MinGW style on Windows).
    @Getter(lazy = true)
    private final String filesystemPath = resolveFilesystemPath();

    // Last value computed by getResourcePath(); reassigned on every call.
    private String resourcePath;

    /**
     * Obtains a {@link MountableFile} corresponding to a resource on the classpath (including resources in JAR files)
     *
     * @param resourceName the classpath path to the resource
     * @return a {@link MountableFile} that may be used to obtain a mountable path
     */
    public static MountableFile forClasspathResource(@NotNull final String resourceName) {
        return forClasspathResource(resourceName, null);
    }

    /**
     * Obtains a {@link MountableFile} corresponding to a file on the docker host filesystem.
     *
     * @param path the path to the resource
     * @return a {@link MountableFile} that may be used to obtain a mountable path
     */
    public static MountableFile forHostPath(@NotNull final String path) {
        return forHostPath(path, null);
    }

    /**
     * Obtains a {@link MountableFile} corresponding to a file on the docker host filesystem.
     *
     * @param path the path to the resource
     * @return a {@link MountableFile} that may be used to obtain a mountable path
     */
    public static MountableFile forHostPath(final Path path) {
        return forHostPath(path, null);
    }

    /**
     * Obtains a {@link MountableFile} corresponding to a resource on the classpath (including resources in JAR files)
     *
     * @param resourceName the classpath path to the resource
     * @param mode octal value of posix file mode (000..777)
     * @return a {@link MountableFile} that may be used to obtain a mountable path
     */
    public static MountableFile forClasspathResource(@NotNull final String resourceName, Integer mode) {
        return new MountableFile(getClasspathResource(resourceName, new HashSet<>()).toString(), mode);
    }

    /**
     * Obtains a {@link MountableFile} corresponding to a file on the docker host filesystem.
     *
     * @param path the path to the resource
     * @param mode octal value of posix file mode (000..777)
     * @return a {@link MountableFile} that may be used to obtain a mountable path
     */
    public static MountableFile forHostPath(@NotNull final String path, Integer mode) {
        return new MountableFile(new File(path).toURI().toString(), mode);
    }

    /**
     * Obtains a {@link MountableFile} corresponding to a file on the docker host filesystem.
     *
     * @param path the path to the resource
     * @param mode octal value of posix file mode (000..777)
     * @return a {@link MountableFile} that may be used to obtain a mountable path
     */
    public static MountableFile forHostPath(final Path path, Integer mode) {
        return new MountableFile(path.toAbsolutePath().toString(), mode);
    }

    /**
     * Locates a classpath resource, consulting the supplied classloaders plus the context,
     * system and own classloaders.
     *
     * @param resourcePath classpath path of the resource (a leading '/' is tolerated)
     * @param classLoaders additional classloaders to search
     * @return the resource URL
     * @throws IllegalArgumentException if the resource cannot be found on any searched classloader
     */
    @NotNull
    private static URL getClasspathResource(@NotNull final String resourcePath, @NotNull final Set<ClassLoader> classLoaders) {
        final Set<ClassLoader> classLoadersToSearch = new HashSet<>(classLoaders);
        // try context and system classloaders as well
        classLoadersToSearch.add(Thread.currentThread().getContextClassLoader());
        classLoadersToSearch.add(ClassLoader.getSystemClassLoader());
        classLoadersToSearch.add(MountableFile.class.getClassLoader());

        for (final ClassLoader classLoader : classLoadersToSearch) {
            URL resource = classLoader.getResource(resourcePath);
            if (resource != null) {
                return resource;
            }

            // Be lenient if an absolute path was given
            if (resourcePath.startsWith("/")) {
                resource = classLoader.getResource(resourcePath.replaceFirst("/", ""));
                if (resource != null) {
                    return resource;
                }
            }
        }

        throw new IllegalArgumentException("Resource with path " + resourcePath + " could not be found on any of these classloaders: " + classLoadersToSearch);
    }

    /**
     * Converts a {@code jar:file:...!/...} or {@code file:...} resource URI into a plain filesystem
     * path, URL-decoding any percent-encoded characters (e.g. spaces) along the way.
     *
     * @param resource the resource URI string
     * @return the decoded filesystem path (JAR-internal suffix stripped)
     */
    private static String unencodeResourceURIToFilePath(@NotNull final String resource) {
        try {
            // Convert any url-encoded characters (e.g. spaces) back into unencoded form
            return URLDecoder.decode(resource, Charsets.UTF_8.name())
                .replaceFirst("jar:", "")
                .replaceFirst("file:", "")
                .replaceAll("!.*", "");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the JLS, so this should be unreachable
            throw new IllegalStateException(e);
        }
    }

    /**
     * Obtain a path that the Docker daemon should be able to use to volume mount a file/resource
     * into a container. If this is a classpath resource residing in a JAR, it will be extracted to
     * a temporary location so that the Docker daemon is able to access it.
     *
     * @return a volume-mountable path.
     */
    private String resolvePath() {
        String result = getResourcePath();

        // Docker for Windows expects the drive-letter form without the leading '/'
        if (SystemUtils.IS_OS_WINDOWS && result.startsWith("/")) {
            result = result.substring(1);
        }
        return result;
    }

    /**
     * Obtain a path in local filesystem that the Docker daemon should be able to use to volume mount a file/resource
     * into a container. If this is a classpath resource residing in a JAR, it will be extracted to
     * a temporary location so that the Docker daemon is able to access it.
     *
     * TODO: rename method accordingly and check if really needed like this
     *
     * @return a local filesystem path (MinGW form on Windows)
     */
    private String resolveFilesystemPath() {
        String result = getResourcePath();

        if (SystemUtils.IS_OS_WINDOWS && result.startsWith("/")) {
            result = PathUtils.createMinGWPath(result).substring(1);
        }
        return result;
    }

    /**
     * Resolves {@link #path} to a real filesystem path, extracting JAR-resident resources to a
     * temporary location when necessary.
     *
     * @return the resolved filesystem path
     */
    private String getResourcePath() {
        if (path.contains(".jar!")) {
            // resource lives inside a JAR: copy it out so Docker can bind-mount it
            resourcePath = extractClassPathResourceToTempLocation(this.path);
        } else {
            resourcePath = unencodeResourceURIToFilePath(path);
        }
        return resourcePath;
    }

    /**
     * Extract a file or directory tree from a JAR file to a temporary location.
     * This allows Docker to mount classpath resources as files.
     *
     * @param hostPath the path on the host, expected to be of the format 'file:/path/to/some.jar!/classpath/path/to/resource'
     * @return the path of the temporary file/directory
     */
    private String extractClassPathResourceToTempLocation(final String hostPath) {
        File tmpLocation = createTempDirectory();
        //noinspection ResultOfMethodCallIgnored
        tmpLocation.delete();

        String urldecodedJarPath = unencodeResourceURIToFilePath(hostPath);
        // everything after the "...!/" separator is the path inside the JAR
        String internalPath = hostPath.replaceAll("[^!]*!/", "");

        try (JarFile jarFile = new JarFile(urldecodedJarPath)) {
            Enumeration<JarEntry> entries = jarFile.entries();
            while (entries.hasMoreElements()) {
                JarEntry entry = entries.nextElement();
                final String name = entry.getName();
                // copy the requested resource and everything beneath it
                if (name.startsWith(internalPath)) {
                    log.debug("Copying classpath resource(s) from {} to {} to permit Docker to bind",
                        hostPath,
                        tmpLocation);
                    copyFromJarToLocation(jarFile, entry, internalPath, tmpLocation);
                }
            }
        } catch (IOException e) {
            throw new IllegalStateException("Failed to process JAR file when extracting classpath resource: " + hostPath, e);
        }

        // Mark temporary files/dirs for deletion at JVM shutdown
        deleteOnExit(tmpLocation.toPath());

        return tmpLocation.getAbsolutePath();
    }

    /**
     * Creates a directory for extracted resources. On macOS this is forced under /tmp because the
     * default temp location is not shared with the Docker VM by default; if creation fails we fall
     * back to a relative directory with a random suffix.
     *
     * @return the temporary directory
     */
    private File createTempDirectory() {
        try {
            if (SystemUtils.IS_OS_MAC) {
                return Files.createTempDirectory(Paths.get(OS_MAC_TMP_DIR), TESTCONTAINERS_TMP_DIR_PREFIX).toFile();
            }
            return Files.createTempDirectory(TESTCONTAINERS_TMP_DIR_PREFIX).toFile();
        } catch  (IOException e) {
            return new File(TESTCONTAINERS_TMP_DIR_PREFIX + Base58.randomString(5));
        }
    }

    /**
     * Copies a single JAR entry to its corresponding location under {@code toRoot}, creating parent
     * directories as needed. Directory entries are skipped — their children arrive as separate entries.
     *
     * @param jarFile the JAR being extracted
     * @param entry the entry to copy
     * @param fromRoot entry-name prefix to strip when computing the destination path
     * @param toRoot root directory to extract into
     * @throws IOException if the entry content cannot be copied
     */
    @SuppressWarnings("ResultOfMethodCallIgnored")
    private void copyFromJarToLocation(final JarFile jarFile,
                                       final JarEntry entry,
                                       final String fromRoot,
                                       final File toRoot) throws IOException {

        // Pattern.quote makes the prefix strip literal: resource paths may legally contain regex
        // metacharacters ('+', '(', '[', ...) which would otherwise corrupt the destination name
        // or throw PatternSyntaxException.
        String destinationName = entry.getName().replaceFirst(Pattern.quote(fromRoot), "");
        File newFile = new File(toRoot, destinationName);

        log.debug("Copying resource {} from JAR file {}",
            fromRoot,
            jarFile.getName());

        if (!entry.isDirectory()) {
            // Create parent directories
            Path parent = newFile.getAbsoluteFile().toPath().getParent();
            parent.toFile().mkdirs();
            newFile.deleteOnExit();

            try (InputStream is = jarFile.getInputStream(entry)) {
                Files.copy(is, newFile.toPath());
            } catch (IOException e) {
                log.error("Failed to extract classpath resource " + entry.getName() + " from JAR file " + jarFile.getName(), e);
                throw e;
            }
        }
    }

    /**
     * Registers a shutdown hook that recursively deletes the given path when the JVM exits.
     *
     * @param path path to delete on shutdown
     */
    private void deleteOnExit(final Path path) {
        Runtime.getRuntime().addShutdownHook(new Thread(() -> recursiveDeleteDir(path)));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void transferTo(final TarArchiveOutputStream outputStream, String destinationPathInTar) {
        recursiveTar(destinationPathInTar, this.getResolvedPath(), this.getResolvedPath(), outputStream);
    }

    /*
     * Recursively copies a file/directory into a TarArchiveOutputStream
     */
    private void recursiveTar(String entryFilename, String rootPath, String itemPath, TarArchiveOutputStream tarArchive) {
        try {
            final File sourceFile = new File(itemPath).getCanonicalFile();     // e.g. /foo/bar/baz
            final File sourceRootFile = new File(rootPath).getCanonicalFile();     // e.g. /foo
            final String relativePathToSourceFile = sourceRootFile.toPath().relativize(sourceFile.toPath()).toFile().toString();    // e.g. bar/baz

            final TarArchiveEntry tarEntry = new TarArchiveEntry(sourceFile, entryFilename + "/" + relativePathToSourceFile); // entry filename e.g. /xyz/bar/baz

            // TarArchiveEntry automatically sets the mode for file/directory, but we can update to ensure that the mode is set exactly (inc executable bits)
            tarEntry.setMode(getUnixFileMode(itemPath));
            tarArchive.putArchiveEntry(tarEntry);

            if (sourceFile.isFile()) {
                Files.copy(sourceFile.toPath(), tarArchive);
            }
            // a directory entry merely needs to exist in the TAR file - there is no data stored yet
            tarArchive.closeArchiveEntry();

            final File[] children = sourceFile.listFiles();
            if (children != null) {
                // recurse into child files/directories
                for (final File child : children) {
                    recursiveTar(entryFilename, sourceRootFile.getCanonicalPath(), child.getCanonicalPath(), tarArchive);
                }
            }
        } catch (IOException e) {
            log.error("Error when copying TAR file entry: {}", itemPath, e);
            throw new UncheckedIOException(e); // fail fast
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long getSize() {

        final File file = new File(this.getResolvedPath());
        if (file.isFile()) {
            return file.length();
        } else {
            // directories report zero size; their contents are transferred entry-by-entry
            return 0;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getDescription() {
        return this.getResolvedPath();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getFileMode() {
        return getUnixFileMode(this.getResolvedPath());
    }

    /**
     * Determines the unix mode to record for the given path: the forced mode if one was supplied,
     * otherwise the actual filesystem mode, falling back to sensible defaults on non-POSIX systems.
     *
     * @param pathAsString path to inspect
     * @return a unix mode value (type bits plus permissions)
     */
    private int getUnixFileMode(final String pathAsString) {
        final Path path = Paths.get(pathAsString);
        if (this.forcedFileMode != null) {
            return this.getModeValue(path);
        }
        try {
            return (int) Files.getAttribute(path, "unix:mode");
        } catch (IOException | UnsupportedOperationException e) {
            // fallback for non-posix environments
            int mode = DEFAULT_FILE_MODE;
            if (Files.isDirectory(path)) {
                mode = DEFAULT_DIR_MODE;
            } else if (Files.isExecutable(path)) {
                mode |= 0111; // equiv to +x for user/group/others
            }
            return mode;
        }
    }

    /**
     * Combines the appropriate POSIX type bits (file vs directory) with the forced permission bits.
     *
     * @param path path used only to decide file-vs-directory type bits
     * @return complete mode value
     */
    private int getModeValue(final Path path) {
        int result = Files.isDirectory(path) ? BASE_DIR_MODE : BASE_FILE_MODE;
        return result | this.forcedFileMode;
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.data.management.retention.dataset;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.Singular;
import org.apache.hadoop.fs.FileSystem;
import org.slf4j.Logger;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.data.management.policy.EmbeddedRetentionSelectionPolicy;
import org.apache.gobblin.data.management.policy.SelectNothingPolicy;
import org.apache.gobblin.data.management.policy.VersionSelectionPolicy;
import org.apache.gobblin.data.management.retention.action.RetentionAction;
import org.apache.gobblin.data.management.retention.policy.RetentionPolicy;
import org.apache.gobblin.data.management.trash.ProxiedTrash;
import org.apache.gobblin.data.management.version.DatasetVersion;
import org.apache.gobblin.data.management.version.FileSystemDatasetVersion;
import org.apache.gobblin.data.management.version.finder.VersionFinder;
import org.apache.gobblin.dataset.FileSystemDataset;
import org.apache.gobblin.util.ConfigUtils;
/**
* A {@link CleanableDataset} that may have multiple {@link VersionFinder}, {@link VersionSelectionPolicy}
 * and {@link RetentionAction}s. Retention needs to be performed for different kinds of {@link DatasetVersion}s. Each
* kind of {@link DatasetVersion} can have its own {@link VersionSelectionPolicy} and/or {@link RetentionAction}
* associated with it.
* <ul>
* <li>{@link MultiVersionCleanableDatasetBase#getVersionFindersAndPolicies()} gets a list {@link VersionFinderAndPolicy}s
* <li>Each {@link VersionFinderAndPolicy} contains a {@link VersionFinder} and a {@link VersionSelectionPolicy}. It can
* optionally have a {@link RetentionAction}
* <li>The {@link MultiVersionCleanableDatasetBase#clean()} method finds all the {@link FileSystemDatasetVersion}s using
* {@link VersionFinderAndPolicy#versionFinder}
 * <li> It gets the deletable {@link FileSystemDatasetVersion}s by applying {@link VersionFinderAndPolicy#versionSelectionPolicy}.
 * These deletable versions are deleted, and then any newly empty parent directories are removed.
* <li>If additional retention actions are available at {@link VersionFinderAndPolicy#getRetentionActions()}, all versions
* found by the {@link VersionFinderAndPolicy#versionFinder} are passed to {@link RetentionAction#execute(List)} for
* each {@link RetentionAction}
* </ul>
*
* <p>
* Concrete subclasses should implement {@link #getVersionFindersAndPolicies()}
* </p>
*
* <p>
* Datasets are directories in the filesystem containing data files organized in version-like directory structures.
* Example datasets:
* </p>
*
* <p>
* For snapshot based datasets, with the directory structure:
* <pre>
* /path/to/table/
* snapshot1/
* dataFiles...
* snapshot2/
* dataFiles...
* </pre>
* each of snapshot1 and snapshot2 are dataset versions.
* </p>
*
* <p>
* For tracking datasets, with the directory structure:
* <pre>
* /path/to/tracking/data/
* 2015/
* 06/
* 01/
* dataFiles...
* 02/
* dataFiles...
* </pre>
* each of 2015/06/01 and 2015/06/02 are dataset versions.
* </p>
*
* @param <T> type of {@link FileSystemDatasetVersion} supported by this {@link CleanableDataset}.
*/
public abstract class MultiVersionCleanableDatasetBase<T extends FileSystemDatasetVersion>
implements CleanableDataset, FileSystemDataset {
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
public static final String CONFIGURATION_KEY_PREFIX = FsCleanableHelper.CONFIGURATION_KEY_PREFIX;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
public static final String SIMULATE_KEY = FsCleanableHelper.SIMULATE_KEY;
public static final String SIMULATE_DEFAULT = FsCleanableHelper.SIMULATE_DEFAULT;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
public static final String SKIP_TRASH_KEY = FsCleanableHelper.SKIP_TRASH_KEY;
public static final String SKIP_TRASH_DEFAULT = FsCleanableHelper.SKIP_TRASH_DEFAULT;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
public static final String DELETE_EMPTY_DIRECTORIES_KEY = FsCleanableHelper.DELETE_EMPTY_DIRECTORIES_KEY;
public static final String DELETE_EMPTY_DIRECTORIES_DEFAULT = FsCleanableHelper.DELETE_EMPTY_DIRECTORIES_DEFAULT;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
public static final String DELETE_AS_OWNER_KEY = FsCleanableHelper.DELETE_AS_OWNER_KEY;
public static final String DELETE_AS_OWNER_DEFAULT = FsCleanableHelper.DELETE_AS_OWNER_DEFAULT;
// Dataset-config flag: when true, clean() becomes a no-op for this dataset.
public static final String IS_DATASET_BLACKLISTED_KEY = CONFIGURATION_KEY_PREFIX + "dataset.is.blacklisted";
public static final String IS_DATASET_BLACKLISTED_DEFAULT = Boolean.toString(false);
protected final FileSystem fs;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
protected final ProxiedTrash trash;
@Getter
@VisibleForTesting
protected final boolean isDatasetBlacklisted;
// Delegate that performs the actual deletions (simulate / trash / empty-directory handling).
private final FsCleanableHelper fsCleanableHelper;
protected final Logger log;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
protected final boolean simulate;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
protected final boolean skipTrash;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
protected final boolean deleteEmptyDirectories;
/**
 * @deprecated in favor of {@link FsCleanableHelper}
 */
@Deprecated
protected final boolean deleteAsOwner;
/**
 * Get {@link org.apache.gobblin.data.management.retention.policy.RetentionPolicy} to use.
 */
public abstract List<VersionFinderAndPolicy<T>> getVersionFindersAndPolicies();
// Constructor for dataset-config driven jobs: boolean flags come from job Properties,
// while the blacklist flag is read from the dataset Config.
public MultiVersionCleanableDatasetBase(final FileSystem fs, final Properties props, Config config, Logger log)
throws IOException {
this(fs, props, Boolean.valueOf(props.getProperty(SIMULATE_KEY, SIMULATE_DEFAULT)),
Boolean.valueOf(props.getProperty(SKIP_TRASH_KEY, SKIP_TRASH_DEFAULT)),
Boolean.valueOf(props.getProperty(DELETE_EMPTY_DIRECTORIES_KEY, DELETE_EMPTY_DIRECTORIES_DEFAULT)),
Boolean.valueOf(props.getProperty(DELETE_AS_OWNER_KEY, DELETE_AS_OWNER_DEFAULT)),
ConfigUtils.getBoolean(config, IS_DATASET_BLACKLISTED_KEY, Boolean.valueOf(IS_DATASET_BLACKLISTED_DEFAULT)), log);
}
public MultiVersionCleanableDatasetBase(final FileSystem fs, final Properties props, Logger log) throws IOException {
// This constructor is used by retention jobs configured through job configs and do not use dataset configs from config store.
// IS_DATASET_BLACKLISTED_KEY is only available with dataset config. Hence set IS_DATASET_BLACKLISTED_KEY to default
// ...false for jobs running with job configs
this(fs, props, ConfigFactory.parseMap(ImmutableMap.<String, String> of(IS_DATASET_BLACKLISTED_KEY,
IS_DATASET_BLACKLISTED_DEFAULT)), log);
}
/**
 * Constructor for {@link MultiVersionCleanableDatasetBase}.
 * @param fs {@link org.apache.hadoop.fs.FileSystem} where files are located.
 * @param properties {@link java.util.Properties} for object.
 * @param simulate whether to simulate deletes.
 * @param skipTrash if true, delete files and directories immediately.
 * @param deleteEmptyDirectories if true, newly empty parent directories will be deleted.
 * @param deleteAsOwner if true, all deletions will be executed as the owner of the file / directory.
 * @param log logger to use.
 * @param isDatasetBlacklisted if true, clean will be skipped for this dataset
 *
 * @throws IOException
 */
public MultiVersionCleanableDatasetBase(FileSystem fs, Properties properties, boolean simulate, boolean skipTrash,
boolean deleteEmptyDirectories, boolean deleteAsOwner, boolean isDatasetBlacklisted, Logger log)
throws IOException {
this.log = log;
this.fsCleanableHelper = new FsCleanableHelper(fs, properties, simulate, skipTrash, deleteEmptyDirectories, deleteAsOwner, log);
this.fs = fs;
// The deprecated fields below mirror the helper's settings for backward compatibility.
this.simulate = simulate;
this.skipTrash = skipTrash;
this.deleteEmptyDirectories = deleteEmptyDirectories;
this.trash = this.fsCleanableHelper.getTrash();
this.deleteAsOwner = deleteAsOwner;
this.isDatasetBlacklisted = isDatasetBlacklisted;
}
// Convenience constructor defaulting the blacklist flag to false.
public MultiVersionCleanableDatasetBase(FileSystem fs, Properties properties, boolean simulate, boolean skipTrash,
boolean deleteEmptyDirectories, boolean deleteAsOwner, Logger log) throws IOException {
this(fs, properties, simulate, skipTrash, deleteEmptyDirectories, deleteAsOwner,
Boolean.parseBoolean(IS_DATASET_BLACKLISTED_DEFAULT), log);
}
/**
 * Method to perform the Retention operations for this dataset.
 *
 *<ul>
 * <li>{@link MultiVersionCleanableDatasetBase#getVersionFindersAndPolicies()} gets a list {@link VersionFinderAndPolicy}s
 * <li>Each {@link VersionFinderAndPolicy} contains a {@link VersionFinder} and a {@link VersionSelectionPolicy}. It can
 * optionally have a {@link RetentionAction}
 * <li>The {@link MultiVersionCleanableDatasetBase#clean()} method finds all the {@link FileSystemDatasetVersion}s using
 * {@link VersionFinderAndPolicy#versionFinder}
 * <li> It gets the deletable {@link FileSystemDatasetVersion}s by applying {@link VersionFinderAndPolicy#versionSelectionPolicy}.
 * These deletable versions are deleted, and then any newly empty parent directories are removed.
 * <li>If additional retention actions are available at {@link VersionFinderAndPolicy#getRetentionActions()}, all versions
 * found by the {@link VersionFinderAndPolicy#versionFinder} are passed to {@link RetentionAction#execute(List)} for
 * each {@link RetentionAction}
 * </ul>
 *
 */
@Override
public void clean() throws IOException {
if (this.isDatasetBlacklisted) {
this.log.info("Dataset blacklisted. Cleanup skipped for " + datasetRoot());
return;
}
// Failures in individual RetentionActions are collected rather than aborting the whole run;
// a single exception is thrown at the end if any action failed.
boolean atLeastOneFailureSeen = false;
for (VersionFinderAndPolicy<T> versionFinderAndPolicy : getVersionFindersAndPolicies()) {
VersionSelectionPolicy<T> selectionPolicy = versionFinderAndPolicy.getVersionSelectionPolicy();
VersionFinder<? extends T> versionFinder = versionFinderAndPolicy.getVersionFinder();
// The selection policy must be able to operate on the version type the finder produces.
if (!selectionPolicy.versionClass().isAssignableFrom(versionFinder.versionClass())) {
throw new IOException("Incompatible dataset version classes.");
}
this.log.info(String.format("Cleaning dataset %s. Using version finder %s and policy %s", this,
versionFinder.getClass().getName(), selectionPolicy));
List<T> versions = Lists.newArrayList(versionFinder.findDatasetVersions(this));
if (versions.isEmpty()) {
this.log.warn("No dataset version can be found. Ignoring.");
continue;
}
// Reverse natural order before applying the selection policy.
Collections.sort(versions, Collections.reverseOrder());
Collection<T> deletableVersions = selectionPolicy.listSelectedVersions(versions);
cleanImpl(deletableVersions);
// Retention actions receive ALL found versions, not just the deletable subset.
List<DatasetVersion> allVersions = Lists.newArrayList();
for (T ver : versions) {
allVersions.add(ver);
}
for (RetentionAction retentionAction : versionFinderAndPolicy.getRetentionActions()) {
try {
retentionAction.execute(allVersions);
} catch (Throwable t) {
atLeastOneFailureSeen = true;
log.error(String.format("RetentionAction %s failed for dataset %s", retentionAction.getClass().getName(),
this.datasetRoot()), t);
}
}
}
if (atLeastOneFailureSeen) {
throw new RuntimeException(String.format(
"At least one failure happened while processing %s. Look for previous logs for failures", datasetRoot()));
}
}
// Delegates the deletion of the selected versions to the FsCleanableHelper.
protected void cleanImpl(Collection<T> deletableVersions) throws IOException {
this.fsCleanableHelper.clean(deletableVersions, this);
}
@Override
public String toString() {
return datasetRoot().toString();
}
@Override
public String datasetURN() {
return this.datasetRoot().toString();
}
/**
 * A composition of version finder
 * @param <T> the type of {@link FileSystemDatasetVersion} this version finder knows to find
 */
@Getter
@Builder
@AllArgsConstructor
public static class VersionFinderAndPolicy<T extends FileSystemDatasetVersion> {
private final VersionSelectionPolicy<T> versionSelectionPolicy;
private final VersionFinder<? extends T> versionFinder;
@Singular
private final List<RetentionAction> retentionActions;
/**
 * Constructor for backward compatibility
 * @deprecated use {@link VersionFinderAndPolicyBuilder}
 */
@Deprecated
public VersionFinderAndPolicy(VersionSelectionPolicy<T> versionSelectionPolicy, VersionFinder<? extends T> versionFinder) {
this.versionSelectionPolicy = versionSelectionPolicy;
this.versionFinder = versionFinder;
this.retentionActions = Lists.newArrayList();
}
// Wraps a legacy RetentionPolicy in an EmbeddedRetentionSelectionPolicy adapter.
public VersionFinderAndPolicy(RetentionPolicy<T> retentionPolicy, VersionFinder<? extends T> versionFinder) {
this(new EmbeddedRetentionSelectionPolicy<>(retentionPolicy), versionFinder);
}
// Customizes the lombok-generated builder so that unset fields get safe defaults
// (select-nothing policy, empty action list) instead of null.
public static class VersionFinderAndPolicyBuilder<T extends FileSystemDatasetVersion> {
@SuppressWarnings("unchecked")
public VersionFinderAndPolicy<T> build() {
VersionSelectionPolicy<T> localVersionSelectionPolicy;
List<RetentionAction> localRetentionActions;
if (this.versionSelectionPolicy == null) {
// Default: select (and therefore delete) nothing.
localVersionSelectionPolicy = (VersionSelectionPolicy<T>) new SelectNothingPolicy(new Properties());
} else {
localVersionSelectionPolicy = this.versionSelectionPolicy;
}
if (this.retentionActions == null) {
localRetentionActions = Lists.newArrayList();
} else {
localRetentionActions = Lists.newArrayList(this.retentionActions);
}
return new VersionFinderAndPolicy<T>(localVersionSelectionPolicy, this.versionFinder,
localRetentionActions);
}
}
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@code MonitorActivity}, covering single-node activity/inactivity
 * detection, attribute copying onto notification flow files, and cluster-scoped
 * monitoring through shared cluster state.
 *
 * <p>Timing-sensitive: several tests rely on {@link Thread#sleep(long)} plus a
 * {@link TestableProcessor} that back-dates the last successful transfer, so the
 * processor appears inactive without real waiting.
 */
public class TestMonitorActivity {

    /** Inactive then restored on a standalone node; restored file must NOT inherit attributes. */
    @Test
    public void testFirstMessage() throws InterruptedException {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(1000L));
        runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "false");
        runner.setProperty(MonitorActivity.THRESHOLD, "100 millis");
        runner.enqueue(new byte[0]);
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS, 1);
        runner.clearTransferState();
        Thread.sleep(1000L);
        runNext(runner);
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE, 1);
        runner.clearTransferState();
        // ensure we don't keep creating the message
        for (int i = 0; i < 10; i++) {
            runNext(runner);
            runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 0);
            runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
            runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 0);
            Thread.sleep(100L);
        }
        Map<String, String> attributes = new HashMap<>();
        attributes.put("key", "value");
        attributes.put("key1", "value1");
        runner.enqueue(new byte[0], attributes);
        runNext(runner);
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
        MockFlowFile restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
        String flowFileContent = new String(restoredFlowFile.toByteArray());
        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
        // COPY_ATTRIBUTES is not set, so the restored file carries no incoming attributes
        restoredFlowFile.assertAttributeNotExists("key");
        restoredFlowFile.assertAttributeNotExists("key1");
        runner.clearTransferState();
        // With CONTINUALLY_SEND_MESSAGES, each idle trigger emits a fresh inactive file
        runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "true");
        Thread.sleep(200L);
        for (int i = 0; i < 10; i++) {
            runNext(runner);
            Thread.sleep(200L);
        }
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 10);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 0);
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 0);
        runner.clearTransferState();
        runner.enqueue(new byte[0], attributes);
        runNext(runner);
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
        restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
        flowFileContent = new String(restoredFlowFile.toByteArray());
        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
        restoredFlowFile.assertAttributeNotExists("key");
        restoredFlowFile.assertAttributeNotExists("key1");
    }

    private void runNext(TestRunner runner) {
        // Don't initialize, otherwise @OnScheduled is called and state gets reset
        runner.run(1, false, false);
    }

    /** Same as {@link #testFirstMessage()} but with COPY_ATTRIBUTES: restored file inherits attributes. */
    @Test
    public void testFirstMessageWithInherit() throws InterruptedException, IOException {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(1000L));
        runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "false");
        runner.setProperty(MonitorActivity.THRESHOLD, "100 millis");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        runner.enqueue(new byte[0]);
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS, 1);
        MockFlowFile originalFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS).get(0);
        runner.clearTransferState();
        Thread.sleep(1000L);
        runNext(runner);
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE, 1);
        runner.clearTransferState();
        // ensure we don't keep creating the message
        for (int i = 0; i < 10; i++) {
            runNext(runner);
            runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 0);
            runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
            runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 0);
            Thread.sleep(100L);
        }
        Map<String, String> attributes = new HashMap<>();
        attributes.put("key", "value");
        attributes.put("key1", "value1");
        runner.enqueue(new byte[0], attributes);
        runNext(runner);
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
        MockFlowFile restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
        String flowFileContent = new String(restoredFlowFile.toByteArray());
        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
        restoredFlowFile.assertAttributeEquals("key", "value");
        restoredFlowFile.assertAttributeEquals("key1", "value1");
        // verify the UUIDs are not the same
        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.UUID.key(), originalFlowFile.getAttribute(CoreAttributes.UUID.key()));
        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.FILENAME.key(), originalFlowFile.getAttribute(CoreAttributes.FILENAME.key()));
        Assert.assertTrue(
                String.format("file sizes match when they shouldn't original=%1$s restored=%2$s",
                        originalFlowFile.getSize(), restoredFlowFile.getSize()), restoredFlowFile.getSize() != originalFlowFile.getSize());
        Assert.assertTrue(
                String.format("lineage start dates match when they shouldn't original=%1$s restored=%2$s",
                        originalFlowFile.getLineageStartDate(), restoredFlowFile.getLineageStartDate()), restoredFlowFile.getLineageStartDate() != originalFlowFile.getLineageStartDate());
        runner.clearTransferState();
        runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "true");
        Thread.sleep(200L);
        for (int i = 0; i < 10; i++) {
            runNext(runner);
            Thread.sleep(200L);
        }
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 10);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 0);
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 0);
        runner.clearTransferState();
        runner.enqueue(new byte[0], attributes);
        runNext(runner);
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 1);
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 1);
        restoredFlowFile = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED).get(0);
        flowFileContent = new String(restoredFlowFile.toByteArray());
        Assert.assertTrue(Pattern.matches("Activity restored at time: (.*) after being inactive for 0 minutes", flowFileContent));
        restoredFlowFile.assertAttributeEquals("key", "value");
        restoredFlowFile.assertAttributeEquals("key1", "value1");
        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.UUID.key(), originalFlowFile.getAttribute(CoreAttributes.UUID.key()));
        restoredFlowFile.assertAttributeNotEquals(CoreAttributes.FILENAME.key(), originalFlowFile.getAttribute(CoreAttributes.FILENAME.key()));
        Assert.assertTrue(
                String.format("file sizes match when they shouldn't original=%1$s restored=%2$s",
                        originalFlowFile.getSize(), restoredFlowFile.getSize()), restoredFlowFile.getSize() != originalFlowFile.getSize());
        Assert.assertTrue(
                String.format("lineage start dates match when they shouldn't original=%1$s restored=%2$s",
                        originalFlowFile.getLineageStartDate(), restoredFlowFile.getLineageStartDate()), restoredFlowFile.getLineageStartDate() != originalFlowFile.getLineageStartDate());
    }

    /**
     * A fresh processor must not report inactivity on its first run; the threshold is
     * doubled and the check retried until it holds or the JUnit timeout fires.
     */
    @Test(timeout=5000)
    public void testFirstRunNoMessages() throws InterruptedException, IOException {
        // don't use the TestableProcessor, we want the real timestamp from @OnScheduled
        final TestRunner runner = TestRunners.newTestRunner(new MonitorActivity());
        runner.setProperty(MonitorActivity.CONTINUALLY_SEND_MESSAGES, "false");
        int threshold = 100;
        boolean rerun = false;
        do {
            rerun = false;
            runner.setProperty(MonitorActivity.THRESHOLD, threshold + " millis");
            Thread.sleep(1000L);
            // shouldn't generate inactivity b/c run() will reset the lastSuccessfulTransfer if @OnScheduled & onTrigger
            // does not get called more than MonitorActivity.THRESHOLD apart
            runner.run();
            runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 0);
            List<MockFlowFile> inactiveFlowFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_INACTIVE);
            if (inactiveFlowFiles.size() == 1) {
                // Seems Threshold was not sufficient, which has caused One inactive message.
                // Step-up and rerun the test until successful or jUnit times out
                threshold += threshold;
                rerun = true;
            } else {
                runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
            }
            runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 0);
            runner.clearTransferState();
        } while (rerun);
    }

    /**
     * Since each call to run() will call @OnScheduled methods which will set the lastSuccessfulTransfer to the
     * current time, we need a way to create an artificial time difference between calls to run.
     *
     * <p>Declared {@code static} because it never references the enclosing test instance;
     * a non-static inner class would pin a hidden reference to it.
     */
    private static class TestableProcessor extends MonitorActivity {
        private final long timestampDifference;

        public TestableProcessor(final long timestampDifference) {
            this.timestampDifference = timestampDifference;
        }

        @Override
        public void resetLastSuccessfulTransfer() {
            // Back-date the last transfer so the processor looks idle without sleeping.
            setLastSuccessfulTransfer(System.currentTimeMillis() - timestampDifference);
        }
    }

    /** Node scope + primary-only reporting is an invalid property combination. */
    @Test
    public void testClusterMonitorInvalidReportingNode() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_NODE);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.assertNotValid();
    }

    /** Active cluster node persists the latest transfer timestamp to cluster state. */
    @Test
    public void testClusterMonitorActive() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        // This has to be very small threshold, otherwise, MonitorActivity skip persisting state.
        runner.setProperty(MonitorActivity.THRESHOLD, "1 ms");
        runner.enqueue("Incoming data");
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS);
        final StateMap updatedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertNotNull("Latest timestamp should be persisted", updatedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
        // Should be null because COPY_ATTRIBUTES is not set.
        assertNull(updatedState.get("key1"));
        assertNull(updatedState.get("key2"));
    }

    /** Cluster scope on a non-clustered runner falls back to node scope: no cluster state written. */
    @Test
    public void testClusterMonitorActiveFallbackToNodeScope() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(false);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        // This has to be very small threshold, otherwise, MonitorActivity skip persisting state.
        runner.setProperty(MonitorActivity.THRESHOLD, "1 ms");
        runner.enqueue("Incoming data");
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS);
        final StateMap updatedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertNull("Latest timestamp should NOT be persisted, because it's running as 'node' scope",
                updatedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
    }

    /** A stale timestamp already in cluster state is advanced by fresh activity. */
    @Test
    public void testClusterMonitorActiveWithLatestTimestamp() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        // This has to be very small threshold, otherwise, MonitorActivity skip persisting state.
        runner.setProperty(MonitorActivity.THRESHOLD, "1 ms");
        runner.enqueue("Incoming data");
        // Seed an older (1 second in the past) timestamp in state
        final HashMap<String, String> existingState = new HashMap<>();
        final long existingTimestamp = System.currentTimeMillis() - 1_000;
        existingState.put(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER,
                String.valueOf(existingTimestamp));
        existingState.put("key1", "value1");
        existingState.put("key2", "value2");
        runner.getStateManager().setState(existingState, Scope.CLUSTER);
        runner.getStateManager().replace(runner.getStateManager().getState(Scope.CLUSTER), existingState, Scope.CLUSTER);
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS);
        final StateMap postProcessedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertTrue("Existing timestamp should be updated",
                existingTimestamp < Long.parseLong(postProcessedState.get(
                        MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER)));
        // State should be updated. Null in this case.
        assertNull(postProcessedState.get("key1"));
        assertNull(postProcessedState.get("key2"));
    }

    /** A newer timestamp already in cluster state is left untouched. */
    @Test
    public void testClusterMonitorActiveMoreRecentTimestampExisted() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        // This has to be very small threshold, otherwise, MonitorActivity skip persisting state.
        runner.setProperty(MonitorActivity.THRESHOLD, "1 ms");
        runner.enqueue("Incoming data");
        // Set future timestamp in state
        final HashMap<String, String> existingState = new HashMap<>();
        final long existingTimestamp = System.currentTimeMillis() + 10_000;
        existingState.put(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER,
                String.valueOf(existingTimestamp));
        existingState.put("key1", "value1");
        existingState.put("key2", "value2");
        runner.getStateManager().setState(existingState, Scope.CLUSTER);
        runner.getStateManager().replace(runner.getStateManager().getState(Scope.CLUSTER), existingState, Scope.CLUSTER);
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS);
        final StateMap postProcessedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertEquals("Existing timestamp should NOT be updated",
                String.valueOf(existingTimestamp),
                postProcessedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
        // State should stay the same.
        assertEquals(postProcessedState.get("key1"), existingState.get("key1"));
        assertEquals(postProcessedState.get("key2"), existingState.get("key2"));
    }

    /** With COPY_ATTRIBUTES, incoming flow file attributes are persisted into cluster state. */
    @Test
    public void testClusterMonitorActiveCopyAttribute() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        // This has to be very small threshold, otherwise, MonitorActivity skip persisting state.
        runner.setProperty(MonitorActivity.THRESHOLD, "1 ms");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        final HashMap<String, String> attributes = new HashMap<>();
        attributes.put("key1", "value1");
        attributes.put("key2", "value2");
        runner.enqueue("Incoming data", attributes);
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS);
        final StateMap updatedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertNotNull("Latest timestamp should be persisted", updatedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
        assertEquals("value1", updatedState.get("key1"));
        assertEquals("value2", updatedState.get("key2"));
    }

    /** An inactive cluster node (default reporting) emits an inactivity marker flow file. */
    @Test
    public void testClusterMonitorInactivity() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        final List<MockFlowFile> inactiveFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_INACTIVE);
        assertEquals(1, inactiveFiles.size());
        final MockFlowFile inactiveFile = inactiveFiles.get(0);
        assertNotNull(inactiveFile.getAttribute("inactivityStartMillis"));
        assertNotNull(inactiveFile.getAttribute("inactivityDurationMillis"));
        runner.clearTransferState();
    }

    /** Inactivity is still reported when cluster scope falls back to node scope. */
    @Test
    public void testClusterMonitorInactivityFallbackToNodeScope() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(false);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        final List<MockFlowFile> inactiveFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_INACTIVE);
        assertEquals(1, inactiveFiles.size());
        final MockFlowFile inactiveFile = inactiveFiles.get(0);
        assertNotNull(inactiveFile.getAttribute("inactivityStartMillis"));
        assertNotNull(inactiveFile.getAttribute("inactivityDurationMillis"));
        runner.clearTransferState();
    }

    /** With primary-only reporting, the primary node does emit the inactivity marker. */
    @Test
    public void testClusterMonitorInactivityOnPrimaryNode() throws Exception {
        final TestableProcessor processor = new TestableProcessor(TimeUnit.MINUTES.toMillis(120));
        final TestRunner runner = TestRunners.newTestRunner(processor);
        runner.setClustered(true);
        runner.setPrimaryNode(true);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        final List<MockFlowFile> inactiveFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_INACTIVE);
        assertEquals(1, inactiveFiles.size());
        final MockFlowFile inactiveFile = inactiveFiles.get(0);
        assertNotNull(inactiveFile.getAttribute("inactivityStartMillis"));
        assertNotNull(inactiveFile.getAttribute("inactivityDurationMillis"));
        runner.clearTransferState();
    }

    /** With primary-only reporting, a non-primary node stays silent while inactive. */
    @Test
    public void testClusterMonitorInactivityOnNode() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive, but this node should not send a flow file
        runner.run();
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 0);
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 0);
        runner.clearTransferState();
    }

    /** Activity restored by this node's own incoming data; restored file carries copied attributes. */
    @Test
    public void testClusterMonitorActivityRestoredBySelf() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        runner.clearTransferState();
        // Activity restored
        final HashMap<String, String> attributes = new HashMap<>();
        attributes.put("key1", "value1");
        attributes.put("key2", "value2");
        runner.enqueue("Incoming data", attributes);
        runNext(runner);
        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS);
        final List<MockFlowFile> activityRestoredFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED);
        assertEquals(1, successFiles.size());
        assertEquals(1, activityRestoredFiles.size());
        assertEquals("value1", activityRestoredFiles.get(0).getAttribute("key1"));
        assertEquals("value2", activityRestoredFiles.get(0).getAttribute("key2"));
        // Latest activity should be persisted
        final StateMap updatedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertNotNull("Latest timestamp should be persisted", updatedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
        assertEquals("value1", updatedState.get("key1"));
        assertEquals("value2", updatedState.get("key2"));
        runner.clearTransferState();
    }

    /** Non-primary node restores activity: state is updated but no restored notification is sent. */
    @Test
    public void testClusterMonitorActivityRestoredBySelfOnNode() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        // This node won't send notification files
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
        runner.clearTransferState();
        // Activity restored
        final HashMap<String, String> attributes = new HashMap<>();
        attributes.put("key1", "value1");
        attributes.put("key2", "value2");
        runner.enqueue("Incoming data", attributes);
        runNext(runner);
        // This node should not send restored flow file
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_SUCCESS, 1);
        // Latest activity should be persisted
        final StateMap updatedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertNotNull("Latest timestamp should be persisted", updatedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
        assertEquals("value1", updatedState.get("key1"));
        assertEquals("value2", updatedState.get("key2"));
        runner.clearTransferState();
    }

    /** Primary node restores activity itself and sends the restored notification. */
    @Test
    public void testClusterMonitorActivityRestoredBySelfOnPrimaryNode() throws Exception {
        final TestableProcessor processor = new TestableProcessor(TimeUnit.MINUTES.toMillis(120));
        final TestRunner runner = TestRunners.newTestRunner(processor);
        runner.setClustered(true);
        runner.setPrimaryNode(true);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        runner.clearTransferState();
        // Activity restored
        final HashMap<String, String> attributes = new HashMap<>();
        attributes.put("key1", "value1");
        attributes.put("key2", "value2");
        runner.enqueue("Incoming data", attributes);
        runNext(runner);
        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS);
        final List<MockFlowFile> activityRestoredFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED);
        assertEquals(1, successFiles.size());
        assertEquals(1, activityRestoredFiles.size());
        assertEquals("value1", activityRestoredFiles.get(0).getAttribute("key1"));
        assertEquals("value2", activityRestoredFiles.get(0).getAttribute("key2"));
        // Latest activity should be persisted
        final StateMap updatedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertNotNull("Latest timestamp should be persisted", updatedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
        assertEquals("value1", updatedState.get("key1"));
        assertEquals("value2", updatedState.get("key2"));
        runner.clearTransferState();
    }

    /** Node-scope fallback: restored notification is sent locally but nothing is written to cluster state. */
    @Test
    public void testClusterMonitorActivityRestoredBySelfOnPrimaryNodeFallbackToNodeScope() throws Exception {
        final TestableProcessor processor = new TestableProcessor(TimeUnit.MINUTES.toMillis(120));
        final TestRunner runner = TestRunners.newTestRunner(processor);
        runner.setClustered(false);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        runner.clearTransferState();
        // Activity restored
        final HashMap<String, String> attributes = new HashMap<>();
        attributes.put("key1", "value1");
        attributes.put("key2", "value2");
        runner.enqueue("Incoming data", attributes);
        runNext(runner);
        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS);
        final List<MockFlowFile> activityRestoredFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED);
        assertEquals(1, successFiles.size());
        assertEquals(1, activityRestoredFiles.size());
        assertEquals("value1", activityRestoredFiles.get(0).getAttribute("key1"));
        assertEquals("value2", activityRestoredFiles.get(0).getAttribute("key2"));
        // Latest activity should NOT be persisted
        final StateMap updatedState = runner.getStateManager().getState(Scope.CLUSTER);
        assertNull("Latest timestamp should NOT be persisted", updatedState.get(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER));
        runner.clearTransferState();
    }

    /** Another node's cluster-state update restores activity here without local incoming data. */
    @Test
    public void testClusterMonitorActivityRestoredByOtherNode() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        runner.clearTransferState();
        // Activity restored, even if this node doesn't have activity, other node updated the cluster state.
        final HashMap<String, String> clusterState = new HashMap<>();
        clusterState.put(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER, String.valueOf(System.currentTimeMillis()));
        clusterState.put("key1", "value1");
        clusterState.put("key2", "value2");
        runner.getStateManager().setState(clusterState, Scope.CLUSTER);
        runner.getStateManager().replace(runner.getStateManager().getState(Scope.CLUSTER), clusterState, Scope.CLUSTER);
        runNext(runner);
        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS);
        final List<MockFlowFile> activityRestoredFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED);
        assertEquals("Should be zero since it doesn't have incoming file.", 0, successFiles.size());
        assertEquals(1, activityRestoredFiles.size());
        assertEquals("value1", activityRestoredFiles.get(0).getAttribute("key1"));
        assertEquals("value2", activityRestoredFiles.get(0).getAttribute("key2"));
        runner.clearTransferState();
    }

    /** Primary node reports the restoration triggered by another node's state update. */
    @Test
    public void testClusterMonitorActivityRestoredByOtherNodeOnPrimary() throws Exception {
        final TestableProcessor processor = new TestableProcessor(TimeUnit.MINUTES.toMillis(120));
        final TestRunner runner = TestRunners.newTestRunner(processor);
        runner.setClustered(true);
        runner.setPrimaryNode(true);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.setProperty(MonitorActivity.THRESHOLD, "1 hour");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertAllFlowFilesTransferred(MonitorActivity.REL_INACTIVE);
        runner.clearTransferState();
        // Activity restored, even if this node doesn't have activity, other node updated the cluster state.
        final HashMap<String, String> clusterState = new HashMap<>();
        clusterState.put(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER, String.valueOf(System.currentTimeMillis()));
        clusterState.put("key1", "value1");
        clusterState.put("key2", "value2");
        runner.getStateManager().setState(clusterState, Scope.CLUSTER);
        runner.getStateManager().replace(runner.getStateManager().getState(Scope.CLUSTER), clusterState, Scope.CLUSTER);
        runNext(runner);
        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_SUCCESS);
        final List<MockFlowFile> activityRestoredFiles = runner.getFlowFilesForRelationship(MonitorActivity.REL_ACTIVITY_RESTORED);
        assertEquals("Should be zero since it doesn't have incoming file.", 0, successFiles.size());
        assertEquals(1, activityRestoredFiles.size());
        assertEquals("value1", activityRestoredFiles.get(0).getAttribute("key1"));
        assertEquals("value2", activityRestoredFiles.get(0).getAttribute("key2"));
        runner.clearTransferState();
    }

    /** Non-primary node stays silent even when another node's update restores activity. */
    @Test
    public void testClusterMonitorActivityRestoredByOtherNodeOnNode() throws Exception {
        final TestRunner runner = TestRunners.newTestRunner(new TestableProcessor(TimeUnit.MINUTES.toMillis(120)));
        runner.setClustered(true);
        runner.setPrimaryNode(false);
        runner.setProperty(MonitorActivity.MONITORING_SCOPE, MonitorActivity.SCOPE_CLUSTER);
        runner.setProperty(MonitorActivity.REPORTING_NODE, MonitorActivity.REPORT_NODE_PRIMARY);
        runner.setProperty(MonitorActivity.THRESHOLD, "3 mins");
        runner.setProperty(MonitorActivity.COPY_ATTRIBUTES, "true");
        // Becomes inactive
        runner.run();
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
        runner.clearTransferState();
        // Activity restored, even if this node doesn't have activity, other node updated the cluster state.
        final HashMap<String, String> clusterState = new HashMap<>();
        clusterState.put(MonitorActivity.STATE_KEY_LATEST_SUCCESS_TRANSFER, String.valueOf(System.currentTimeMillis()));
        clusterState.put("key1", "value1");
        clusterState.put("key2", "value2");
        runner.getStateManager().setState(clusterState, Scope.CLUSTER);
        runner.getStateManager().replace(runner.getStateManager().getState(Scope.CLUSTER), clusterState, Scope.CLUSTER);
        runNext(runner);
        runner.assertTransferCount(MonitorActivity.REL_SUCCESS, 0);
        runner.assertTransferCount(MonitorActivity.REL_INACTIVE, 0);
        runner.assertTransferCount(MonitorActivity.REL_ACTIVITY_RESTORED, 0);
        runner.clearTransferState();
    }
}
|
|
/* JOrbis
* Copyright (C) 2000 ymnk, JCraft,Inc.
*
* Written by: 2000 ymnk<ymnk@jcraft.com>
*
* Many thanks to
* Monty <monty@xiph.org> and
* The XIPHOPHORUS Company http://www.xiph.org/ .
* JOrbis has been based on their awesome works, Vorbis codec.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package com.jcraft.jorbis;
class Lpc{
  // en/decode lookups

  /** FFT engine used to cheat decoding of the LPC spectrum (sized in init()). */
  Drft fft = new Drft();

  /** Length of the (half-)spectrum envelope this lookup was initialized for. */
  int ln;
  /** Number of LPC coefficients (filter order). */
  int m;

  // Autocorrelation LPC coeff generation algorithm invented by
  // N. Levinson in 1947, modified by J. Durbin in 1959.

  /**
   * Computes LPC coefficients from time-domain samples via the
   * Levinson-Durbin recursion.
   *
   * Input : n elements of time domain data
   * Output: m lpc coefficients written into {@code lpc}
   *
   * @param data time-domain input samples (first n entries used)
   * @param lpc  receives the m LPC coefficients
   * @param n    number of input samples
   * @param m    filter order (number of coefficients)
   * @return the excitation (residual) energy; 0 if the signal is degenerate
   */
  static float lpc_from_data(float[] data, float[] lpc, int n, int m){
    float[] aut = new float[m+1];
    float error;
    int i, j;

    // autocorrelation, p+1 lag coefficients
    j = m+1;
    while(j-- != 0){
      float d = 0;
      for(i = j; i < n; i++)
        d += data[i]*data[i-j];
      aut[j] = d;
    }

    // Generate lpc coefficients from autocorr values
    error = aut[0];

    for(i = 0; i < m; i++){
      float r = -aut[i+1];

      // Degenerate case: no remaining energy -- emit an all-zero filter.
      if(error == 0){
        for(int k = 0; k < m; k++)
          lpc[k] = 0.0f;
        return 0;
      }

      // Sum up this iteration's reflection coefficient; note that in
      // Vorbis we don't save it.  If anyone wants to recycle this code
      // and needs reflection coefficients, save the results of 'r' from
      // each iteration.
      for(j = 0; j < i; j++)
        r -= lpc[j]*aut[i-j];
      r /= error;

      // Update LPC coefficients and total error
      lpc[i] = r;
      for(j = 0; j < i/2; j++){
        float tmp = lpc[j];
        lpc[j] += r*lpc[i-1-j];
        lpc[i-1-j] += r*tmp;
      }
      // Middle coefficient when i is odd (after the loop above, j == i/2).
      if(i%2 != 0)
        lpc[j] += lpc[j]*r;

      error *= 1.0-r*r;
    }

    // we need the error value to know how big an impulse to hit the
    // filter with later
    return error;
  }

  /**
   * Derives LPC coefficients from a spectral envelope curve.
   *
   * Input : n element envelope spectral curve (n == ln)
   * Output: m lpc coefficients written into {@code lpc}
   *
   * @param curve real spectral envelope of length ln
   * @param lpc   receives the m LPC coefficients
   * @return the excitation energy reported by lpc_from_data()
   */
  float lpc_from_curve(float[] curve, float[] lpc){
    int n = ln;
    float[] work = new float[n+n];
    float fscale = (float)(.5/n);
    int i, j;

    // input is a real curve. make it complex-real
    // This mixes phase, but the LPC generation doesn't care.
    for(i = 0; i < n; i++){
      work[i*2] = curve[i]*fscale;
      work[i*2+1] = 0;
    }
    // Duplicate the last sample into the final (imaginary) slot,
    // matching the reference Vorbis implementation.
    work[n*2-1] = curve[n-1]*fscale;

    n *= 2;
    fft.backward(work);

    // The autocorrelation will not be circular.  Shift, else we lose
    // most of the power in the edges.
    for(i = 0, j = n/2; i < n/2;){
      float temp = work[i];
      work[i++] = work[j];
      work[j++] = temp;
    }

    return (lpc_from_data(work, lpc, n, m));
  }

  /**
   * Initializes this lookup for an envelope of length {@code mapped}
   * and an LPC filter of order {@code m}.
   *
   * @param mapped envelope length (ln)
   * @param m      filter order
   */
  void init(int mapped, int m){
    ln = mapped;
    this.m = m;

    // we cheat decoding the LPC spectrum via FFTs
    fft.init(mapped*2);
  }

  /** Releases the FFT lookup tables. */
  void clear(){
    fft.clear();
  }

  /** Plain sqrt(a^2 + b^2); float port of the C FAST_HYPOT macro. */
  static float FAST_HYPOT(float a, float b){
    return (float)Math.sqrt((a)*(a) + (b)*(b));
  }

  // One can do this the long way by generating the transfer function in
  // the time domain and taking the forward FFT of the result.  The
  // results from direct calculation are cleaner and faster.
  //
  // This version does a linear curve generation and then later
  // interpolates the log curve from the linear curve.

  /**
   * Reconstructs a spectral envelope curve from LPC coefficients.
   *
   * @param curve output buffer of length at least ln*2; overwritten
   * @param lpc   the m LPC coefficients
   * @param amp   amplitude scale; 0 leaves the curve all zero
   */
  void lpc_to_curve(float[] curve, float[] lpc, float amp){
    // Zero the whole working buffer before packing coefficients.
    for(int i = 0; i < ln*2; i++)
      curve[i] = 0.0f;

    if(amp == 0)
      return;

    // Pack the coefficients as conjugate pairs for the inverse FFT.
    for(int i = 0; i < m; i++){
      curve[i*2+1] = lpc[i]/(4*amp);
      curve[i*2+2] = -lpc[i]/(4*amp);
    }

    fft.backward(curve); // reappropriated ;-)

    {
      int l2 = ln*2;
      float unit = (float)(1./amp);
      curve[0] = (float)(1./(curve[0]*2+unit));
      for(int i = 1; i < ln; i++){
        float real = (curve[i]+curve[l2-i]);
        float imag = (curve[i]-curve[l2-i]);
        float a = real + unit;
        curve[i] = (float)(1.0 / FAST_HYPOT(a, imag));
      }
    }
  }
}
|
|
/**
 * TestCalculationsOnRings.java
 *
 * Author : Christopher K. Allen
 * Since  : Nov 19, 2013
 */
package xal.tools.beam.calc;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import xal.model.alg.EnvTrackerAdapt;
import xal.model.alg.ParticleTracker;
import xal.model.alg.TransferMapTracker;
import xal.model.probe.EnvelopeProbe;
import xal.model.probe.ParticleProbe;
import xal.model.probe.TransferMapProbe;
import xal.model.probe.traj.Trajectory;
import xal.model.probe.traj.TransferMapState;
import xal.sim.scenario.AlgorithmFactory;
import xal.sim.scenario.ProbeFactory;
import xal.sim.scenario.Scenario;
import xal.smf.Accelerator;
import xal.smf.AcceleratorSeq;
import xal.test.ResourceManager;
import xal.tools.beam.PhaseMatrix;
import xal.tools.beam.PhaseMatrix.IND;
import xal.tools.beam.PhaseVector;
import xal.tools.beam.Twiss;
import xal.tools.beam.Twiss3D;
import xal.tools.math.r3.R3;
/**
 * Test cases for the <code>CalculationsOnRings</code> class, exercised
 * against the "Ring" sequence of the test accelerator.  Results are written
 * to a text file for inspection rather than asserted numerically.
 *
 * @author Christopher K. Allen
 * @since Nov 19, 2013
 */
public class TestCalculationsOnRings {

    /*
     * Global Constants
     */

    /** Output file location */
    private static final String STR_OUTPUT = TestCalculationsOnRings.class.getName() + ".txt";

    /** String identifier for accelerator sequence used in testing */
    private static final String STR_SEQ_ID = "Ring";

    /*
     * Global Attributes
     */

    /** The file where we send the testing output */
    private static FileWriter OWTR_OUTPUT;

    /** Accelerator object used for testing */
    private static Accelerator ACCEL_TEST;

    /** Accelerator sequence used for testing */
    private static AcceleratorSeq SEQ_TEST;

    /** Accelerator sequence (online) model for testing */
    private static Scenario MODEL_TEST;

    /** Envelope probe for model testing */
    private static EnvelopeProbe PROBE_ENV_TEST;

    /** Particle probe for model testing */
    private static ParticleProbe PROBE_PARTL_TEST;

    /** Transfer map probe for model testing */
    private static TransferMapProbe PROBE_XFER_TEST;

    /*
     * Global Methods
     */

    /**
     * Builds the shared test fixtures: opens the output file, loads the test
     * accelerator, creates the design-synchronized model for the ring
     * sequence, and runs it once each with a particle probe, a transfer map
     * probe, and an envelope probe.  The transfer map trajectory is the one
     * consumed by the individual tests.
     *
     * @throws java.lang.Exception
     *
     * @author Christopher K. Allen
     * @since Jul 16, 2012
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        try {
            File fileOutput = xal.test.ResourceManager.getOutputFile(TestCalculationsOnRings.class, STR_OUTPUT);
            OWTR_OUTPUT = new FileWriter(fileOutput);

            ACCEL_TEST = ResourceManager.getTestAccelerator();
            SEQ_TEST = ACCEL_TEST.findSequence(STR_SEQ_ID);
            MODEL_TEST = Scenario.newScenarioFor(SEQ_TEST);
            MODEL_TEST.setSynchronizationMode(Scenario.SYNC_MODE_DESIGN);

            // Create and initialize the particle probe
            ParticleTracker algPart = AlgorithmFactory.createParticleTracker(SEQ_TEST);
            PROBE_PARTL_TEST = ProbeFactory.createParticleProbe(SEQ_TEST, algPart);
            PROBE_PARTL_TEST.reset();
            MODEL_TEST.setProbe(PROBE_PARTL_TEST);
            MODEL_TEST.resync();
            MODEL_TEST.run();

            // Create and initialize transfer map probe
            TransferMapTracker algXferMap = AlgorithmFactory.createTransferMapTracker(SEQ_TEST);
            PROBE_XFER_TEST = ProbeFactory.getTransferMapProbe(SEQ_TEST, algXferMap);
            PROBE_XFER_TEST.reset();
            MODEL_TEST.setProbe(PROBE_XFER_TEST);
            MODEL_TEST.resync();
            MODEL_TEST.run();

            // Create and initialize the envelope probe
            EnvTrackerAdapt algEnv = AlgorithmFactory.createEnvTrackerAdapt(SEQ_TEST);
            PROBE_ENV_TEST = ProbeFactory.getEnvelopeProbe(SEQ_TEST, algEnv);
            PROBE_ENV_TEST.reset();
            MODEL_TEST.setProbe(PROBE_ENV_TEST);
            MODEL_TEST.resync();
            MODEL_TEST.run();

        } catch (Exception e) {
            System.out.println("Exception: " + e);
            e.printStackTrace();
            System.err.println("Unable to initialize the static test resources");
            Assert.fail();
        }
    }

    /**
     * Flushes and closes the common output file after all tests have run.
     *
     * @throws IOException if the output stream cannot be flushed or closed
     *
     * @author Christopher K. Allen
     * @since Nov 9, 2011
     */
    @AfterClass
    public static void commonCleanup() throws IOException {
        OWTR_OUTPUT.flush();
        OWTR_OUTPUT.close();
    }

    /*
     * Local Attributes
     */

    /** Calculation engine for ring parameters using transfer map states */
    private CalculationsOnRings calXferRing;

    /**
     * Creates a fresh ring-calculation engine from the transfer map
     * trajectory before each test.
     *
     * @throws java.lang.Exception
     *
     * @author Christopher K. Allen
     * @since May 3, 2011
     */
    @Before
    public void setUp() throws Exception {
        this.calXferRing = new CalculationsOnRings(PROBE_XFER_TEST.getTrajectory());
    }

    /*
     * Tests
     */

    /**
     * Test method for {@link xal.tools.beam.calc.SimResultsAdaptor#computeFixedOrbit(xal.model.probe.traj.ProbeState)}.
     * @throws IOException
     */
    @Test
    public void testComputeFixedOrbit() throws IOException {

        // Do computations on the transfer map trajectory
        OWTR_OUTPUT.write("\nTransferMapTrajectory: computeFixedOrbit");
        OWTR_OUTPUT.write("\n");

        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();
        for (TransferMapState state : trjXfer) {
            PhaseVector vecPos = this.calXferRing.computeFixedOrbit(state);
            OWTR_OUTPUT.write(state.getElementId() + ": " + vecPos.toString());
            OWTR_OUTPUT.write("\n");
        }
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Test method for {@link xal.tools.beam.calc.SimResultsAdaptor#computeChromAberration(xal.model.probe.traj.ProbeState)}.
     */
    @Test
    public void testComputeChromaticAberration() throws IOException {

        // Do computations on the transfer map trajectory
        OWTR_OUTPUT.write("\nTransferMapTrajectory: computeChromAberration");
        OWTR_OUTPUT.write("\n");

        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();
        for (TransferMapState state : trjXfer) {
            PhaseVector vecPos = this.calXferRing.computeChromAberration(state);
            OWTR_OUTPUT.write(state.getElementId() + ": " + vecPos.toString());
            OWTR_OUTPUT.write("\n");
        }
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Test method for {@link xal.tools.beam.calc.SimResultsAdaptor#computeTwissParameters(xal.model.probe.traj.ProbeState)}.
     */
    @Test
    public void testComputeTwissParameters() throws IOException {

        // Do computations on the transfer map trajectory; compare the
        // generic Twiss parameters against the matched ones at each state.
        OWTR_OUTPUT.write("\nCalculationsOnRings: computeTwissParameters() and computeMatchedTwissAt()");
        OWTR_OUTPUT.write("\n");

        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();
        for (TransferMapState state : trjXfer) {
            Twiss[] arrTwissMt = this.calXferRing.computeMatchedTwissAt(state);
            Twiss[] arrTwissAt = this.calXferRing.computeTwissParameters(state);

            Twiss3D t3dMach = new Twiss3D(arrTwissMt);
            Twiss3D t3dAt = new Twiss3D(arrTwissAt);

            OWTR_OUTPUT.write(state.getElementId() + "\n");
            OWTR_OUTPUT.write("  Generic = " + t3dAt.toString() + "\n");
            OWTR_OUTPUT.write("  Matched = " + t3dMach.toString());
            OWTR_OUTPUT.write("\n");
        }
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Test method for {@link xal.tools.beam.calc.SimResultsAdaptor#computeBetatronPhase(xal.model.probe.traj.ProbeState)}.
     */
    @Test
    public void testComputeBetatronPhase() throws IOException {

        // Do computations on the transfer map trajectory
        OWTR_OUTPUT.write("\nTransferMapTrajectory: computeBetatronPhase");
        OWTR_OUTPUT.write("\n");

        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();
        for (TransferMapState state : trjXfer) {
            R3 vecPhase = this.calXferRing.computeBetatronPhase(state);
            OWTR_OUTPUT.write(state.getElementId() + ": " + vecPhase.toString());
            OWTR_OUTPUT.write("\n");
        }
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Test method for {@link xal.tools.beam.calc.CalculationsOnRings#computePhaseAdvanceBetween(xal.model.probe.traj.ProbeState, xal.model.probe.traj.ProbeState)}.
     * Accumulates the phase advance over consecutive state pairs and
     * reports the total.
     */
    @Test
    public void testComputePhaseAdvance() throws IOException {

        // Do computations on the transfer map trajectory
        OWTR_OUTPUT.write("\nComputationsOnRings: computePhaseAdvance");
        OWTR_OUTPUT.write("\n");

        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();
        R3 vecPhsTot = R3.zero();
        TransferMapState state1 = trjXfer.initialState();
        for (TransferMapState state2 : trjXfer) {
            R3 vecPhsAdv = this.calXferRing.computePhaseAdvanceBetween(state1, state2);
            OWTR_OUTPUT.write(state1.getElementId() + "-" + state2.getElementId() + ": " + vecPhsAdv.toString());
            OWTR_OUTPUT.write("\n");

            state1 = state2;
            vecPhsTot.plusEquals(vecPhsAdv);
        }
        OWTR_OUTPUT.write("Total phase advance = " + vecPhsTot);
        OWTR_OUTPUT.write("\n");
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Test method for {@link xal.tools.beam.calc.SimResultsAdaptor#computeChromDispersion(xal.model.probe.traj.ProbeState)}.
     */
    @Test
    public void testComputeChromDispersion() throws IOException {

        // Do computations on the transfer map trajectory
        OWTR_OUTPUT.write("\nTransferMapTrajectory: computeChromDispersion");
        OWTR_OUTPUT.write("\n");

        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();
        for (TransferMapState state : trjXfer) {
            PhaseVector vecPhase = this.calXferRing.computeChromDispersion(state);
            OWTR_OUTPUT.write(state.getElementId() + ": " + vecPhase.toString());
            OWTR_OUTPUT.write("\n");
        }
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Test method for computing ring tunes.
     *
     * @throws IOException
     *
     * @author Christopher K. Allen
     * @since Nov 5, 2014
     */
    @Test
    public void testComputeRingTunes() throws IOException {

        // Compute the ring tunes
        OWTR_OUTPUT.write("\nRing Computation: computeFractionalTunes");
        OWTR_OUTPUT.write("\n");
        R3 vecFracTunes = this.calXferRing.computeFractionalTunes();
        OWTR_OUTPUT.write("  fraction tunes: " + vecFracTunes.toString());
        OWTR_OUTPUT.write("\n");

        OWTR_OUTPUT.write("\nRing Computation: computeFullTunes");
        OWTR_OUTPUT.write("\n");
        R3 vecFullTunes = this.calXferRing.computeFullTunes();
        OWTR_OUTPUT.write("  full tunes: " + vecFullTunes.toString());
        OWTR_OUTPUT.write("\n");

        OWTR_OUTPUT.write("\n");
    }

    /**
     * Test the turn-by-turn computations of the ring calculation engine.
     *
     * @throws IOException
     *
     * @author Christopher K. Allen
     * @since Nov 5, 2014
     */
    @Test
    public void testTurnByTurnResponse() throws IOException {
        String strElemId1 = "Ring_Inj:Foil";
        String strElemId2 = "Begin_Of_Ring1";
        int cntTurns = 50;
        PhaseVector vecInit = new PhaseVector(0.00, 0, 0, 0, 0, 0);

        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();
        TransferMapState state1 = trjXfer.stateForElement(strElemId1);
        TransferMapState state2 = trjXfer.stateForElement(strElemId2);

        // Compute the ring tunes
        OWTR_OUTPUT.write("\nRing Computation: computeTurnByTurnResponse");
        OWTR_OUTPUT.write("\n  Injection location   " + state1.getElementId());
        OWTR_OUTPUT.write("\n  Observation location " + state2.getElementId());
        OWTR_OUTPUT.write("\n");

        int cnt = 0;
        PhaseVector[] arrVecRsp = this.calXferRing.computeTurnByTurnResponse(state1, state2, cntTurns, vecInit);
        for (PhaseVector vecRsp : arrVecRsp) {
            OWTR_OUTPUT.write("\n  " + cnt + " coordinates: " + vecRsp.toString());
            cnt++;
        }
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Do some experiments with fixed orbit vectors
     * @throws IOException
     *
     * @author Christopher K. Allen
     * @since Nov 6, 2014
     */
    @Test
    public void testFixedPointOrbit() throws IOException {
        Trajectory<TransferMapState> trjXfer = PROBE_XFER_TEST.getTrajectory();

        String strElemId = "Ring_Inj:Foil";
        TransferMapState state = trjXfer.stateForElement(strElemId);

        PhaseVector vecFxdOrb = this.calXferRing.computeFixedOrbit(state);
        PhaseMatrix matFull = this.calXferRing.computeRingFullTurnMatrixAt(state);

        OWTR_OUTPUT.write("\nFull Turn Matrix \n");
        OWTR_OUTPUT.write(matFull.toStringMatrix());
        OWTR_OUTPUT.write("\n");
        OWTR_OUTPUT.write("\nProjected Full Turn Matrix \n");
        OWTR_OUTPUT.write(matFull.projectR6x6().toStringMatrix());
        OWTR_OUTPUT.write("\n");
        OWTR_OUTPUT.write("\nProjected Displacement Vector \n");
        OWTR_OUTPUT.write(matFull.projectColumn(IND.HOM).toString());
        OWTR_OUTPUT.write("\n");

        // A fixed point should map to itself under the full-turn matrix;
        // apply the matrix, then recover the original vector two ways.
        PhaseVector vec1 = matFull.times(vecFxdOrb);
        PhaseVector vec2 = matFull.inverse().times(vec1);
        PhaseVector vec3 = matFull.solve(vec1);

        OWTR_OUTPUT.write("\nRing Fixed Point Experiments ");
        OWTR_OUTPUT.write("\n  Fixed point vector : " + vecFxdOrb.toString());
        OWTR_OUTPUT.write("\n  After one turn     : " + vec1.toString());
        OWTR_OUTPUT.write("\n  Applying inverse   : " + vec2.toString());
        OWTR_OUTPUT.write("\n  Using linear solve : " + vec3.toString());
        OWTR_OUTPUT.write("\n");
    }

    /**
     * Placeholder test -- not yet implemented.
     */
    @Test
    public void testLinearSolve() {
    }
}
|
|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshift.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* The snapshot copy grant that grants Amazon Redshift permission to encrypt copied snapshots with the specified
* customer master key (CMK) from AWS KMS in the destination region.
* </p>
* <p>
* For more information about managing snapshot copy grants, go to <a
* href="http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-db-encryption.html">Amazon Redshift Database
* Encryption</a> in the <i>Amazon Redshift Cluster Management Guide</i>.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/SnapshotCopyGrant" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SnapshotCopyGrant implements Serializable, Cloneable {

    /** The name of the snapshot copy grant. */
    private String snapshotCopyGrantName;

    /**
     * The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted permission.
     */
    private String kmsKeyId;

    /** A list of tag instances. */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean memberEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /** Null-safe hash code used by {@link #hashCode()}. */
    private static int memberHash(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    /**
     * Sets the name of the snapshot copy grant.
     *
     * @param snapshotCopyGrantName
     *        The name of the snapshot copy grant.
     */
    public void setSnapshotCopyGrantName(String snapshotCopyGrantName) {
        this.snapshotCopyGrantName = snapshotCopyGrantName;
    }

    /**
     * Gets the name of the snapshot copy grant.
     *
     * @return The name of the snapshot copy grant.
     */
    public String getSnapshotCopyGrantName() {
        return this.snapshotCopyGrantName;
    }

    /**
     * Fluent setter for the name of the snapshot copy grant.
     *
     * @param snapshotCopyGrantName
     *        The name of the snapshot copy grant.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withSnapshotCopyGrantName(String snapshotCopyGrantName) {
        this.snapshotCopyGrantName = snapshotCopyGrantName;
        return this;
    }

    /**
     * Sets the unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     * permission.
     *
     * @param kmsKeyId
     *        The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     *        permission.
     */
    public void setKmsKeyId(String kmsKeyId) {
        this.kmsKeyId = kmsKeyId;
    }

    /**
     * Gets the unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     * permission.
     *
     * @return The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     *         permission.
     */
    public String getKmsKeyId() {
        return this.kmsKeyId;
    }

    /**
     * Fluent setter for the unique identifier of the customer master key (CMK) in AWS KMS.
     *
     * @param kmsKeyId
     *        The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     *        permission.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withKmsKeyId(String kmsKeyId) {
        this.kmsKeyId = kmsKeyId;
        return this;
    }

    /**
     * Gets the list of tag instances, lazily creating an empty list on first access.
     *
     * @return A list of tag instances.
     */
    public java.util.List<Tag> getTags() {
        if (this.tags == null) {
            this.tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return this.tags;
    }

    /**
     * Sets the list of tag instances.  A {@code null} argument clears the list.
     *
     * @param tags
     *        A list of tag instances.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * Appends tag instances to the existing list.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        A list of tag instances.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withTags(Tag... tags) {
        if (this.tags == null) {
            this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags.length);
        }
        for (Tag ele : tags) {
            this.tags.add(ele);
        }
        return this;
    }

    /**
     * Fluent setter for the list of tag instances (replaces any existing values).
     *
     * @param tags
     *        A list of tag instances.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getSnapshotCopyGrantName() != null) {
            sb.append("SnapshotCopyGrantName: ").append(getSnapshotCopyGrantName()).append(",");
        }
        if (getKmsKeyId() != null) {
            sb.append("KmsKeyId: ").append(getKmsKeyId()).append(",");
        }
        if (getTags() != null) {
            sb.append("Tags: ").append(getTags());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SnapshotCopyGrant)) {
            return false;
        }
        SnapshotCopyGrant that = (SnapshotCopyGrant) obj;
        return memberEquals(this.getSnapshotCopyGrantName(), that.getSnapshotCopyGrantName())
                && memberEquals(this.getKmsKeyId(), that.getKmsKeyId())
                && memberEquals(this.getTags(), that.getTags());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated original so hash values
        // remain identical for existing callers.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + memberHash(getSnapshotCopyGrantName());
        hashCode = prime * hashCode + memberHash(getKmsKeyId());
        hashCode = prime * hashCode + memberHash(getTags());
        return hashCode;
    }

    @Override
    public SnapshotCopyGrant clone() {
        try {
            return (SnapshotCopyGrant) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
|
|
package org.jivesoftware.openfire.admin;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.clearspace.ClearspaceManager;
import org.jivesoftware.openfire.session.ComponentSession;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.StringUtils;
import java.text.NumberFormat;
import java.util.Collection;
import java.util.Date;
public final class clearspace_002dstatus_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
private static java.util.List _jspx_dependants;
private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_fmt_message_key_nobody;
private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_fmt_message_key;
private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_fmt_param_value_nobody;
/**
 * Jasper-generated accessor for the page's source dependants list
 * (required by {@code JspSourceDependent}).  The backing field is never
 * assigned in the visible code, so this presumably returns null --
 * TODO confirm against the full generated file.
 */
public Object getDependants() {
    return _jspx_dependants;
}
/**
 * One-time initialization generated by Jasper: acquires the three JSTL
 * tag-handler pools (fmt:message with/without body, fmt:param) used by
 * this translated page.
 */
public void _jspInit() {
    final ServletConfig cfg = getServletConfig();
    _jspx_tagPool_fmt_message_key_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(cfg);
    _jspx_tagPool_fmt_message_key = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(cfg);
    _jspx_tagPool_fmt_param_value_nobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(cfg);
}
/**
 * Teardown generated by Jasper: releases each tag-handler pool acquired
 * in _jspInit().  The pools are independent of one another, so the
 * release order is immaterial.
 */
public void _jspDestroy() {
    _jspx_tagPool_fmt_param_value_nobody.release();
    _jspx_tagPool_fmt_message_key.release();
    _jspx_tagPool_fmt_message_key_nobody.release();
}
public void _jspService(HttpServletRequest request, HttpServletResponse response)
throws java.io.IOException, ServletException {
JspFactory _jspxFactory = null;
PageContext pageContext = null;
HttpSession session = null;
ServletContext application = null;
ServletConfig config = null;
JspWriter out = null;
Object page = this;
JspWriter _jspx_out = null;
PageContext _jspx_page_context = null;
try {
_jspxFactory = JspFactory.getDefaultFactory();
response.setContentType("text/html");
pageContext = _jspxFactory.getPageContext(this, request, response,
"error.jsp", true, 8192, true);
_jspx_page_context = pageContext;
application = pageContext.getServletContext();
config = pageContext.getServletConfig();
session = pageContext.getSession();
out = pageContext.getOut();
_jspx_out = out;
out.write("\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
org.jivesoftware.util.WebManager webManager = null;
synchronized (_jspx_page_context) {
webManager = (org.jivesoftware.util.WebManager) _jspx_page_context.getAttribute("webManager", PageContext.PAGE_SCOPE);
if (webManager == null){
webManager = new org.jivesoftware.util.WebManager();
_jspx_page_context.setAttribute("webManager", webManager, PageContext.PAGE_SCOPE);
}
}
out.write('\n');
webManager.init(request, response, session, application, out );
out.write('\n');
out.write('\n');
boolean test = request.getParameter("test") != null;
boolean configure = request.getParameter("configure") != null;
String testPage = "setup/setup-clearspace-integration_test.jsp";
ClearspaceManager manager = ClearspaceManager.getInstance();
boolean configured = false;
// Checks if CS and OF are currently connected
boolean connectedCS = manager.isClearspaceConnected();
boolean connectedOF = manager.isOpenfireConnected();
// If OF is connected to CS and there is a configure action, configure clearspace
if (connectedOF && configure) {
configured = manager.configClearspace();
}
// This fields will hold the status information of the connection
Date creationDate = null;
Date lastActivity = null;
int numServerPackets = 0;
int numClientPackets = 0;
int numComponents = 0;
Collection<ComponentSession> componentSessions = null;
// If connected collects stats from Clearspace sessions
if (connectedCS && connectedOF) {
SessionManager sessionManager = webManager.getSessionManager();
componentSessions = sessionManager.getComponentSessions();
for (ComponentSession cs : componentSessions) {
// All Clearspace sessions start with "clearspace"
if (cs.getAddress().getDomain().startsWith("clearspace")) {
if (creationDate == null || cs.getCreationDate().before(creationDate)) {
creationDate = cs.getCreationDate();
}
if (lastActivity == null || cs.getLastActiveDate().after(lastActivity)) {
lastActivity = cs.getLastActiveDate();
}
numClientPackets += cs.getNumClientPackets();
numServerPackets += cs.getNumServerPackets();
numComponents++;
break;
}
}
}
// Number dateFormatter for all numbers on this page:
NumberFormat numFormatter = NumberFormat.getNumberInstance();
out.write("\n\n<html>\n<head>\n<title>");
if (_jspx_meth_fmt_message_0(_jspx_page_context))
return;
out.write("</title>\n<meta name=\"pageID\" content=\"clearspace-status\"/>\n\n<style type=\"text/css\" title=\"setupStyle\" media=\"screen\">\n @import \"style/lightbox.css\";\n @import \"style/ldap.css\";\n</style>\n\n<script language=\"JavaScript\" type=\"text/javascript\" src=\"js/prototype.js\"></script>\n<script language=\"JavaScript\" type=\"text/javascript\" src=\"js/scriptaculous.js\"></script>\n<script language=\"JavaScript\" type=\"text/javascript\" src=\"js/lightbox.js\"></script>\n<script language=\"javascript\" type=\"text/javascript\" src=\"js/tooltips/domLib.js\"></script>\n<script language=\"javascript\" type=\"text/javascript\" src=\"js/tooltips/domTT.js\"></script>\n<script src=\"dwr/engine.js\" type=\"text/javascript\"></script>\n<script src=\"dwr/util.js\" type=\"text/javascript\"></script>\n</head>\n\n<body>\n\n");
if (test) {
out.write("\n\n <a href=\"");
out.print( testPage);
out.write("\" id=\"lbmessage\" title=\"");
if (_jspx_meth_fmt_message_1(_jspx_page_context))
return;
out.write("\" style=\"display:none;\"></a>\n <script type=\"text/javascript\">\n function loadMsg() {\n var lb = new lightbox(document.getElementById('lbmessage'));\n lb.activate();\n }\n setTimeout('loadMsg()', 250);\n </script>\n\n");
}
out.write('\n');
out.write('\n');
if (configure && !configured) {
out.write("\n\n<div class=\"error\">\n ");
if (_jspx_meth_fmt_message_2(_jspx_page_context))
return;
out.write("\n</div>\n\n");
}
out.write('\n');
out.write('\n');
if (connectedCS && connectedOF) {
out.write("\n<p>\n");
if (_jspx_meth_fmt_message_3(_jspx_page_context))
return;
out.write("\n</p>\n<div class=\"jive-table\">\n<table cellpadding=\"0\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n<thead>\n <tr>\n <th colspan=\"2\">\n ");
if (_jspx_meth_fmt_message_4(_jspx_page_context))
return;
out.write("\n </th>\n </tr>\n</thead>\n<tbody>\n <tr>\n <td class=\"c1\">\n ");
if (_jspx_meth_fmt_message_5(_jspx_page_context))
return;
out.write("\n </td>\n <td>\n ");
if (_jspx_meth_fmt_message_6(_jspx_page_context))
return;
out.write("\n </td>\n </tr>\n ");
if (numComponents > 1) {
out.write("\n <tr>\n <td class=\"c1\">\n ");
if (_jspx_meth_fmt_message_7(_jspx_page_context))
return;
out.write("\n </td>\n <td>\n ");
out.print( numFormatter.format(numComponents) );
out.write("\n </td>\n </tr>\n ");
}
out.write("\n <tr>\n <td class=\"c1\">\n ");
if (_jspx_meth_fmt_message_8(_jspx_page_context))
return;
out.write("\n </td>\n <td>\n ");
out.print( JiveGlobals.formatDateTime(creationDate) );
out.write("\n </td>\n </tr>\n <tr>\n <td class=\"c1\">\n ");
if (_jspx_meth_fmt_message_9(_jspx_page_context))
return;
out.write("\n </td>\n <td>\n ");
out.print( JiveGlobals.formatDateTime(lastActivity) );
out.write("\n </td>\n </tr>\n <tr>\n <td class=\"c1\">\n ");
if (_jspx_meth_fmt_message_10(_jspx_page_context))
return;
out.write("\n </td>\n <td>\n ");
if (_jspx_meth_fmt_message_11(_jspx_page_context))
return;
out.write("\n ");
out.print( numFormatter.format(numClientPackets) );
out.write('/');
out.print( numFormatter.format(numServerPackets) );
out.write("\n </td>\n </tr>\n ");
boolean first = true;
for (ComponentSession cs : componentSessions) {
if (first) {
first = false;
out.write("\n <tr>\n <td rowsapn=\"");
out.print( componentSessions.size() );
out.write("\" class=\"c1\">\n ");
if (_jspx_meth_fmt_message_12(_jspx_page_context))
return;
out.write("\n </td>\n <td>\n ");
out.print( StringUtils.escapeHTMLTags(cs.getHostAddress()) );
out.write("\n /\n ");
out.print( StringUtils.escapeHTMLTags(cs.getHostName()) );
out.write("\n </td>\n </tr>\n ");
} else {
out.write("\n <tr>\n <td>\n ");
out.print( StringUtils.escapeHTMLTags(cs.getHostAddress()) );
out.write("\n /\n ");
out.print( StringUtils.escapeHTMLTags(cs.getHostName()) );
out.write("\n </td>\n </tr>\n ");
}
out.write("\n</tbody>\n</table>\n</div>\n\n ");
}
out.write('\n');
out.write('\n');
out.write('\n');
} else {
out.write("\n\n ");
if (!connectedCS && !connectedOF) {
out.write("\n<div class=\"error\">\n ");
if (_jspx_meth_fmt_message_13(_jspx_page_context))
return;
out.write("\n</div>\n\n<p>\n");
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_14 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_14.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_14.setParent(null);
_jspx_th_fmt_message_14.setKey("clearspace.status.disconnected.of_and_cs.description");
int _jspx_eval_fmt_message_14 = _jspx_th_fmt_message_14.doStartTag();
if (_jspx_eval_fmt_message_14 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
if (_jspx_eval_fmt_message_14 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
out = _jspx_page_context.pushBody();
_jspx_th_fmt_message_14.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
_jspx_th_fmt_message_14.doInitBody();
}
do {
out.write("\n ");
// fmt:param
org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_param_0 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _jspx_tagPool_fmt_param_value_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
_jspx_th_fmt_param_0.setPageContext(_jspx_page_context);
_jspx_th_fmt_param_0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_message_14);
_jspx_th_fmt_param_0.setValue( "<a href='clearspace-integration.jsp'>" );
int _jspx_eval_fmt_param_0 = _jspx_th_fmt_param_0.doStartTag();
if (_jspx_th_fmt_param_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_0);
return;
}
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_0);
out.write("\n ");
// fmt:param
org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_param_1 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _jspx_tagPool_fmt_param_value_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
_jspx_th_fmt_param_1.setPageContext(_jspx_page_context);
_jspx_th_fmt_param_1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_message_14);
_jspx_th_fmt_param_1.setValue( "</a>" );
int _jspx_eval_fmt_param_1 = _jspx_th_fmt_param_1.doStartTag();
if (_jspx_th_fmt_param_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_1);
return;
}
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_1);
out.write('\n');
int evalDoAfterBody = _jspx_th_fmt_message_14.doAfterBody();
if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
break;
} while (true);
if (_jspx_eval_fmt_message_14 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE)
out = _jspx_page_context.popBody();
}
if (_jspx_th_fmt_message_14.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key.reuse(_jspx_th_fmt_message_14);
return;
}
_jspx_tagPool_fmt_message_key.reuse(_jspx_th_fmt_message_14);
out.write("\n\n ");
} else if (!connectedCS) {
out.write("\n<div class=\"error\">\n ");
if (_jspx_meth_fmt_message_15(_jspx_page_context))
return;
out.write("\n</div>\n\n<p>\n");
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_16 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_16.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_16.setParent(null);
_jspx_th_fmt_message_16.setKey("clearspace.status.disconnected.cs.description");
int _jspx_eval_fmt_message_16 = _jspx_th_fmt_message_16.doStartTag();
if (_jspx_eval_fmt_message_16 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
if (_jspx_eval_fmt_message_16 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
out = _jspx_page_context.pushBody();
_jspx_th_fmt_message_16.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
_jspx_th_fmt_message_16.doInitBody();
}
do {
out.write("\n ");
// fmt:param
org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_param_2 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _jspx_tagPool_fmt_param_value_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
_jspx_th_fmt_param_2.setPageContext(_jspx_page_context);
_jspx_th_fmt_param_2.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_message_16);
_jspx_th_fmt_param_2.setValue( "<a href='clearspace-integration.jsp'>" );
int _jspx_eval_fmt_param_2 = _jspx_th_fmt_param_2.doStartTag();
if (_jspx_th_fmt_param_2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_2);
return;
}
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_2);
out.write("\n ");
// fmt:param
org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_param_3 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _jspx_tagPool_fmt_param_value_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
_jspx_th_fmt_param_3.setPageContext(_jspx_page_context);
_jspx_th_fmt_param_3.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_message_16);
_jspx_th_fmt_param_3.setValue( "</a>" );
int _jspx_eval_fmt_param_3 = _jspx_th_fmt_param_3.doStartTag();
if (_jspx_th_fmt_param_3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_3);
return;
}
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_3);
out.write('\n');
int evalDoAfterBody = _jspx_th_fmt_message_16.doAfterBody();
if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
break;
} while (true);
if (_jspx_eval_fmt_message_16 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE)
out = _jspx_page_context.popBody();
}
if (_jspx_th_fmt_message_16.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key.reuse(_jspx_th_fmt_message_16);
return;
}
_jspx_tagPool_fmt_message_key.reuse(_jspx_th_fmt_message_16);
out.write("\n ");
} else if (!connectedOF) {
out.write("\n<div class=\"error\">\n ");
if (_jspx_meth_fmt_message_17(_jspx_page_context))
return;
out.write("\n</div>\n\n<p>\n");
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_18 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_18.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_18.setParent(null);
_jspx_th_fmt_message_18.setKey("clearspace.status.disconnected.of.description");
int _jspx_eval_fmt_message_18 = _jspx_th_fmt_message_18.doStartTag();
if (_jspx_eval_fmt_message_18 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) {
if (_jspx_eval_fmt_message_18 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) {
out = _jspx_page_context.pushBody();
_jspx_th_fmt_message_18.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out);
_jspx_th_fmt_message_18.doInitBody();
}
do {
out.write("\n ");
// fmt:param
org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_param_4 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _jspx_tagPool_fmt_param_value_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
_jspx_th_fmt_param_4.setPageContext(_jspx_page_context);
_jspx_th_fmt_param_4.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_message_18);
_jspx_th_fmt_param_4.setValue( "<a href='clearspace-integration.jsp'>" );
int _jspx_eval_fmt_param_4 = _jspx_th_fmt_param_4.doStartTag();
if (_jspx_th_fmt_param_4.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_4);
return;
}
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_4);
out.write("\n ");
// fmt:param
org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_param_5 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _jspx_tagPool_fmt_param_value_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class);
_jspx_th_fmt_param_5.setPageContext(_jspx_page_context);
_jspx_th_fmt_param_5.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_message_18);
_jspx_th_fmt_param_5.setValue( "</a>" );
int _jspx_eval_fmt_param_5 = _jspx_th_fmt_param_5.doStartTag();
if (_jspx_th_fmt_param_5.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_5);
return;
}
_jspx_tagPool_fmt_param_value_nobody.reuse(_jspx_th_fmt_param_5);
out.write('\n');
int evalDoAfterBody = _jspx_th_fmt_message_18.doAfterBody();
if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN)
break;
} while (true);
if (_jspx_eval_fmt_message_18 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE)
out = _jspx_page_context.popBody();
}
if (_jspx_th_fmt_message_18.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key.reuse(_jspx_th_fmt_message_18);
return;
}
_jspx_tagPool_fmt_message_key.reuse(_jspx_th_fmt_message_18);
out.write("\n ");
}
out.write("\n<p>\n");
if (_jspx_meth_fmt_message_19(_jspx_page_context))
return;
out.write("\n</p>\n<form action=\"clearspace-status.jsp\" method=\"post\">\n <!-- BEGIN jive-buttons -->\n <div class=\"jive-buttons\">\n\n <!-- BEGIN right-aligned buttons -->\n <div align=\"left\">\n\n <input type=\"Submit\" name=\"test\" value=\"");
if (_jspx_meth_fmt_message_20(_jspx_page_context))
return;
out.write("\" id=\"jive-clearspace-test\" border=\"0\">\n\n <input type=\"Submit\" name=\"configure\" value=\"");
if (_jspx_meth_fmt_message_21(_jspx_page_context))
return;
out.write("\" id=\"jive-clearspace-configure\" border=\"0\">\n </div>\n <!-- END right-aligned buttons -->\n\n </div>\n <!-- END jive-buttons -->\n\n</form>\n\n");
}
out.write("\n\n</body>\n</html>\n");
} catch (Throwable t) {
if (!(t instanceof SkipPageException)){
out = _jspx_out;
if (out != null && out.getBufferSize() != 0)
out.clearBuffer();
if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
}
} finally {
if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
}
}
/**
 * Generated handler for a bodiless {@code <fmt:message key="clearspace.status.title"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate —
 * hand edits are lost when the page is recompiled.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_0(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_0 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_0.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_0.setParent(null);
_jspx_th_fmt_message_0.setKey("clearspace.status.title");
int _jspx_eval_fmt_message_0 = _jspx_th_fmt_message_0.doStartTag();
if (_jspx_th_fmt_message_0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
// Tag handler must be returned to the pool on every exit path.
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_0);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_0);
return false;
}
/**
 * Generated handler for a bodiless {@code <fmt:message key="global.test"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_1(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_1 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_1.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_1.setParent(null);
_jspx_th_fmt_message_1.setKey("global.test");
int _jspx_eval_fmt_message_1 = _jspx_th_fmt_message_1.doStartTag();
if (_jspx_th_fmt_message_1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_1);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_1);
return false;
}
/**
 * Generated handler for a bodiless {@code <fmt:message key="clearspace.status.error.config"/>}
 * tag: renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_2(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_2 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_2.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_2.setParent(null);
_jspx_th_fmt_message_2.setKey("clearspace.status.error.config");
int _jspx_eval_fmt_message_2 = _jspx_th_fmt_message_2.doStartTag();
if (_jspx_th_fmt_message_2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_2);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_2);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.description"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_3(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_3 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_3.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_3.setParent(null);
_jspx_th_fmt_message_3.setKey("clearspace.status.connected.description");
int _jspx_eval_fmt_message_3 = _jspx_th_fmt_message_3.doStartTag();
if (_jspx_th_fmt_message_3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_3);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_3);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.title"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_4(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_4 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_4.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_4.setParent(null);
_jspx_th_fmt_message_4.setKey("clearspace.status.connected.table.title");
int _jspx_eval_fmt_message_4 = _jspx_th_fmt_message_4.doStartTag();
if (_jspx_th_fmt_message_4.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_4);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_4);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.label.connected"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_5(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_5 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_5.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_5.setParent(null);
_jspx_th_fmt_message_5.setKey("clearspace.status.connected.table.label.connected");
int _jspx_eval_fmt_message_5 = _jspx_th_fmt_message_5.doStartTag();
if (_jspx_th_fmt_message_5.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_5);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_5);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.value.connected"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_6(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_6 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_6.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_6.setParent(null);
_jspx_th_fmt_message_6.setKey("clearspace.status.connected.table.value.connected");
int _jspx_eval_fmt_message_6 = _jspx_th_fmt_message_6.doStartTag();
if (_jspx_th_fmt_message_6.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_6);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_6);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.label.num_components"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_7(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_7 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_7.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_7.setParent(null);
_jspx_th_fmt_message_7.setKey("clearspace.status.connected.table.label.num_components");
int _jspx_eval_fmt_message_7 = _jspx_th_fmt_message_7.doStartTag();
if (_jspx_th_fmt_message_7.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_7);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_7);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.label.creation"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_8(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_8 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_8.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_8.setParent(null);
_jspx_th_fmt_message_8.setKey("clearspace.status.connected.table.label.creation");
int _jspx_eval_fmt_message_8 = _jspx_th_fmt_message_8.doStartTag();
if (_jspx_th_fmt_message_8.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_8);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_8);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.label.last_active"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_9(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_9 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_9.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_9.setParent(null);
_jspx_th_fmt_message_9.setKey("clearspace.status.connected.table.label.last_active");
int _jspx_eval_fmt_message_9 = _jspx_th_fmt_message_9.doStartTag();
if (_jspx_th_fmt_message_9.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_9);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_9);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.label.statistics"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_10(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_10 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_10.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_10.setParent(null);
_jspx_th_fmt_message_10.setKey("clearspace.status.connected.table.label.statistics");
int _jspx_eval_fmt_message_10 = _jspx_th_fmt_message_10.doStartTag();
if (_jspx_th_fmt_message_10.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_10);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_10);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.label.received"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_11(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_11 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_11.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_11.setParent(null);
_jspx_th_fmt_message_11.setKey("clearspace.status.connected.table.label.received");
int _jspx_eval_fmt_message_11 = _jspx_th_fmt_message_11.doStartTag();
if (_jspx_th_fmt_message_11.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_11);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_11);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.connected.table.label.hostname"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_12(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_12 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_12.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_12.setParent(null);
_jspx_th_fmt_message_12.setKey("clearspace.status.connected.table.label.hostname");
int _jspx_eval_fmt_message_12 = _jspx_th_fmt_message_12.doStartTag();
if (_jspx_th_fmt_message_12.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_12);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_12);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.error.disconnected.of_and_cs"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_13(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_13 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_13.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_13.setParent(null);
_jspx_th_fmt_message_13.setKey("clearspace.status.error.disconnected.of_and_cs");
int _jspx_eval_fmt_message_13 = _jspx_th_fmt_message_13.doStartTag();
if (_jspx_th_fmt_message_13.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_13);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_13);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.error.disconnected.cs"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * (Indices 14, 16 and 18 have tag bodies and are inlined in the service method.)
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_15(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_15 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_15.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_15.setParent(null);
_jspx_th_fmt_message_15.setKey("clearspace.status.error.disconnected.cs");
int _jspx_eval_fmt_message_15 = _jspx_th_fmt_message_15.doStartTag();
if (_jspx_th_fmt_message_15.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_15);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_15);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.error.disconnected.of"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_17(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_17 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_17.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_17.setParent(null);
_jspx_th_fmt_message_17.setKey("clearspace.status.error.disconnected.of");
int _jspx_eval_fmt_message_17 = _jspx_th_fmt_message_17.doStartTag();
if (_jspx_th_fmt_message_17.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_17);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_17);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.disconnected.buttons.description"/>} tag:
 * renders the localized message to the page output. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_19(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_19 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_19.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_19.setParent(null);
_jspx_th_fmt_message_19.setKey("clearspace.status.disconnected.buttons.description");
int _jspx_eval_fmt_message_19 = _jspx_th_fmt_message_19.doStartTag();
if (_jspx_th_fmt_message_19.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_19);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_19);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.disconnected.testbutton"/>} tag:
 * renders the localized "Test" button label. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_20(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_20 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_20.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_20.setParent(null);
_jspx_th_fmt_message_20.setKey("clearspace.status.disconnected.testbutton");
int _jspx_eval_fmt_message_20 = _jspx_th_fmt_message_20.doStartTag();
if (_jspx_th_fmt_message_20.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_20);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_20);
return false;
}
/**
 * Generated handler for a bodiless
 * {@code <fmt:message key="clearspace.status.disconnected.configbutton"/>} tag:
 * renders the localized "Configure" button label. JSP-compiler boilerplate.
 * @return {@code true} if the tag requested SKIP_PAGE (caller must stop rendering)
 */
private boolean _jspx_meth_fmt_message_21(PageContext _jspx_page_context)
throws Throwable {
PageContext pageContext = _jspx_page_context;
JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_message_21 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_message_21.setPageContext(_jspx_page_context);
_jspx_th_fmt_message_21.setParent(null);
_jspx_th_fmt_message_21.setKey("clearspace.status.disconnected.configbutton");
int _jspx_eval_fmt_message_21 = _jspx_th_fmt_message_21.doStartTag();
if (_jspx_th_fmt_message_21.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_21);
return true;
}
_jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_21);
return false;
}
}
|
|
/*
author:huydx
github:https://github.com/huydx
*/
package com.ktmt.vlcamera.custom;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Stack;
import com.ktmt.vlcamera.model.BitmapOperationMap;
import com.ktmt.vlcamera.model.DraggableBitmap;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.*;
import android.widget.ImageView;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.PointF;
import android.graphics.RectF;
public class DraggableImageView extends ImageView {
// Private state used to detect and track multi-touch editing gestures.
/** Gesture currently being applied to the active overlay bitmap. */
public enum EDITMODE {
NONE, DRAG, ZOOM, ROTATE
}
private static final String TAG = "Draggable Bitmap";
// True while a gesture is in progress (set by the touch listener).
private boolean mDrawOpacityBackground = false;
private Paint mPaint = new Paint();
// Overlay being manipulated; null when no overlay is under the finger.
private DraggableBitmap mActiveBitmap = null;
// Bounds of the inner image within this view. NOTE(review): appears to be
// assigned elsewhere in the class; addOverlayBitmap() dereferences it with
// no null check — confirm it is set before overlays are added.
private RectF mInnerImageBounds = null;
// Undo history: each entry pairs a bitmap with a snapshot of its matrix.
private Stack<BitmapOperationMap> mOperationStack = new Stack<BitmapOperationMap>();
// Stamp overlays; in hit-testing, later entries win (treated as top-most).
private List<DraggableBitmap> mOverlayBitmaps;
// constructors
/** Creates the view programmatically and installs the drag/zoom/rotate touch handler. */
public DraggableImageView(Context context) {
super(context);
initMembers();
this.setOnTouchListener(touchListener);
}
/** Creates the view from XML inflation and installs the drag/zoom/rotate touch handler. */
public DraggableImageView(Context context, AttributeSet attrs) {
super(context, attrs);
initMembers();
this.setOnTouchListener(touchListener);
}
/** Initializes collection members; invoked from every constructor. */
private void initMembers() {
mOverlayBitmaps = new ArrayList<DraggableBitmap>();
}
// Touch listener implementing drag / pinch-zoom / pinch-rotate of overlay bitmaps.
private OnTouchListener touchListener = new OnTouchListener() {
// Current gesture mode (drag, zoom, rotate).
private EDITMODE mEditMode = EDITMODE.NONE;
// Pointer snapshot {x0, x1, y0, y1} taken at ACTION_POINTER_DOWN;
// non-null only while a two-finger gesture is in progress.
private float[] mLastEvent;
// Finger position at ACTION_DOWN — origin for drag translation.
private PointF mStart = new PointF();
// Midpoint of the two fingers — pivot for pinch scaling.
private PointF mMid = new PointF();
// Finger separation at ACTION_POINTER_DOWN — baseline for the zoom ratio.
private float mOldDistance;
private float mNewRotation = 0f;
// Baseline from rotation(event) at ACTION_POINTER_DOWN — presumably the
// two-finger angle (rotation() is defined elsewhere in this class).
private float mDist = 0f;
// Works around Android's odd event ordering (MOVE -> UP -> MOVE -> UP):
// the commit in ACTION_UP only runs while this flag is still true.
private boolean touchMoveEndChecker = false;
@Override
public boolean onTouch(View v, MotionEvent event) {
// Dispatch on the masked action (pointer index bits stripped).
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case (MotionEvent.ACTION_DOWN):
touchMoveEndChecker = true;
mDrawOpacityBackground = true;
// Hit-test the overlays under the finger; bail out if none matched.
int activebmpIdx = getActiveBitmap(event.getX(), event.getY());
if (activebmpIdx != -1) {
mActiveBitmap = mOverlayBitmaps.get(activebmpIdx);
rearrangeOverlayList();
}
else {
mActiveBitmap = null;
break;
}
mLastEvent = null;
mEditMode = EDITMODE.DRAG;
mStart.set(event.getX(), event.getY());
if (mActiveBitmap != null) {
// Snapshot the matrix so MOVE can apply deltas relative to it.
mActiveBitmap.setSavedMatrix(mActiveBitmap.getCurrentMatrix());
}
break;
case (MotionEvent.ACTION_POINTER_DOWN):
touchMoveEndChecker = false;
mDrawOpacityBackground = true;
if (mActiveBitmap != null) {
mOldDistance = spacing(event);
// Ignore jittery near-coincident fingers (< 10px apart).
if (mOldDistance > 10f) {
mActiveBitmap.setSavedMatrix(mActiveBitmap.getCurrentMatrix());
midPoint(mMid, event);
mEditMode = EDITMODE.ZOOM;
}
mLastEvent = new float[4];
mLastEvent[0] = event.getX(0);
mLastEvent[1] = event.getX(1);
mLastEvent[2] = event.getY(0);
mLastEvent[3] = event.getY(1);
mDist = rotation(event);
}
break;
case (MotionEvent.ACTION_POINTER_UP):
mEditMode = EDITMODE.NONE;
break;
case (MotionEvent.ACTION_MOVE):
touchMoveEndChecker = false;
mDrawOpacityBackground = true;
if (mActiveBitmap != null) {
if (mEditMode == EDITMODE.DRAG) {
// Re-apply the saved matrix, then translate by the finger delta.
mActiveBitmap.setCurrentMatrix(mActiveBitmap.getSavedMatrix());
mActiveBitmap.getCurrentMatrix().postTranslate(event.getX() - mStart.x,
event.getY() - mStart.y);
} else if (mEditMode == EDITMODE.ZOOM && event.getPointerCount() == 2) {
float newDistance = spacing(event);
mActiveBitmap.setCurrentMatrix(mActiveBitmap.getSavedMatrix());
if (newDistance > 10f) {
// Scale about the pinch midpoint by the change in finger separation.
float scale = newDistance / mOldDistance;
mActiveBitmap.getCurrentMatrix()
.postScale(scale, scale, mMid.x, mMid.y);
}
if (mLastEvent != null) {
// Rotate about the bitmap's mapped center by the angle delta.
mNewRotation = rotation(event);
float r = mNewRotation - mDist;
RectF rec = new RectF(0, 0, mActiveBitmap.mBitmap.getWidth(),
mActiveBitmap.mBitmap.getHeight());
mActiveBitmap.getCurrentMatrix().mapRect(rec);
mActiveBitmap.getCurrentMatrix().postRotate(r,
rec.left + rec.width() / 2, rec.top + rec.height() / 2);
}
}
}
// NOTE(review): no break here — ACTION_MOVE falls through to ACTION_UP.
// The commit below is guarded by touchMoveEndChecker (false during MOVE),
// and the flag is reset to true at the end, which the MOVE->UP->MOVE->UP
// workaround seems to rely on. Confirm intent before adding a break.
case (MotionEvent.ACTION_UP):
if (touchMoveEndChecker) { // two consecutive ACTION_UPs, or a real finger-up after moving
// Commit: push the bitmap with a clone of its final matrix for undo.
if (mActiveBitmap != null) {
mOperationStack
.push(new BitmapOperationMap(mActiveBitmap, new Matrix(
mActiveBitmap.getCurrentMatrix()),
BitmapOperationMap.OPERATION.ADD));
mActiveBitmap.deActivate();
}
}
touchMoveEndChecker = true;
default:
break;
}
invalidate();
return true;
}
};
/**
 * Adds a bitmap overlay, positioning it at the top-left corner of the inner
 * image bounds and recording NEW/ADD operations so {@link #undo()} can
 * restore either the creation or the initial placement.
 *
 * @param dBitmap the overlay to add
 * @param scale   scale hint (currently unused; kept for interface compatibility)
 */
public void addOverlayBitmap(DraggableBitmap dBitmap, float scale) {
    // Offset the overlay so it starts inside the visible image area.
    Matrix margin = new Matrix();
    margin.postTranslate(mInnerImageBounds.left, mInnerImageBounds.top);
    dBitmap.setMarginMatrix(margin);
    Matrix current = new Matrix();
    current.postConcat(margin);
    dBitmap.setCurrentMatrix(current);
    // Record both the creation and the initial placement on the undo stack.
    mOperationStack.push(
            new BitmapOperationMap(dBitmap, null, BitmapOperationMap.OPERATION.NEW));
    mOperationStack.push(
            new BitmapOperationMap(dBitmap, dBitmap.getCurrentMatrix(),
                    BitmapOperationMap.OPERATION.ADD));
    mOverlayBitmaps.add(dBitmap);
}
/**
 * Finds the top-most overlay bitmap under the given touch point, deactivating
 * every overlay and then activating (and marking touched) the one hit.
 *
 * @param event_x touch x in view coordinates
 * @param event_y touch y in view coordinates
 * @return index of the hit overlay in {@code mOverlayBitmaps}, or -1 if none
 */
private int getActiveBitmap(float event_x, float event_y) {
    int hitIndex = -1;
    DraggableBitmap hit = null;
    // Later list entries are drawn on top, so the last match wins.
    for (int i = 0; i < mOverlayBitmaps.size(); i++) {
        DraggableBitmap candidate = mOverlayBitmaps.get(i);
        candidate.deActivate();
        RectF bounds = new RectF(0, 0,
                candidate.mBitmap.getWidth(), candidate.mBitmap.getHeight());
        // Fall back to the margin matrix for overlays never transformed yet.
        Matrix mtx = candidate.getCurrentMatrix() == null
                ? candidate.getMarginMatrix()
                : candidate.getCurrentMatrix();
        mtx.mapRect(bounds);
        boolean inside = event_x >= bounds.left
                && event_x < (bounds.left + bounds.width())
                && event_y >= bounds.top
                && event_y < (bounds.top + bounds.height());
        if (inside) {
            hit = candidate;
            hitIndex = i;
        }
    }
    if (hit != null) {
        if (!hit.isTouched()) {
            hit.setTouched(true);
        }
        hit.activate();
    }
    return hitIndex;
}
/** Euclidean distance between the first two pointers of a multi-touch event. */
private float spacing(MotionEvent event) {
    float dx = event.getX(0) - event.getX(1);
    float dy = event.getY(0) - event.getY(1);
    return (float) Math.sqrt(dx * dx + dy * dy);
}
/** Writes the midpoint of the first two pointers into {@code point}. */
private void midPoint(PointF point, MotionEvent event) {
    float sumX = event.getX(0) + event.getX(1);
    float sumY = event.getY(0) + event.getY(1);
    point.set(sumX / 2, sumY / 2);
}
/**
 * Angle, in degrees, of the line joining the first two pointers — used to
 * track two-finger rotation gestures.
 */
private float rotation(MotionEvent event) {
    double dx = event.getX(0) - event.getX(1);
    double dy = event.getY(0) - event.getY(1);
    return (float) Math.toDegrees(Math.atan2(dy, dx));
}
/**
 * Exposes the live overlay list (not a copy — callers share mutable state).
 *
 * @return the list backing this view's overlays
 */
public List<DraggableBitmap> getOverlayList() {
    return this.mOverlayBitmaps;
}
/**
 * Reverts the most recent overlay operation.
 *
 * <p>Pops the newest entry off the operation stack; when older entries
 * remain, the new stack top (the previous recorded state) is used as the
 * state to restore. When the popped entry was the only one, the popped entry
 * itself is applied.
 */
public void undo() {
    if (!mOperationStack.empty()) {
        BitmapOperationMap prev = mOperationStack.pop();
        if (!mOperationStack.empty()) { // current stack is final operation
            // Restore to the state recorded just before the undone operation.
            prev = mOperationStack.peek();
        }
        DraggableBitmap bmp = prev.getDraggableBitmap();
        Matrix mtx = prev.getOperationMatrix();
        switch (prev.getOption()) {
        case NEW: // if action is create new, then delete
            mOverlayBitmaps.remove(bmp);
            break;
        case ADD: // re-apply the previously recorded transform
            bmp.setCurrentMatrix(mtx);
            break;
        case DELETE: // not implement yet
            break;
        default:
            break;
        }
    }
}
@Override
protected void onDraw(Canvas canvas) { // [TODO] khi xoay man hinh error
super.onDraw(canvas);
RectF bitmapRect = getInnerBitmapSize();
if (bitmapRect == null) return;
mInnerImageBounds = bitmapRect;
canvas.clipRect(bitmapRect);
// loop to draw all bitmap
Enumeration<DraggableBitmap> e = Collections.enumeration(mOverlayBitmaps);
while (e.hasMoreElements()) {
DraggableBitmap dBmp = (DraggableBitmap) e.nextElement();
if (true) {
if (dBmp.getCurrentMatrix() != null) {
canvas.drawBitmap(dBmp.mBitmap, dBmp.getCurrentMatrix(), null);
RectF r = getStampBounding(dBmp);
if (mDrawOpacityBackground && dBmp == mActiveBitmap) {
mPaint.setColor(0x00000000);
mPaint.setStyle(Style.FILL);
mPaint.setAlpha(20);
canvas.drawRect(r, mPaint);
}
}
}
}
}
/**
 * Computes the on-screen bounds of this view's drawable, mapped through the
 * current image matrix.
 *
 * @return the mapped bounds, or null when no drawable is set
 */
public RectF getInnerBitmapSize() {
    if (this.getDrawable() == null) return null;
    RectF bounds = new RectF(0, 0,
            this.getDrawable().getIntrinsicWidth(),
            this.getDrawable().getIntrinsicHeight());
    // Translate/scale the intrinsic rect into view coordinates.
    this.getImageMatrix().mapRect(bounds);
    return bounds;
}
/**
 * Maps an overlay's bitmap rectangle through its current matrix to get its
 * on-screen bounds.
 *
 * @param bmp the overlay to measure
 * @return mapped bounds, or null when the overlay has no bitmap
 */
private RectF getStampBounding(DraggableBitmap bmp) {
    if (bmp.mBitmap == null) return null;
    int w = bmp.mBitmap.getWidth();
    int h = bmp.mBitmap.getHeight();
    RectF bounds = new RectF(0, 0, w, h);
    bmp.getCurrentMatrix().mapRect(bounds);
    return bounds;
}
/**
 * Removes the currently active overlay from the list.
 *
 * <p>Fix: also clears {@code mActiveBitmap}, so that later gestures,
 * {@link #flipActiveBitmap()} or {@link #rearrangeOverlayList()} do not
 * operate on a bitmap that is no longer drawn (stale-reference bug).
 */
public void deleteActiveBitmap() {
    if (mActiveBitmap == null) return;
    mOverlayBitmaps.remove(mActiveBitmap);
    mActiveBitmap = null;
}
/**
 * Mirrors the active overlay horizontally around its own vertical axis.
 * A no-op (with a log entry) when there is no active overlay.
 *
 * <p>Fix: explicit null checks instead of catching NullPointerException as
 * control flow.
 */
public void flipActiveBitmap() {
    if (mActiveBitmap == null || mActiveBitmap.mBitmap == null
            || mActiveBitmap.getCurrentMatrix() == null) {
        Log.v(TAG, "active bitmap is null");
        return;
    }
    try {
        Matrix flip = new Matrix();
        flip.setScale(-1, 1);
        // Translate back by the bitmap width so the mirrored image stays in place.
        flip.postTranslate((float) (mActiveBitmap.mBitmap.getWidth()), (float) 0);
        Matrix mtx = mActiveBitmap.getCurrentMatrix();
        mtx.preConcat(flip);
        mActiveBitmap.setCurrentMatrix(mtx);
    } catch (Exception e) {
        Log.v(TAG, "error ocurred");
    }
}
/**
 * Moves the active overlay to the end of the list so it is drawn on top.
 *
 * <p>Fix: guards against the active bitmap being null or absent from the
 * list — the original called {@code remove(-1)}, which throws
 * {@link IndexOutOfBoundsException}.
 */
public void rearrangeOverlayList() {
    int idx = mOverlayBitmaps.indexOf(mActiveBitmap);
    if (idx < 0) return; // no active overlay to promote
    mOverlayBitmaps.remove(idx);
    mOverlayBitmaps.add(mActiveBitmap);
}
}
|
|
package com.example.moodly.Activities;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Handler;
import android.support.design.widget.AppBarLayout;
import android.support.design.widget.TabLayout;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.example.moodly.Controllers.MoodController;
import com.example.moodly.Controllers.UserController;
import com.example.moodly.R;
/**
 * Tabbed home screen showing the user's mood history and followed users'
 * moods, while periodically synchronizing pending mood changes with
 * ElasticSearch.
 */
public class MoodBase extends AppCompatActivity {
    /**
     * The {@link android.support.v4.view.PagerAdapter} that will provide
     * fragments for each of the sections. We use a
     * {@link FragmentPagerAdapter} derivative, which will keep every
     * loaded fragment in memory. If this becomes too memory intensive, it
     * may be best to switch to a
     * {@link android.support.v4.app.FragmentStatePagerAdapter}.
     */
    private SectionsPagerAdapter mSectionsPagerAdapter;
    /**
     * The {@link ViewPager} that will host the section contents.
     */
    private ViewPager mViewPager;
    /** Interval in milliseconds between background synchronization attempts. */
    private int repeatInterval = 30000;
    /** Drives {@link #synchronizeNetwork} on the main thread. */
    private Handler handler;
    /** Self-rescheduling task that pushes pending mood changes to the server. */
    Runnable synchronizeNetwork = new Runnable() {
        @Override
        public void run() {
            try {
                updateElasticSearch();
            } finally {
                // 100% guarantee that this always happens, even if
                // the update method throws an exception.
                handler.postDelayed(synchronizeNetwork, repeatInterval);
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_view_mood_list);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        // Pin the toolbar: disable its scroll behavior inside the AppBarLayout.
        AppBarLayout.LayoutParams params = (AppBarLayout.LayoutParams) toolbar.getLayoutParams();
        params.setScrollFlags(0);
        setSupportActionBar(toolbar);
        // Create the adapter that will return a fragment for each of the
        // primary sections of the activity.
        mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());
        // Set up the ViewPager with the sections adapter.
        mViewPager = (ViewPager) findViewById(R.id.container);
        mViewPager.setAdapter(mSectionsPagerAdapter);
        TabLayout tabLayout = (TabLayout) findViewById(R.id.tabs);
        tabLayout.setupWithViewPager(mViewPager);
        // Kick off the periodic synchronization loop.
        handler = new Handler();
        synchronizeNetwork.run();
    }

    @Override
    protected void onDestroy() {
        // Fix: stop the self-rescheduling sync task. Without this the Handler
        // keeps posting after the activity is destroyed, leaking the activity
        // and showing "Not connected" toasts forever.
        if (handler != null) {
            handler.removeCallbacks(synchronizeNetwork);
        }
        super.onDestroy();
    }

    /**
     * Synchronization of mood events depending if network is available or
     * not.
     */
    private void updateElasticSearch() {
        if (networkAvailable()) {
            if (MoodController.getInstance().getAddCompletion()) {
                MoodController.getInstance().syncAddList();
            }
            if (MoodController.getInstance().getDeleteCompletion()) {
                MoodController.getInstance().syncDeleteList();
            }
        } else {
            Toast.makeText(MoodBase.this, "Not connected", Toast.LENGTH_SHORT).show();
        }
    }

    /**
     * Checks if the application is currently connected to the internet or not.
     * @return boolean if the application is connected to the internet or not
     */
    private boolean networkAvailable() {
        ConnectivityManager connectivityManager
                = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
        return activeNetworkInfo != null && activeNetworkInfo.isConnected();
    }

    /**
     * Inflate the menu; this adds items to the action bar if it is present.
     * @param menu menu to inflate into
     * @return true if successful
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_view_mood_list, menu);
        return true;
    }

    /**
     * Based on MenuItem, it will start the relevant activity.
     * @param item MenuItem chosen
     * @return boolean if activity is started successfully
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        switch (item.getItemId()) {
            case R.id.show_map:
                Toast.makeText(this, "Showing Map", Toast.LENGTH_SHORT).show();
                final Intent intentMap = new Intent();
                intentMap.setClass(MoodBase.this, MapViewMoods.class);
                // Tab 0 shows the user's own history, tab 1 the followed users.
                if (mViewPager.getCurrentItem() == 0) {
                    intentMap.putExtra("list_type", true);
                } else {
                    intentMap.putExtra("list_type", false);
                }
                startActivity(intentMap);
                return true;
            case R.id.action_social:
                if (networkAvailable()) {
                    Toast.makeText(this, "Social", Toast.LENGTH_SHORT).show();
                    Intent intent = new Intent(this, SocialBase.class);
                    startActivity(intent);
                    return super.onOptionsItemSelected(item);
                } else {
                    Toast.makeText(this, "Cannot access social tab when offline!", Toast.LENGTH_SHORT).show();
                    return true;
                }
            case R.id.log_out:
                Toast.makeText(this, "Goodbye, " + UserController.getInstance().getCurrentUser().getName(), Toast.LENGTH_SHORT).show();
                Intent logOut = new Intent(this, LoginScreen.class);
                logOut.putExtra("toClear", "YES");
                // Clear the back stack so BACK cannot return past the login screen.
                logOut.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
                startActivity(logOut);
                // Fix: the original fell through to default here.
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    /**
     * A {@link FragmentPagerAdapter} that returns a fragment corresponding to
     * one of the sections/tabs/pages.
     */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {
        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        /**
         * Returns the fragment for the given tab position.
         * @param position 0 for history, 1 for following
         * @return the tab fragment, or null for an unknown position
         */
        @Override
        public Fragment getItem(int position) {
            switch (position) {
                case 0:
                    MoodHistoryList tab1 = new MoodHistoryList();
                    return tab1;
                case 1:
                    MoodFollowingList tab2 = new MoodFollowingList();
                    return tab2;
                default:
                    return null;
            }
        }

        /**
         * get page counts
         * @return 2
         */
        @Override
        public int getCount() {
            // Two pages: History and Following.
            return 2;
        }

        /**
         * Gets the page title.
         * @param position tab position
         * @return page title
         */
        @Override
        public CharSequence getPageTitle(int position) {
            switch (position) {
                case 0:
                    return "History";
                case 1:
                    return "Following";
            }
            return null;
        }
    }
}
|
|
package com.oregonscientific.meep.communicator.activity;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.net.wifi.WifiManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.RemoteException;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.AdapterView.OnItemLongClickListener;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import com.oregonscientific.meep.ServiceManager;
import com.oregonscientific.meep.account.Account;
import com.oregonscientific.meep.account.AccountManager;
import com.oregonscientific.meep.account.IAccountServiceCallback;
import com.oregonscientific.meep.communicator.CommunicatorService;
import com.oregonscientific.meep.communicator.CommunicatorServiceConnector;
import com.oregonscientific.meep.communicator.CommunicatorServiceHandler.FriendRequestStatus;
import com.oregonscientific.meep.communicator.Conversation;
import com.oregonscientific.meep.communicator.ConversationMessage;
import com.oregonscientific.meep.communicator.Friend;
import com.oregonscientific.meep.communicator.ICommunicatorServiceCallback;
import com.oregonscientific.meep.communicator.R;
import com.oregonscientific.meep.communicator.User;
import com.oregonscientific.meep.communicator.view.IDeleteFriendPopUpInterface;
import com.oregonscientific.meep.communicator.view.IPopupInterface;
import com.oregonscientific.meep.communicator.view.PopUpFragment;
import com.oregonscientific.meep.communicator.view.conversation.Emoticon;
import com.oregonscientific.meep.communicator.view.friend.BaseFriend;
import com.oregonscientific.meep.communicator.view.friend.FriendAdapter;
import com.oregonscientific.meep.permission.PermissionManager;
import com.oregonscientific.meep.util.BitmapUtils;
import com.oregonscientific.meep.util.ImageDownloader;
import com.oregonscientific.meep.util.NetworkUtils;
import com.oregonscientific.meep.widget.StrokedTextView;
/**
 * Main activity of MEEP Communicator: shows the friend grid and hosts the
 * add/search/delete friend dialogs and the conversation view.
 */
public class CommunicatorActivity extends FragmentActivity {
// Friend tile currently showing the delete (cross) overlay; null when not in delete mode.
private View friendInDeleteMode = null;
private final String TAG = getClass().getSimpleName();
private FriendAdapter friendAdapter = null;
// Fragment-manager tags for the conversation and popup dialogs.
private final String CONVERSATION_FRAGMENT = "conversationFragment";
private final String POPUP_DIALOG = "dialog";
private String IMAGE_CACHE_DIR = "communicator";
// Connection to the background communicator service.
private CommunicatorServiceConnector mConnector;
private ImageDownloader mImageDownloader = null;
// Currently signed-in user (or last signed-in user when offline).
private User mUser = null;
/**
 * Account-service callbacks. Sign-in/out may arrive off the UI thread
 * (AIDL stub), so they are posted via runOnUiThread before touching views.
 */
private final IAccountServiceCallback mAccountServiceCallback = new IAccountServiceCallback.Stub() {
    @Override
    public void onUpdateUser(boolean arg0, String arg1, Account arg2)
            throws RemoteException {
        // No-op: user-profile updates are not handled here.
    }
    @Override
    public void onSignOut(boolean arg0, String arg1, Account arg2)
            throws RemoteException {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                onSignOutCallback();
            }
        });
    }
    @Override
    public void onSignIn(boolean arg0, String arg1, Account arg2)
            throws RemoteException {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                onSignInCallback();
            }
        });
    }
};
/**
 * Callbacks from the communicator service. Most callbacks hop to the UI
 * thread via runOnUiThread before updating views; onServiceConnected first
 * resolves the current user on a background executor.
 */
private final ICommunicatorServiceCallback mCommunicatorCallback = new ICommunicatorServiceCallback() {
    @Override
    public void onServiceDisconnected() {
        // No cleanup required here.
    }
    @Override
    public void onServiceConnected() {
        final Handler handler = new Handler();
        ExecutorService service = Executors.newSingleThreadExecutor();
        service.execute(new Runnable() {
            @Override
            public void run() {
                // Resolve the user off the UI thread; fall back to the last
                // signed-in user when nobody is logged in (offline mode).
                User user = getLoggedInUser();
                boolean isOffline = true;
                if (user == null) {
                    user = getLastLoggedInUser();
                    isOffline = true;
                } else {
                    isOffline = false;
                }
                final boolean statusLightState = !isOffline;
                if (user != null) {
                    setUser(user);
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            // update user profile
                            updateUserProfile(getUser());
                            refreshFriendList(getUser());
                            getFriendListFromServer();
                            setStatusLight(statusLightState);
                            if (!statusLightState)
                                showNetworkDialog();
                        }
                    });
                }
            }
        });
    }
    @Override
    public void onFriendRequestSent(String meepTag, String errorMessage) {
        // NOTE(review): unlike the other callbacks this one is not posted to
        // the UI thread — confirm the calling thread is safe for dialogs.
        doFriendRequestSent(meepTag, errorMessage);
    }
    @Override
    public void onFriendRequestReceived(
            final String meepTag,
            final String nickname,
            final String friendMessage,
            final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doFriendRequestReceived(meepTag, nickname, friendMessage, errorMessage);
            }
        });
    }
    @Override
    public void onFriendListReceived(final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doFriendListReceived(errorMessage);
                // to perform pending actions (notification-launched intents)
                performPendingAction();
            }
        });
    }
    @Override
    public void onFriendDeleted(final String accountId, final String nickname, final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doFriendDeleted(accountId, nickname, errorMessage);
            }
        });
    }
    @Override
    public void onFriendAccepted(final String meepTag, final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doFriendAccepted(meepTag, errorMessage);
            }
        });
    }
    @Override
    public void onChatMessageSent(final String message, final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doMessageSent(message, errorMessage);
            }
        });
    }
    @Override
    public void onChatMessageReceived(final ConversationMessage message, final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doChatMessageReceived(message, errorMessage);
            }
        });
    }
    @Override
    public void onFriendSearched(final String meepTag, final String nickname, final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doFriendSearched(meepTag, nickname, errorMessage);
            }
        });
    }
    @Override
    public void onFriendRequestStatusReceived(final String meepTag, final String name,
            final FriendRequestStatus status, final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doFriendRequestStatusReceived(meepTag, name, status, errorMessage);
            }
        });
    }
    @Override
    public void onFriendRejected(final String meepTag, final String errorMessage) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                doFriendRejected(meepTag, errorMessage);
            }
        });
    }
};
/**
 * Connects to the communicator service, registers for account sign-in/out
 * events and builds the friend-list UI.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Bind to the communicator service; results arrive on mCommunicatorCallback.
    mConnector = new CommunicatorServiceConnector(this, mCommunicatorCallback);
    mConnector.connect();
    // Listen for account events (unregistered again in onDestroy).
    AccountManager accountManager = (AccountManager) ServiceManager.getService(
            CommunicatorActivity.this, ServiceManager.ACCOUNT_SERVICE);
    accountManager.registerCallback(mAccountServiceCallback);
    initUI();
}
/**
 * Handles chat messages and friend requests forwarded from the notification
 * center via the launching intent's action, if any.
 */
private void performPendingAction() {
    Intent intent = getIntent();
    if (intent == null) {
        return; // no pending intent
    }
    String action = intent.getAction();
    if (action == null) {
        return; // no pending action
    }
    if (CommunicatorService.COMMUNICATOR_ACTION_RECEIVED_FRIEND_REQUEST.equals(action)) {
        Bundle extras = intent.getExtras();
        if (extras == null) {
            return;
        }
        String meepTag = extras.getString(CommunicatorService.NOTIFICATION_KEY_MEEP_TAG);
        String nickname = extras.getString(CommunicatorService.NOTIFICATION_KEY_NICKNAME);
        String friendMessage = extras.getString(CommunicatorService.NOTIFICATION_KEY_MESSAGE);
        if (meepTag != null && nickname != null) {
            doFriendRequestReceived(meepTag, nickname, friendMessage, null);
        }
    } else if (CommunicatorService.COMMUNICATOR_ACTION_RECEIVED_CHAT_MESSAGE.equals(action)) {
        if (mConnector == null) {
            return;
        }
        CommunicatorService service = mConnector.getService();
        if (service == null) {
            return;
        }
        Bundle extras = intent.getExtras();
        if (extras == null) {
            return;
        }
        // Look the message up by id and jump straight into its conversation.
        Long conversationMsgId = extras.getLong(CommunicatorService.NOTIFICATION_KEY_CONVERSATION_MESSAGE_ID);
        ConversationMessage cm = service.getConversationMessage(conversationMsgId);
        if (cm != null) {
            showConversationDialog(cm.getConversation().getFriend());
        }
    }
}
/**
 * Updates the unread-message badge on the grid tile belonging to the given
 * friend (matched by account id). No-op when the friend is not displayed.
 *
 * @param friend      friend whose unread count is to be updated
 * @param unreadCount new unread message count
 */
private void setUnreadCount(Friend friend, int unreadCount) {
    final GridView gridView = (GridView) findViewById(R.id.friends);
    if (gridView.getAdapter() == null) {
        return;
    }
    FriendAdapter adapter = (FriendAdapter) gridView.getAdapter();
    // Locate the first adapter position whose account id matches.
    int position = -1;
    for (int i = 0; i < adapter.getCount(); i++) {
        Friend candidate = adapter.getItem(i);
        if (candidate.getAccountId() != null
                && friend.getAccountId() != null
                && friend.getAccountId().equals(candidate.getAccountId())) {
            position = i;
            break;
        }
    }
    if (position == -1) {
        return;
    }
    BaseFriend friendView = (BaseFriend) gridView.getChildAt(position);
    if (friendView != null) {
        friendView.setUnreadCount(unreadCount);
        adapter.notifyDataSetChanged();
    }
}
/**
 * Gets all friends of a user from the local database via the communicator
 * service.
 *
 * @param user user whose friends are wanted
 * @return the user's friends, or null when the service is unavailable
 */
private List<Friend> getFriends(User user) {
    if (mConnector == null) {
        return null;
    }
    CommunicatorService service = mConnector.getService();
    return service == null ? null : service.getFriends(user);
}
/**
 * Builds the friend-list screen: wires the add-friends button, the friend
 * grid (tap opens a conversation, long-press enters delete mode) and the
 * scroll-transition arrows, then shows the loading indicator until the
 * friend list arrives from the service.
 */
private void initUI() {
    setContentView(R.layout.friend_view);
    showLoading();
    // Adapter starts empty; it is filled once the service reports friends.
    final List<Friend> friendList = new ArrayList<Friend>();
    // set listener for add friends button
    final LinearLayout addFriends = (LinearLayout) findViewById(R.id.add_friends);
    addFriends.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            showSearchFriendPopup();
        }
    });
    // set friends to adapter
    final GridView gridView = (GridView) findViewById(R.id.friends);
    final FriendAdapter friendAdapter = new FriendAdapter(this, friendList);
    gridView.setAdapter(friendAdapter);
    // go to conversation view when friend is clicked
    gridView.setOnItemClickListener(new OnItemClickListener() {
        @Override
        public void onItemClick(
                AdapterView<?> parent,
                View view,
                int position,
                long id) {
            GridView gridView = (GridView) findViewById(R.id.friends);
            if (gridView != null) {
                Friend friend = null;
                FriendAdapter adapter = (FriendAdapter)gridView.getAdapter();
                if (adapter != null && adapter.getCount() > position) {
                    friend = adapter.getItem(position);
                }
                // NOTE(review): friend can still be null here; presumably
                // showConversationDialog tolerates null — confirm.
                showConversationDialog(friend);
            }
        }
    });
    // show delete icon when friend is long clicked
    gridView.setOnItemLongClickListener(new OnItemLongClickListener() {
        @Override
        public boolean onItemLongClick(
                AdapterView<?> parent,
                View view,
                final int position,
                long id) {
            GridView gridView = (GridView) findViewById(R.id.friends);
            gridView.setEnabled(false);
            // Scroll so the long-pressed tile's row is fully on screen.
            int rowNumber = position / gridView.getNumColumns();
            gridView.smoothScrollToPosition(rowNumber * gridView.getNumColumns());
            setAddFriendsButtonEnabled(false);
            BaseFriend friend = (BaseFriend) view;
            // Dim the background and all other tiles; keep the pressed one opaque.
            View home = (View) parent.getParent();
            home.getBackground().setAlpha(70);
            for (int i = 0; i < gridView.getCount(); i++) {
                View friendView = gridView.getChildAt(i);
                if (friendView != null) {
                    friendView.setAlpha(0.2f);
                }
            }
            friend.setAlpha(1.0f);
            friend.findViewById(R.id.unread_count).setAlpha(0.1f);
            friendInDeleteMode = friend;
            // Reveal the delete cross; tapping it asks for confirmation.
            ImageView cross = (ImageView) friend.findViewById(R.id.cross);
            cross.setVisibility(View.VISIBLE);
            cross.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    GridView gridView = (GridView) findViewById(R.id.friends);
                    FriendAdapter frdAdapter = (FriendAdapter) gridView.getAdapter();
                    Friend friend = frdAdapter.getItem(position);
                    showDeleteFriendPopup(friend);
                    // Leave delete mode (onBackPressed restores the grid state).
                    onBackPressed();
                }
            });
            return true;
        }
    });
    // Scroll-direction hint arrows; hidden until scrolling makes them relevant.
    final ImageView transitionUpView = (ImageView) findViewById(R.id.transition_up);
    final ImageView transitionDownView = (ImageView) findViewById(R.id.transition_down);
    transitionUpView.setVisibility(View.INVISIBLE);
    transitionDownView.setVisibility(View.INVISIBLE);
    gridView.setOnScrollListener(new OnScrollListener() {
        @Override
        public void onScrollStateChanged(AbsListView view, int scrollState) {
            // Ignore
        }
        @Override
        public void onScroll(AbsListView view, int firstVisibleItem,
                int visibleItemCount, int totalItemCount) {
            // Show the "more above" arrow whenever the list is scrolled down.
            if (gridView.getFirstVisiblePosition() == 0) {
                transitionUpView.setVisibility(View.INVISIBLE);
            } else {
                transitionUpView.setVisibility(View.VISIBLE);
            }
        }
    });
    setStatusLight(false);
}
/**
 * Shows a yes/no confirmation dialog for deleting the given friend. On
 * confirmation the removal is forwarded to the communicator service.
 *
 * @param friend the friend to delete
 */
private void showDeleteFriendPopup(Friend friend) {
    String accountId = friend.getAccountId();
    String nickname = friend.getName();
    IDeleteFriendPopUpInterface listener = new IDeleteFriendPopUpInterface() {
        @Override
        public void onYesButtonPressed(PopUpFragment fragment, String listenerAccId) {
            if (mConnector != null) {
                CommunicatorService service = mConnector.getService();
                if (service != null) {
                    service.removeFriend(listenerAccId);
                }
                // Detach the listener before dismissing to avoid stale callbacks.
                fragment.setDeleteFriendPopupListener(null);
                fragment.dismiss();
            }
        }
        @Override
        public void onNoButtonPressed(PopUpFragment fragment) {
            fragment.setDeleteFriendPopupListener(null);
            fragment.dismiss();
        }
    };
    PopUpFragment dialog = PopUpFragment.newInstance(
            PopUpFragment.DELETE_FRIEND_DIALOG_ID, accountId, nickname);
    dialog.setDeleteFriendPopupListener(listener);
    dialog.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Shows the search-friend dialog. Input is validated (non-empty, not the
 * user's own MEEP tag) before the search is dispatched to a background
 * thread. Re-entry is ignored while a search dialog is already showing.
 */
private void showSearchFriendPopup() {
    final IPopupInterface listener = new IPopupInterface() {
        @Override
        public void onAddButtonPressed(PopUpFragment fragment, String message) {
            // Not used by the search dialog.
        }
        @Override
        public void onDeclineButtonPressed(PopUpFragment thisFragment,
                String meepTag) {}
        @Override
        public void onAcceptButtonPressed(final PopUpFragment thisFragment,
                String meepTag) {}
        @Override
        public void onSearchButtonPressed(
                final PopUpFragment fragment,
                final String message) {
            // Reject empty search text with an error dialog.
            if (message == null || message.length() == 0) {
                fragment.dismiss();
                showErrorDialog(getString(R.string.please_enter_meep_id), null, PopUpFragment.ERROR_ACTION_SEARCH_FRIEND);
                return;
            }
            final User user = getUser();
            if (user == null) {
                // No user in session: just close whichever dialog instance exists.
                if (fragment == null) {
                    PopUpFragment popupFragment = (PopUpFragment)getSupportFragmentManager().findFragmentByTag(POPUP_DIALOG);
                    popupFragment.dismiss();
                } else {
                    fragment.dismiss();
                }
                return;
            }
            // Searching for yourself is not allowed.
            if (message.equals(user.getMeepTag())) {
                showErrorDialog(getString(R.string.cannot_add_yourself), null);
                return;
            }
            Handler handler = new Handler();
            searchFriendAsynchronously(user, handler, message);
            fragment.dismiss();
        }
        @Override
        public void onYesButtonPressed(PopUpFragment thisFragment) {
            // Ignore
        }
        @Override
        public void onNoButtonPressed(PopUpFragment thisFragment) {
            // Ignore
        }
        @Override
        public void onOkButtonPressed(PopUpFragment thisFragment, String action) {
            // Ignore
        }
    };
    setAddFriendsButtonEnabled(false);
    // Don't stack a second search dialog on top of an existing one.
    PopUpFragment oldFragment = (PopUpFragment) getSupportFragmentManager().findFragmentByTag(POPUP_DIALOG);
    if (oldFragment != null) {
        if (oldFragment.getType() == PopUpFragment.SEARCH_FRIENDS_DIALOG_ID) {
            return;
        }
    }
    PopUpFragment newFragment = PopUpFragment.newInstance(PopUpFragment.SEARCH_FRIENDS_DIALOG_ID, null, null);
    newFragment.setListener(listener);
    newFragment.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Searches for a user by MEEP tag on a background thread. The search text is
 * first run through the permission service's bad-word filter; blocked text
 * surfaces an error dialog on the UI thread instead of hitting the server.
 *
 * @param user    the currently signed-in user (may be null; then no-op)
 * @param handler handler used to post UI work back to the main thread
 * @param message the MEEP tag text to search for
 */
private void searchFriendAsynchronously(final User user, final Handler handler, final String message) {
    Runnable r = new Runnable() {
        @Override
        public void run() {
            ExecutorService service = Executors.newSingleThreadExecutor();
            service.execute(new Runnable() {
                @Override
                public void run() {
                    if (user != null) {
                        PermissionManager pm = (PermissionManager) ServiceManager.getService(CommunicatorActivity.this, ServiceManager.PERMISSION_SERVICE);
                        Log.e(TAG, "Getting permission @ SearchUser");
                        if (pm != null) {
                            if (!pm.containsBadwordBlocking(user.getAccountId(), message)) {
                                Log.e(TAG, "Finish get permission @ Search User");
                                mConnector.getService().searchUser(message);
                            } else {
                                handler.post(new Runnable() {
                                    public void run() {
                                        showErrorDialog(getString(R.string.blocked), PopUpFragment.ERROR_ACTION_SEARCH_FRIEND);
                                    }
                                });
                            }
                        }
                    }
                }
            });
            // Fix: release the executor once the task completes; the original
            // leaked a live non-daemon worker thread on every search.
            service.shutdown();
        }
    };
    // spawn another thread to search friend
    new Thread(r).start();
}
/** Shows the "friend deleted" confirmation popup for the given nickname. */
private void showDeleteFriendSuccessPopup(String nickname) {
    PopUpFragment dialog = PopUpFragment.newInstance(
            PopUpFragment.DELETE_FRIEND_SUCCESS_DIALOG_ID, nickname, null);
    dialog.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/** Shows the "friend request sent" success popup. */
private void showFriendRequestSuccessDialog() {
    PopUpFragment dialog = PopUpFragment.newInstance(
            PopUpFragment.FRIEND_REQUEST_SUCCESS_DIALOG_ID, null, null);
    dialog.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Inflates the communicator menu into the action bar.
 *
 * @param menu the menu to populate
 * @return true — the menu should be displayed
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.communicator, menu);
    return true;
}
/**
 * Back button: when a friend tile is in delete mode, exits delete mode and
 * restores the normal grid appearance instead of leaving the activity.
 */
@Override
public void onBackPressed() {
    // override back button when in delete mode
    if (friendInDeleteMode != null) {
        try {
            // Hide the delete cross and undo the dimming applied on long-press.
            ImageView cross = (ImageView) friendInDeleteMode.findViewById(R.id.cross);
            cross.setVisibility(View.GONE);
            ((View) friendInDeleteMode.getParent().getParent()).getBackground().setAlpha(255);
            GridView gridView = (GridView) findViewById(R.id.friends);
            for (int i = 0; i < gridView.getCount(); i++) {
                View friendView = gridView.getChildAt(i);
                if (friendView != null) {
                    friendView.setAlpha(1.0f);
                    friendView.findViewById(R.id.unread_count).setAlpha(1.0f);
                }
            }
            friendInDeleteMode = null;
        } catch (Exception ex) {
            // refresh friend list if encountered error restoring the UI
            refreshFriendList(getUser());
        } finally {
            // Re-enable interaction regardless of how restoration went.
            setGridViewEnabled(true);
            setAddFriendsButtonEnabled(true);
        }
    } else {
        super.onBackPressed();
    }
}
/**
 * Releases service bindings, callbacks and cached conversation/emoticon
 * state before the activity is torn down.
 */
@Override
public void onDestroy() {
    if (mConnector != null) {
        mConnector.disconnect();
    }
    // Mirror the registration done in onCreate.
    AccountManager accountManager = (AccountManager) ServiceManager.getService(
            CommunicatorActivity.this, ServiceManager.ACCOUNT_SERVICE);
    accountManager.unregisterCallback(mAccountServiceCallback);
    ServiceManager.unbindServices(this);
    ConversationFragment.clearConversations();
    Emoticon.clearEmoticonMap();
    super.onDestroy();
}
/**
 * Set the enabled state of the friend grid view.
 *
 * @param enabled true to enable the grid view, false to disable it
 */
public void setGridViewEnabled(boolean enabled) {
    GridView grid = (GridView) findViewById(R.id.friends);
    grid.setEnabled(enabled);
}
/**
 * Set the enabled state of the add-friend button.
 *
 * @param enabled true to enable the button, false to disable it
 */
public void setAddFriendsButtonEnabled(boolean enabled) {
    LinearLayout addButton = (LinearLayout) findViewById(R.id.add_friends);
    addButton.setEnabled(enabled);
}
/**
 * Adds a friend to the grid adapter, refreshes the list, and updates the
 * "more below" transition arrow once layout has settled.
 *
 * <p>Fix: the original dereferenced {@code gridView} (and its adapter)
 * outside the null checks, crashing with an NPE when the view or adapter
 * was absent.
 *
 * @param newFriend the new friend to append
 */
public void addFriendToAdapter(Friend newFriend) {
    GridView gridView = (GridView) findViewById(R.id.friends);
    if (gridView == null) {
        return;
    }
    FriendAdapter adapter = (FriendAdapter) gridView.getAdapter();
    if (adapter != null) {
        adapter.add(newFriend);
        adapter.notifyDataSetChanged();
    }
    gridView.post(new Runnable() {
        @Override
        public void run() {
            GridView gridView = (GridView) findViewById(R.id.friends);
            ImageView transitionDownView = (ImageView) findViewById(R.id.transition_down);
            // Show the "more below" arrow only when items extend past the viewport.
            if (gridView.getLastVisiblePosition() < gridView.getCount()) {
                transitionDownView.setVisibility(View.VISIBLE);
            } else {
                transitionDownView.setVisibility(View.INVISIBLE);
            }
            dismissLoading();
        }
    });
}
/**
 * Handle response from search friend request: on success shows an
 * add-friend dialog for the found user; on failure shows an error dialog.
 *
 * @param meepTag The meepTag of the searched friend
 * @param nickname The nickname of the searched friend
 * @param errorMessage The error message from the server response, or null
 */
private void doFriendSearched(final String meepTag, final String nickname, String errorMessage) {
    if (errorMessage != null) {
        showErrorDialog(errorMessage, getString(R.string.no_results), PopUpFragment.ERROR_ACTION_SEARCH_FRIEND);
        return;
    }
    final IPopupInterface listener = new IPopupInterface() {
        @Override
        public void onAddButtonPressed(final PopUpFragment fragment, final String message) {
            // Send the friend request in the background, then close the dialog.
            final Handler handler = new Handler();
            addFriendAsynchronously(handler, message, nickname, meepTag);
            fragment.dismiss();
        }
        @Override
        public void onDeclineButtonPressed(PopUpFragment thisFragment,
                String meepTag) {}
        @Override
        public void onAcceptButtonPressed(PopUpFragment thisFragment,
                String meepTag) {}
        @Override
        public void onSearchButtonPressed(PopUpFragment fragment,
                String message) {
            // Not used by the add-friend dialog.
        }
        @Override
        public void onYesButtonPressed(PopUpFragment thisFragment) {
            // Ignore
        }
        @Override
        public void onNoButtonPressed(PopUpFragment thisFragment) {
            // Ignore
        }
        @Override
        public void onOkButtonPressed(PopUpFragment thisFragment, String action) {
            // Ignore
        }
    };
    PopUpFragment newFragment = PopUpFragment.newInstance(PopUpFragment.ADD_FRIENDS_DIALOG_ID, nickname, null);
    newFragment.setListener(listener);
    newFragment.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Sends a friend request on a background thread.
 *
 * If no personal message was entered, a localized default hint is used. The
 * message is screened by the PermissionManager; a blocked message surfaces
 * an error dialog on the UI thread instead of being sent.
 *
 * Fix: the previous version started a Thread whose only job was to submit
 * the work to a fresh single-thread ExecutorService that was never shut
 * down, leaking the executor's worker thread on every call. The work now
 * runs directly on one background thread.
 *
 * @param handler Handler bound to the UI thread, used to post error dialogs
 * @param message Optional personal message typed by the user (may be null or empty)
 * @param nickname Nickname of the friend, used to build the default message
 * @param meepTag The meepTag identifying the friend to add
 */
private void addFriendAsynchronously(final Handler handler, final String message, final String nickname, final String meepTag) {
    Runnable r = new Runnable() {
        @Override
        public void run() {
            User user = getLoggedInUser();
            String enclosedMessage = message;
            if (user != null) {
                if (enclosedMessage == null || enclosedMessage.length() == 0) {
                    // Fall back to the localized default request text.
                    enclosedMessage = getString(R.string.add_friend_hint, nickname);
                }
                PermissionManager pm = (PermissionManager) ServiceManager.getService(CommunicatorActivity.this, ServiceManager.PERMISSION_SERVICE);
                if (!pm.isBadword(user.getAccountId(), enclosedMessage)) {
                    mConnector.getService().addFriend(meepTag, enclosedMessage);
                } else {
                    // Message contains a blocked word: report on the UI thread.
                    handler.post(new Runnable() {
                        public void run() {
                            showErrorDialog(getString(R.string.blocked), null);
                        }
                    });
                }
            }
        }
    };
    new Thread(r).start();
}
/**
 * Handle the server response to a sent friend request: show the success
 * dialog on success, or an error dialog otherwise.
 *
 * @param meepTag The meepTag of the friend the request was sent to
 * @param errorMessage Server error message, or null when the request succeeded
 */
private void doFriendRequestSent(String meepTag, String errorMessage) {
    if (errorMessage == null) {
        showFriendRequestSuccessDialog();
    } else {
        showErrorDialog(errorMessage, null);
    }
}
/**
 * Handle the server response to a delete-friend request: on success show
 * the confirmation popup and rebuild the friend grid, otherwise show an
 * error dialog.
 *
 * @param accountId The unique identifier of the deleted friend
 * @param nickname The nickname shown in the confirmation popup
 * @param errorMessage Server error message, or null when the request succeeded
 */
private void doFriendDeleted(String accountId, String nickname, String errorMessage) {
    if (errorMessage == null) {
        showDeleteFriendSuccessPopup(nickname);
        refreshFriendList(getUser());
    } else {
        showErrorDialog(errorMessage, null);
    }
}
/**
 * Display the conversation dialog between the user and one friend.
 *
 * Marks the friend's messages as read, cancels any pending notifications
 * for that friend, disables the friend grid while the dialog is up, and
 * shows (creating if necessary) the ConversationFragment for the friend's
 * conversation.
 *
 * @param friend The friend whose conversation is shown; ignored when null
 */
private void showConversationDialog(final Friend friend) {
    if (friend != null) {
        setMessagesToRead(friend);
        if (mConnector != null) {
            CommunicatorService service = mConnector.getService();
            if (service != null) {
                service.cancelAllNotificationsForFriend(getUser(), friend);
            }
        }
        // Prevent further grid taps while the conversation dialog is open.
        setGridViewEnabled(false);
        ConversationFragment fragment = (ConversationFragment) getSupportFragmentManager().findFragmentByTag(CONVERSATION_FRAGMENT);
        if (fragment == null) {
            final Context context = this;
            IConversationInterface conversationInterface = new IConversationInterface() {
                @Override
                public void onSendButtonPressed(final ConversationFragment fragment,
                        final String message, final ConversationMessage conversationMessage) {
                    // Handler created on the UI thread so results post back to it.
                    final Handler handler = new Handler();
                    // NOTE(review): a fresh single-thread executor is created per
                    // send and never shut down — worker thread leaks per message;
                    // consider a shared executor or shutdown() after execute.
                    ExecutorService service = Executors.newSingleThreadExecutor();
                    service.execute(new Runnable() {
                        @Override
                        public void run() {
                            if (!NetworkUtils.hasConnection(CommunicatorActivity.this)) {
                                // No connectivity: tell the user instead of sending.
                                handler.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        showNetworkDialog2();
                                    }
                                });
                                return;
                            }
                            sendMessageAsynchronously(fragment, handler, message, conversationMessage);
                        }
                    });
                }
                @Override
                public void onConversationTabChanged(
                        ConversationFragment fragment,
                        Conversation conversation) {
                    if (conversation != null) {
                        Friend friend = conversation.getFriend();
                        setMessagesToRead(friend);
                    }
                }
            };
            fragment = ConversationFragment.newInstance();
            fragment.setListener(conversationInterface);
        }
        // show the fragment
        // NOTE(review): mConnector/getService() are not null-checked here,
        // unlike the guarded access above — confirm the service is always
        // bound by the time a conversation can be opened.
        Conversation conversation = mConnector.getService().getConversation(getUser(), friend.getAccountId());
        if (conversation != null) {
            fragment.addConversation(this, friend, conversation);
            fragment.show(getSupportFragmentManager(), CONVERSATION_FRAGMENT);
        }
    }
}
/**
 * Screens and sends a chat message, then appends it to the open conversation.
 *
 * Must be called off the UI thread: replaceBadwordsBlocking performs
 * blocking work. The (possibly censored) message is sent via the connector
 * and the resulting ConversationMessage is posted back to the fragment on
 * the UI thread through the supplied handler.
 *
 * @param fragment The conversation fragment to append the sent message to
 * @param handler Handler bound to the UI thread
 * @param message The raw message text typed by the user
 * @param conversationMessage Template message carrying the target conversation
 */
private void sendMessageAsynchronously(final ConversationFragment fragment, final Handler handler, final String message, final ConversationMessage conversationMessage) {
    User user = getLoggedInUser();
    if (user != null) {
        PermissionManager pm = (PermissionManager) ServiceManager.getService(CommunicatorActivity.this, ServiceManager.PERMISSION_SERVICE);
        if (pm != null) {
            // Replace any blocked words before the message leaves the device.
            String replacedMessage = pm.replaceBadwordsBlocking(user.getAccountId(), message, CommunicatorService.BAD_WORD_REPLACEMENT_STRING);
            Friend friend = conversationMessage.getConversation().getFriend();
            mConnector.getService().sendChatMessage(friend.getAccountId(), replacedMessage);
            // Copy the template so the UI shows the censored text that was sent.
            final ConversationMessage newMessage = new ConversationMessage(conversationMessage);
            newMessage.setContent(replacedMessage);
            handler.post(new Runnable() {
                @Override
                public void run() {
                    fragment.addMessageToMessageList(newMessage);
                }
            });
        }
    }
}
/**
 * Mark every message between the user and the given friend as read, then
 * refresh that friend's unread-count badge.
 *
 * @param friend The friend whose conversation is marked as read
 */
private void setMessagesToRead(Friend friend) {
    if (mConnector == null) {
        return;
    }
    CommunicatorService service = mConnector.getService();
    if (service == null) {
        return;
    }
    service.setMessagesToRead(friend);
    refreshUnreadCount(friend);
}
/**
 * Mark a single message between the user and the given friend as read, then
 * refresh that friend's unread-count badge.
 *
 * @param friend The friend the message belongs to
 * @param message The conversation message to mark as read
 */
private void setMessageToRead(Friend friend, ConversationMessage message) {
    if (mConnector == null) {
        return;
    }
    CommunicatorService service = mConnector.getService();
    if (service == null) {
        return;
    }
    service.setMessageToRead(friend, message);
    refreshUnreadCount(friend);
}
/**
 * Handle a chat message pushed from the server.
 *
 * Refreshes the sender's unread badge, re-sorts the friend list, and — if
 * the conversation dialog for that sender is currently open — marks the
 * message as read and forwards it to the fragment.
 *
 * @param message The message received from the server; ignored when null
 * @param errorMessage The error message from the server response, or null on success
 */
private void doChatMessageReceived(ConversationMessage message, String errorMessage) {
    if (errorMessage != null) {
        showErrorDialog(errorMessage, null);
        return;
    }
    if (message == null)
        return;
    // update unread badge on friend list
    Friend friend = null;
    try {
        friend = message.getConversation().getFriend();
        if (friend != null) {
            // Re-read the friend row from the database so counts are current.
            friend.refresh();
        }
    } catch ( SQLException ex) {
        Log.e(TAG, "Fail to refresh friend");
        return;
    }
    if (friend != null) {
        refreshUnreadCount(friend);
        sortFriendList(getUser());
        ConversationFragment fragment = (ConversationFragment) getSupportFragmentManager().findFragmentByTag(CONVERSATION_FRAGMENT);
        if (fragment != null) {
            // The account id of the friend whose chat tab is open is stashed in
            // shared preferences — presumably by the conversation UI; verify.
            SharedPreferences settings = getSharedPreferences("MEEP Communicator", 0);
            String currentlyChattingAccountId= settings.getString("FriendCurrentChatting", null);
            if (friend.getAccountId().equals(currentlyChattingAccountId)) {
                // Message is for the visible conversation: mark read and display.
                setMessageToRead(friend, message);
                fragment.onReceiveConversationMessage(message);
            }
        }
    }
}
/**
 * Handle a friend request received from the server: on success, show the
 * accept/decline popup; on error, show an error dialog.
 *
 * @param meepTag The meepTag of the requesting friend
 * @param nickname The nickname of the requesting friend
 * @param friendMessage The personal message attached to the request, may be null
 * @param errorMessage The server error message, null when the request was successful
 */
private void doFriendRequestReceived(String meepTag, String nickname, String friendMessage, String errorMessage) {
    if (errorMessage != null) {
        showErrorDialog(errorMessage, null);
        return;
    }
    final IPopupInterface listener = new IPopupInterface() {
        @Override
        public void onAddButtonPressed(PopUpFragment fragment, String message) {
        }
        @Override
        public void onDeclineButtonPressed(PopUpFragment thisFragment,
                String meepTag) {
            // Forward the decline to the service; only dismiss once it was sent.
            if (mConnector != null) {
                CommunicatorService service = mConnector.getService();
                if (service != null) {
                    service.declineFriend(meepTag);
                    thisFragment.dismiss();
                }
            }
        }
        @Override
        public void onAcceptButtonPressed(PopUpFragment thisFragment,
                String meepTag) {
            // Forward the accept to the service; only dismiss once it was sent.
            if (mConnector != null) {
                CommunicatorService service = mConnector.getService();
                if (service != null) {
                    service.acceptFriend(meepTag);
                    thisFragment.dismiss();
                }
            }
        }
        @Override
        public void onSearchButtonPressed(PopUpFragment fragment, String message) {
            // Ignore
        }
        @Override
        public void onYesButtonPressed(PopUpFragment thisFragment) {
            // Ignore
        }
        @Override
        public void onNoButtonPressed(PopUpFragment thisFragment) {
            // Ignore
        }
        @Override
        public void onOkButtonPressed(PopUpFragment thisFragment, String action) {
            // Ignore
        }
    };
    // Dialog layout expects a non-null message string.
    friendMessage = friendMessage == null ? "" : friendMessage;
    PopUpFragment newFragment = PopUpFragment.newInstance(PopUpFragment.ACCEPT_FRIEND_DIALOG_ID, meepTag, nickname, friendMessage);
    newFragment.setListener(listener);
    newFragment.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Handle the friend list received from the server: rebuild the friend grid
 * on success, otherwise show an error dialog.
 *
 * @param errorMessage Server error message, null when the request was successful
 */
private void doFriendListReceived(String errorMessage) {
    if (errorMessage == null) {
        refreshFriendList(getUser());
    } else {
        showErrorDialog(errorMessage, null);
    }
}
/**
 * Handle a friend-accepted response: rebuild the friend grid on success,
 * otherwise show an error dialog.
 *
 * @param accountId The unique identifier of the accepted friend
 * @param errorMessage Server error message, null when the request was successful
 */
private void doFriendAccepted(String accountId, String errorMessage) {
    if (errorMessage == null) {
        refreshFriendList(getUser());
    } else {
        showErrorDialog(errorMessage, null);
    }
}
/**
 * Handle a friend-rejected response. Only failures need user feedback; a
 * successful rejection is silent here.
 *
 * @param accountId The unique identifier of the rejected friend
 * @param errorMessage Server error message, null when the request was successful
 */
private void doFriendRejected(String accountId, String errorMessage) {
    if (errorMessage != null) {
        showErrorDialog(errorMessage, null);
    }
}
/**
 * Handle the acknowledgement of a sent message: display an error dialog if
 * the server reported a failure, otherwise do nothing.
 *
 * @param message The message that was sent
 * @param errorMessage Server error message, null when the send succeeded
 */
private void doMessageSent(String message, String errorMessage) {
    if (errorMessage == null) {
        return;
    }
    showErrorDialog(errorMessage, null);
}
/**
 * Handle a friend-request status update from the server and show the
 * matching status popup. Unexpected status values are silently ignored.
 *
 * @param meepTag The meepTag of the friend the request concerned
 * @param name The display name substituted into the status message
 * @param status The request's resolution state
 * @param errorMessage Server error message, null when the request was successful
 */
private void doFriendRequestStatusReceived(String meepTag, String name, FriendRequestStatus status, String errorMessage) {
    if (errorMessage != null) {
        showErrorDialog(errorMessage, null);
        return;
    }
    // Map the status to its message resource and optional dialog title.
    final int rString;
    final String title;
    switch (status) {
        case ALL_ACCEPT:
            rString = R.string.friend_request_approved;
            title = getString(R.string.nice);
            break;
        case OPPOSITE_PARENT_REJECT:
            rString = R.string.friend_request_oppo_parent_reject;
            title = null;
            break;
        case SELF_PARENT_REJECT:
            rString = R.string.friend_request_self_parent_reject;
            title = null;
            break;
        case FRIEND_REJECT:
            rString = R.string.friend_request_friend_reject;
            title = null;
            break;
        default:
            // Unexpected friend request status: nothing to show.
            return;
    }
    String message = getString(rString, name);
    PopUpFragment newFragment = PopUpFragment.newInstance(PopUpFragment.FRIEND_REQUEST_STATUS_DIALOG_ID, message, title, null);
    newFragment.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Refresh and sort the friend list grid.
 *
 * Shows the loading spinner while the list is rebuilt, asks the service to
 * sort the freshly-created adapter, and toggles the "more friends below"
 * arrow once the grid has laid out.
 *
 * Fix: the scroll-down-arrow test used getLastVisiblePosition() < getCount(),
 * which is true for every non-empty list (the last position is at most
 * count - 1), so the arrow was always shown. It now compares against
 * count - 1 so the arrow only appears when items are actually off-screen.
 */
private synchronized void refreshFriendList(User user) {
    if (user == null)
        return;
    showLoading();
    List<Friend> friends = getFriends(user);
    if (friends == null || friends.size() == 0) {
        dismissLoading();
        return;
    }
    GridView gridView = (GridView) findViewById(R.id.friends);
    FriendAdapter frdAdapter = new FriendAdapter(this, friends);
    if (mConnector != null) {
        CommunicatorService service = mConnector.getService();
        if (service != null) {
            service.sortFriends(user, frdAdapter);
        }
    }
    gridView.setAdapter(frdAdapter);
    // Posted so the visibility check runs after the grid has been laid out.
    gridView.post(new Runnable() {
        @Override
        public void run() {
            GridView gridView = (GridView) findViewById(R.id.friends);
            ImageView transitionDownView = (ImageView) findViewById(R.id.transition_down);
            if (gridView.getLastVisiblePosition() < gridView.getCount() - 1) {
                // Some friends are below the fold: show the scroll-down hint.
                transitionDownView.setVisibility(View.VISIBLE);
            } else {
                transitionDownView.setVisibility(View.INVISIBLE);
            }
            dismissLoading();
        }
    });
    // Kept as a safety net in case the posted runnable never runs (e.g. the
    // grid is detached before the next layout pass).
    dismissLoading();
}
/**
 * Refresh the unread-message badge for a specific friend from the database.
 *
 * @param friend The friend whose badge is updated
 */
private void refreshUnreadCount(Friend friend) {
    setUnreadCount(friend, getUnreadMessageCount(friend));
}
/**
 * Retrieve the unread message count from the service for a specific friend.
 *
 * Fix: guards against mConnector being null (the service may not be bound
 * yet), matching the null checks used by the sibling accessors such as
 * getLoggedInUser(); previously this would throw a NullPointerException.
 *
 * @param friend The friend whose unread count is requested
 * @return The number of unread messages, or 0 if the service is unavailable
 */
public int getUnreadMessageCount(Friend friend) {
    if (mConnector != null) {
        CommunicatorService service = mConnector.getService();
        if (service != null) {
            return service.getUnreadMessageCount(getUser(), friend.getAccountId());
        }
    }
    return 0;
}
/**
 * Retrieve the currently logged-in user from the service.
 *
 * @return The logged-in user, or null if the service is not available
 */
public User getLoggedInUser() {
    if (mConnector == null) {
        return null;
    }
    CommunicatorService service = mConnector.getService();
    return service == null ? null : service.getLoggedInUser();
}
/**
 * Retrieve the last logged-in user from the service.
 *
 * @return The last logged-in user, or null if the service is not available
 */
public User getLastLoggedInUser() {
    if (mConnector == null) {
        return null;
    }
    CommunicatorService service = mConnector.getService();
    return service == null ? null : service.getLastLoggedInUser();
}
/**
 * Shows an error popup dialog.
 *
 * When an action identifier is supplied, a listener is attached whose OK
 * handler performs that follow-up action (see performErrorAction); with a
 * null action the dialog is purely informational.
 *
 * @param errorMessage The message shown in the popup
 * @param title Optional dialog title, may be null
 * @param action Optional follow-up action identifier for the OK button, may be null
 */
private void showErrorDialog(String errorMessage, String title, String action) {
    IPopupInterface listener = null;
    if (action != null) {
        listener = new IPopupInterface() {
            @Override
            public void onYesButtonPressed(PopUpFragment thisFragment) {
                // Ignore
            }
            @Override
            public void onSearchButtonPressed(PopUpFragment fragment, String message) {
                // Ignore
            }
            @Override
            public void onOkButtonPressed(PopUpFragment thisFragment, String action) {
                // Only OK carries the follow-up behavior.
                performErrorAction(action);
            }
            @Override
            public void onNoButtonPressed(PopUpFragment thisFragment) {
                // Ignore
            }
            @Override
            public void onDeclineButtonPressed(PopUpFragment thisFragment, String meepTag) {
                // Ignore
            }
            @Override
            public void onAddButtonPressed(PopUpFragment fragment, String message) {
                // Ignore
            }
            @Override
            public void onAcceptButtonPressed(PopUpFragment thisFragment, String meepTag) {
                // Ignore
            }
        };
    }
    PopUpFragment fragment = PopUpFragment.newInstance(PopUpFragment.ERROR_MESSAGE_DIALOG_ID, errorMessage, title, action);
    fragment.setListener(listener);
    fragment.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Convenience overload: shows an error dialog with no title.
 *
 * @param errorMessage The message shown in the popup
 * @param action Optional follow-up action identifier for the OK button, may be null
 */
private void showErrorDialog(String errorMessage, String action) {
    showErrorDialog(errorMessage, null, action);
}
/**
 * Perform the follow-up action attached to an error dialog's OK button.
 *
 * Fix: compares with the constant on the left so a null action is a silent
 * no-op instead of throwing a NullPointerException.
 *
 * @param action Action identifier supplied by the dialog, may be null
 */
private void performErrorAction (String action) {
    if (PopUpFragment.ERROR_ACTION_SEARCH_FRIEND.equals(action)) {
        showSearchFriendPopup();
    }
}
/**
 * Update the user profile header: the logged-in user's name and avatar.
 *
 * @param user The user whose profile is rendered; must not be null
 */
private void updateUserProfile(final User user) {
    StrokedTextView name = (StrokedTextView) findViewById(R.id.kidname);
    if (name != null) {
        name.setText(user.getFirstName());
    }
    // set current user's icon
    ImageView icon = (ImageView) findViewById(R.id.kidicon);
    if (icon != null) {
        String iconAddress = user.getIconAddress();
        if (iconAddress == null || iconAddress.length() == 0) {
            // No avatar URL: use the bundled default, downsampled to the view size.
            // NOTE(review): getWidth()/getHeight() are 0 before the first layout
            // pass — confirm this only runs after the header has been laid out.
            Bitmap bitmap = BitmapUtils.decodeSampledBitmapFromResource(getResources(), R.drawable.default_avatar, icon.getWidth(), icon.getHeight());
            icon.setImageBitmap(bitmap);
        } else {
            // Fetch (or load from cache) the remote avatar asynchronously.
            ImageDownloader imageDownloader = getImageDownloader();
            if (imageDownloader != null) {
                imageDownloader.download(iconAddress, R.drawable.avatar, icon.getLayoutParams().width, icon.getLayoutParams().height, icon);
            }
        }
    }
}
/**
 * Reorder the friend list using the service's sorting algorithm and refresh
 * the grid UI.
 *
 * Fix: guards against a null adapter (the grid may not have been populated
 * yet) and a null mConnector (service not bound), matching the defensive
 * checks used by refreshFriendList; previously either case threw a
 * NullPointerException.
 */
private void sortFriendList(User user) {
    GridView gridView = (GridView) findViewById(R.id.friends);
    FriendAdapter adapter = gridView == null ? null : (FriendAdapter) gridView.getAdapter();
    if (adapter == null) {
        return;
    }
    if (mConnector != null) {
        CommunicatorService service = mConnector.getService();
        if (service != null) {
            service.sortFriends(user, adapter);
        }
    }
    adapter.notifyDataSetChanged();
}
/**
 * Show the loading spinner.
 */
private void showLoading() {
    ((ProgressBar) findViewById(R.id.loading)).setVisibility(View.VISIBLE);
}
/**
 * Hide the loading spinner.
 */
private void dismissLoading() {
    ((ProgressBar) findViewById(R.id.loading)).setVisibility(View.INVISIBLE);
}
/**
 * Get the lazily-created ImageDownloader instance, creating it on first use.
 *
 * @return the shared ImageDownloader object
 */
public ImageDownloader getImageDownloader() {
    if (mImageDownloader != null) {
        return mImageDownloader;
    }
    mImageDownloader = new ImageDownloader(this, IMAGE_CACHE_DIR);
    return mImageDownloader;
}
/**
 * Ask the communicator service to synchronize the friend list with the
 * server. A no-op when the service is not bound.
 */
private void getFriendListFromServer() {
    if (mConnector == null) {
        return;
    }
    CommunicatorService service = mConnector.getService();
    if (service != null) {
        service.syncFriendList();
    }
}
/** @return the user currently shown by this activity (thread-safe accessor). */
public synchronized User getUser() {
    return mUser;
}
/** Sets the user currently shown by this activity (thread-safe mutator). */
private synchronized void setUser(User user) {
    mUser = user;
}
/**
 * Switch the connection status light to green (online) or red (offline).
 * Safe to call from any thread: the update is marshalled to the UI thread.
 *
 * @param online true to show the green light, false for red
 */
private void setStatusLight(final boolean online) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            ImageView statusView = (ImageView) findViewById(R.id.statusLight);
            if (statusView == null) {
                return;
            }
            int drawableId = online ? R.drawable.green : R.drawable.red;
            statusView.setImageDrawable(getResources().getDrawable(drawableId));
        }
    });
}
/**
 * Shows the "no network" dialog. Its Yes button opens the system Wi-Fi
 * picker; No simply dismisses the dialog.
 */
private void showNetworkDialog() {
    final IPopupInterface listener = new IPopupInterface() {
        @Override
        public void onAddButtonPressed(PopUpFragment fragment, String message) {
        }
        @Override
        public void onDeclineButtonPressed(PopUpFragment thisFragment, String meepTag) {
            //TODO: decline a friend
            thisFragment.dismiss();
        }
        @Override
        public void onAcceptButtonPressed(PopUpFragment thisFragment, String meepTag) {
            // Ignore
        }
        @Override
        public void onSearchButtonPressed(PopUpFragment fragment, String message) {
            // Ignore
        }
        @Override
        public void onYesButtonPressed(PopUpFragment thisFragment) {
            // Hand off to the system Wi-Fi settings so the user can reconnect.
            Intent intent = new Intent(WifiManager.ACTION_PICK_WIFI_NETWORK);
            startActivity(intent);
            thisFragment.dismiss();
        }
        @Override
        public void onNoButtonPressed(PopUpFragment thisFragment) {
            thisFragment.dismiss();
        }
        @Override
        public void onOkButtonPressed(PopUpFragment thisFragment, String action) {
            // Ignore
        }
    };
    PopUpFragment fragment = PopUpFragment.newInstance(PopUpFragment.NETWORK_DIALOG_ID, null, null);
    fragment.setListener(listener);
    fragment.show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Shows the secondary "no network" notice (no action buttons wired up).
 */
private void showNetworkDialog2() {
    PopUpFragment.newInstance(PopUpFragment.NETWORK_DIALOG_2_ID, null, null)
            .show(getSupportFragmentManager(), POPUP_DIALOG);
}
/**
 * Called after a successful sign-in: fetches the logged-in user off the UI
 * thread, then updates the status light, profile UI and requests the friend
 * list on the UI thread.
 *
 * Fix: the single-thread executor is now shut down after its one task is
 * submitted; previously it was never shut down, leaking its worker thread
 * on every sign-in. shutdown() still lets the already-submitted task finish.
 */
private void onSignInCallback() {
    // Set current logged in user and get friend list
    final Handler handler = new Handler();
    ExecutorService service = Executors.newSingleThreadExecutor();
    service.execute(new Runnable() {
        @Override
        public void run() {
            final User user = getLoggedInUser();
            setStatusLight(true);
            if (user != null) {
                // UI updates must happen on the UI thread.
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        setUser(user);
                        // update user profile
                        showLoading();
                        updateUserProfile(getUser());
                        getFriendListFromServer();
                    }
                });
            }
        }
    });
    // Release the worker thread once the submitted task completes.
    service.shutdown();
}
/**
 * Called after sign-out: looks up the last logged-in user off the UI thread
 * and, when one exists, reuses it for the profile UI; otherwise clears the
 * current user. The status light goes red either way.
 *
 * Fix: the single-thread executor is now shut down after its one task is
 * submitted; previously it was never shut down, leaking its worker thread
 * on every sign-out. shutdown() still lets the already-submitted task finish.
 */
private void onSignOutCallback() {
    final Handler handler = new Handler();
    ExecutorService service = Executors.newSingleThreadExecutor();
    service.execute(new Runnable() {
        @Override
        public void run() {
            final User user = getLastLoggedInUser();
            setStatusLight(false);
            if (user != null) {
                // UI updates must happen on the UI thread.
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        setUser(user);
                        updateUserProfile(getUser());
                        getFriendListFromServer();
                    }
                });
            } else {
                setUser(null);
            }
        }
    });
    // Release the worker thread once the submitted task completes.
    service.shutdown();
}
/**
 * Fetch the sorted messages of a conversation from the service.
 *
 * @param conversation The conversation to read messages from
 * @param limit Maximum number of messages to return
 * @return The sorted messages, or null if the service is not available
 */
public List<ConversationMessage> getConversationMessages(Conversation conversation, int limit) {
    if (mConnector == null || mConnector.getService() == null) {
        return null;
    }
    return mConnector.getService().getSortedConversationMessages(conversation, limit);
}
@Override
protected void onPause() {
    super.onPause();
    // Detach the service callback while the activity is in the background.
    if (mConnector != null && mConnector.getService() != null) {
        mConnector.getService().setCallback(null);
    }
}
@Override
protected void onResume() {
    super.onResume();
    // Re-attach the service callback now that the activity is in the foreground.
    if (mConnector != null && mConnector.getService() != null) {
        mConnector.getService().setCallback(mCommunicatorCallback);
    }
}
}
|
|
package in.srain.cube.views.loadmore;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.AbsListView;
import android.widget.LinearLayout;
/**
* @author huqiu.lhq
*/
/**
 * Base container that adds "load more" behavior to an AbsListView wrapped in
 * a LinearLayout. Subclasses supply the concrete list view and footer
 * handling via {@link #retrieveAbsListView()}, {@link #addFooterView(View)}
 * and {@link #removeFooterView(View)}.
 *
 * Fixes:
 * - setLoadMoreView removed the NEW view instead of the previous footer when
 *   swapping footers, leaving the old footer attached.
 * - onReachBottom dereferenced mLoadMoreUIHandler without a null check,
 *   unlike every other use in this class.
 *
 * @author huqiu.lhq
 */
public abstract class LoadMoreContainerBase extends LinearLayout implements LoadMoreContainer {
    private AbsListView.OnScrollListener mOnScrollListener;
    private LoadMoreUIHandler mLoadMoreUIHandler;
    private LoadMoreHandler mLoadMoreHandler;
    private boolean mIsLoading;
    private boolean mHasMore = false;
    private boolean mAutoLoadMore = true;
    private boolean mLoadError = false;
    private boolean mListEmpty = true;
    private boolean mShowLoadingForFirstPage = false;
    private View mFooterView;
    private AbsListView mAbsListView;

    public LoadMoreContainerBase(Context context) {
        super(context);
    }

    public LoadMoreContainerBase(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        mAbsListView = retrieveAbsListView();
        init();
    }

    /**
     * @deprecated It's totally wrong. Use {@link #useDefaultFooter} instead.
     */
    @Deprecated
    public void useDefaultHeader() {
        useDefaultFooter();
    }

    /** Installs the library's default footer view and UI handler. */
    public void useDefaultFooter() {
        LoadMoreDefaultFooterView footerView = new LoadMoreDefaultFooterView(getContext());
        footerView.setVisibility(GONE);
        setLoadMoreView(footerView);
        setLoadMoreUIHandler(footerView);
    }

    /** Attaches any pre-set footer and hooks the scroll listener that detects the bottom. */
    private void init() {
        if (mFooterView != null) {
            addFooterView(mFooterView);
        }
        mAbsListView.setOnScrollListener(new AbsListView.OnScrollListener() {
            // True while the last row is (nearly) visible; acted on when idle.
            private boolean mIsEnd = false;

            @Override
            public void onScrollStateChanged(AbsListView view, int scrollState) {
                // Forward to the client's listener first.
                if (null != mOnScrollListener) {
                    mOnScrollListener.onScrollStateChanged(view, scrollState);
                }
                if (scrollState == SCROLL_STATE_IDLE) {
                    if (mIsEnd) {
                        onReachBottom();
                    }
                }
            }

            @Override
            public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
                if (null != mOnScrollListener) {
                    mOnScrollListener.onScroll(view, firstVisibleItem, visibleItemCount, totalItemCount);
                }
                if (firstVisibleItem + visibleItemCount >= totalItemCount - 1) {
                    mIsEnd = true;
                } else {
                    mIsEnd = false;
                }
            }
        });
    }

    /** Starts a load-more round unless one is running or there is nothing left to load. */
    private void tryToPerformLoadMore() {
        if (mIsLoading) {
            return;
        }
        // no more content and also not load for first page
        if (!mHasMore && !(mListEmpty && mShowLoadingForFirstPage)) {
            return;
        }
        mIsLoading = true;
        if (mLoadMoreUIHandler != null) {
            mLoadMoreUIHandler.onLoading(this);
        }
        if (null != mLoadMoreHandler) {
            mLoadMoreHandler.onLoadMore(this);
        }
    }

    /** Called when scrolling settles with the last row visible. */
    private void onReachBottom() {
        // if has error, just leave what it should be
        if (mLoadError) {
            return;
        }
        if (mAutoLoadMore) {
            tryToPerformLoadMore();
        } else {
            // Fix: guard against a missing UI handler (was an unconditional call).
            if (mHasMore && mLoadMoreUIHandler != null) {
                mLoadMoreUIHandler.onWaitToLoadMore(this);
            }
        }
    }

    @Override
    public void setShowLoadingForFirstPage(boolean showLoading) {
        mShowLoadingForFirstPage = showLoading;
    }

    @Override
    public void setAutoLoadMore(boolean autoLoadMore) {
        mAutoLoadMore = autoLoadMore;
    }

    @Override
    public void setOnScrollListener(AbsListView.OnScrollListener l) {
        mOnScrollListener = l;
    }

    @Override
    public void setLoadMoreView(View view) {
        // has not been initialized
        if (mAbsListView == null) {
            mFooterView = view;
            return;
        }
        // Fix: remove the PREVIOUS footer. The original called
        // removeFooterView(view) with the new view, so the old footer was
        // never detached when swapping footers.
        if (mFooterView != null && mFooterView != view) {
            removeFooterView(mFooterView);
        }
        // add current
        mFooterView = view;
        mFooterView.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                // Tapping the footer triggers a manual load-more.
                tryToPerformLoadMore();
            }
        });
        addFooterView(view);
    }

    @Override
    public void setLoadMoreUIHandler(LoadMoreUIHandler handler) {
        mLoadMoreUIHandler = handler;
    }

    @Override
    public void setLoadMoreHandler(LoadMoreHandler handler) {
        mLoadMoreHandler = handler;
    }

    /**
     * page has loaded
     *
     * @param emptyResult whether the page returned no items
     * @param hasMore whether more pages are available
     */
    @Override
    public void loadMoreFinish(boolean emptyResult, boolean hasMore) {
        mLoadError = false;
        mListEmpty = emptyResult;
        mIsLoading = false;
        mHasMore = hasMore;
        if (mLoadMoreUIHandler != null) {
            mLoadMoreUIHandler.onLoadFinish(this, emptyResult, hasMore);
        }
    }

    @Override
    public void loadMoreError(int errorCode, String errorMessage) {
        mIsLoading = false;
        mLoadError = true;
        if (mLoadMoreUIHandler != null) {
            mLoadMoreUIHandler.onLoadError(this, errorCode, errorMessage);
        }
    }

    /** Subclass hook: attach the footer to the concrete list view. */
    protected abstract void addFooterView(View view);

    /** Subclass hook: detach the footer from the concrete list view. */
    protected abstract void removeFooterView(View view);

    /** Subclass hook: supply the AbsListView this container wraps. */
    protected abstract AbsListView retrieveAbsListView();
}
|
|
// Copyright 2017 Yahoo Inc.
// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
package com.yahoo.bard.webservice.web.ratelimit;
import com.yahoo.bard.webservice.application.MetricRegistryFactory;
import com.yahoo.bard.webservice.config.SystemConfig;
import com.yahoo.bard.webservice.config.SystemConfigException;
import com.yahoo.bard.webservice.config.SystemConfigProvider;
import com.yahoo.bard.webservice.util.Utils;
import com.yahoo.bard.webservice.web.DataApiRequestTypeIdentifier;
import com.yahoo.bard.webservice.web.RateLimiter;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.security.Principal;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.validation.constraints.NotNull;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.SecurityContext;
/**
* This is the default implementation of a rate limiter.
*/
public class DefaultRateLimiter implements RateLimiter {
private static final Logger LOG = LoggerFactory.getLogger(DefaultRateLimiter.class);
protected static final SystemConfig SYSTEM_CONFIG = SystemConfigProvider.getInstance();
protected static final RateLimitRequestToken REJECT_REQUEST_TOKEN =
new CallbackRateLimitRequestToken(false, () -> { });
protected static final RateLimitRequestToken BYPASS_TOKEN =
new BypassRateLimitRequestToken();
// Property names
protected static final @NotNull String REQUEST_LIMIT_GLOBAL_KEY =
SYSTEM_CONFIG.getPackageVariableName("request_limit_global");
protected static final @NotNull String REQUEST_LIMIT_PER_USER_KEY =
SYSTEM_CONFIG.getPackageVariableName("request_limit_per_user");
protected static final @NotNull String REQUEST_LIMIT_UI_KEY =
SYSTEM_CONFIG.getPackageVariableName("request_limit_ui");
// Default values
protected static final int DEFAULT_REQUEST_LIMIT_GLOBAL = 70;
protected static final int DEFAULT_REQUEST_LIMIT_PER_USER = 2;
protected static final int DEFAULT_REQUEST_LIMIT_UI = 52;
protected static final MetricRegistry REGISTRY = MetricRegistryFactory.getRegistry();
protected static final int DISABLED_RATE = -1;
// Request limits
protected final int requestLimitGlobal;
protected final int requestLimitPerUser;
protected final int requestLimitUi;
// Live count holders
protected final AtomicInteger globalCount = new AtomicInteger();
protected final Map<String, AtomicInteger> userCounts = new ConcurrentHashMap<>();
protected final Counter requestGlobalCounter;
protected final Counter usersCounter;
protected final Meter requestBypassMeter;
protected final Meter requestUiMeter;
protected final Meter requestUserMeter;
protected final Meter rejectUiMeter;
protected final Meter rejectUserMeter;
/**
* Loads defaults and creates DefaultRateLimiter.
*
* @throws SystemConfigException If any parameters fail to load
*/
public DefaultRateLimiter() throws SystemConfigException {
// Load limits
requestLimitGlobal = SYSTEM_CONFIG.getIntProperty(REQUEST_LIMIT_GLOBAL_KEY, DEFAULT_REQUEST_LIMIT_GLOBAL);
requestLimitPerUser = SYSTEM_CONFIG.getIntProperty(REQUEST_LIMIT_PER_USER_KEY, DEFAULT_REQUEST_LIMIT_PER_USER);
requestLimitUi = SYSTEM_CONFIG.getIntProperty(REQUEST_LIMIT_UI_KEY, DEFAULT_REQUEST_LIMIT_UI);
// Register counters for currently active requests
usersCounter = REGISTRY.counter("ratelimit.count.users");
requestGlobalCounter = REGISTRY.counter("ratelimit.count.global");
// Register meters for number of requests
requestUserMeter = REGISTRY.meter("ratelimit.meter.request.user");
requestUiMeter = REGISTRY.meter("ratelimit.meter.request.ui");
requestBypassMeter = REGISTRY.meter("ratelimit.meter.request.bypass");
rejectUserMeter = REGISTRY.meter("ratelimit.meter.reject.user");
rejectUiMeter = REGISTRY.meter("ratelimit.meter.reject.ui");
}
/**
* Get the current count for this username. If user does not have a counter, create one.
*
* @param request The request context
* @param isUIQuery Flag to check if it is a UI query
* @param userName Username to get the count for
*
* @return The atomic count for the user
*/
protected AtomicInteger getCount(ContainerRequestContext request, boolean isUIQuery, String userName) {
AtomicInteger count = userCounts.get(userName);
// Create a counter if we don't have one yet
if (count == null) {
userCounts.putIfAbsent(userName, new AtomicInteger());
count = userCounts.get(userName);
usersCounter.inc();
}
return count;
}
/**
* Increment the initial count and check if the count has gone over the request limit.
*
* @param initialCount Initial count that we're incrementing and checking against the limit
* @param requestLimit Limit to check the incremented initial count against
*
* @return True if the incremented count is less than or equal to the request limit, false if it's gone over and the
* request limit isn't the DISABLED_RATE (-1).
*/
protected boolean incrementAndCheckCount(AtomicInteger initialCount, int requestLimit) {
int count = initialCount.incrementAndGet();
if (count > requestLimit && requestLimit != DISABLED_RATE) {
initialCount.decrementAndGet();
LOG.info("reject: {} > {}", count, requestLimit);
return false;
}
return true;
}
/**
* Do the house keeping needed to reject the request.
*
* @param rejectMeter Meter to count the rejection in
* @param isRejectGlobal Whether or not the rejection is on the global rate limit
* @param isUIQuery Whether or not the request is a UI Query
* @param userName Username of the user who made the request
*/
protected void rejectRequest(Meter rejectMeter, boolean isRejectGlobal, boolean isUIQuery, String userName) {
rejectMeter.mark();
String limitType = isRejectGlobal ? "GLOBAL" : isUIQuery ? "UI" : "USER";
LOG.info("{} limit {}", limitType, userName);
}
@Override
public RateLimitRequestToken getToken(ContainerRequestContext request) {
MultivaluedMap<String, String> headers = Utils.headersToLowerCase(request.getHeaders());
if (
DataApiRequestTypeIdentifier.isBypass(headers) ||
DataApiRequestTypeIdentifier.isCorsPreflight(request.getMethod(), request.getSecurityContext())
) {
// Bypass and CORS Preflight requests are unlimited
requestBypassMeter.mark();
return BYPASS_TOKEN;
}
SecurityContext securityContext = request.getSecurityContext();
Principal user = securityContext == null ? null : securityContext.getUserPrincipal();
String userName = String.valueOf(user == null ? null : user.getName());
boolean isUIQuery = DataApiRequestTypeIdentifier.isUi(headers);
Meter requestMeter;
Meter rejectMeter;
int requestLimit;
if (isUIQuery) {
requestMeter = requestUiMeter;
rejectMeter = rejectUiMeter;
requestLimit = requestLimitUi;
} else {
requestMeter = requestUserMeter;
rejectMeter = rejectUserMeter;
requestLimit = requestLimitPerUser;
}
AtomicInteger count = getCount(request, isUIQuery, userName);
return createNewRateLimitRequestToken(count, userName, isUIQuery, requestLimit, requestMeter, rejectMeter);
}
/**
 * Creates a new RateLimitRequestToken, reserving one slot against the global limit and one
 * against the caller's limit. Both reservations must succeed for the request to be admitted;
 * a failed user-level reservation rolls back the global one.
 *
 * @param count The atomic counter holding the number of in-flight requests the user owns
 * @param userName The user who launched the request
 * @param isUIQuery Whether or not this query was generated from the UI
 * @param requestLimit The maximum number of in-flight requests the user is allowed
 * @param requestMeter Meter tracking the number of requests that have been launched
 * @param rejectMeter Meter tracking the number of requests that have been rejected
 *
 * @return a new RateLimitRequestToken, representing an in-flight (or rejected) request that
 *         is tracked by the RateLimiter
 */
protected RateLimitRequestToken createNewRateLimitRequestToken(AtomicInteger count, String userName,
        boolean isUIQuery, int requestLimit, Meter requestMeter, Meter rejectMeter) {
    // Reserve a slot against the global limit first.
    boolean withinGlobalLimit = incrementAndCheckCount(globalCount, requestLimitGlobal);
    if (!withinGlobalLimit) {
        rejectRequest(rejectMeter, true, isUIQuery, userName);
        return REJECT_REQUEST_TOKEN;
    }
    // Then reserve a slot against the per-user (or UI) limit.
    boolean withinUserLimit = incrementAndCheckCount(count, requestLimit);
    if (!withinUserLimit) {
        // Roll back the global reservation made above.
        globalCount.decrementAndGet();
        rejectRequest(rejectMeter, false, isUIQuery, userName);
        return REJECT_REQUEST_TOKEN;
    }
    // Record the accepted request and current open connections, then hand out a token
    // whose cleanup callback releases both reservations when the request completes.
    requestMeter.mark();
    requestGlobalCounter.inc();
    return new CallbackRateLimitRequestToken(true, generateCleanupClosure(count, userName));
}
/**
 * Creates a callback to be passed to a token to execute when a request has completed. The callback
 * handles decrementing the global and user counters, clamping either counter back to zero (and
 * logging an error) if a decrement would drive it negative.
 *
 * @param count The AtomicInteger that stores the amount of in-flight requests an individual user owns
 * @param userName The name of the user that made the request
 *
 * @return A callback implementation to be given to a CallbackRateLimitRequestToken
 */
protected RateLimitCleanupOnRequestComplete generateCleanupClosure(AtomicInteger count, String userName) {
    return () -> {
        if (globalCount.decrementAndGet() < 0) {
            // Reset to 0 if it falls below 0
            int old = globalCount.getAndSet(0);
            LOG.error("Lost global count {} on user {}", old, userName);
        }
        if (count.decrementAndGet() < 0) {
            // Reset to 0 if it falls below 0
            int old = count.getAndSet(0);
            LOG.error("Lost user count {} on user {}", old, userName);
            // NOTE(review): unlike the global-count branch above, a lost user count also
            // throws, so the exception propagates out of the request-completion callback.
            // Confirm this asymmetry is intentional.
            throw new IllegalStateException("Lost user count");
        }
    };
}
}
|
|
package com.snoklecorp.fusedlocation;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.apache.cordova.CallbackContext;
import org.json.JSONObject;
import org.json.JSONArray;
import org.json.JSONException;
import android.location.Location;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.text.TextUtils;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.content.Context;
import android.content.Intent;
import android.content.IntentSender;
import android.content.IntentSender.SendIntentException;
import android.content.SharedPreferences;
import android.content.DialogInterface;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.common.api.GoogleApiClient.ConnectionCallbacks;
import com.google.android.gms.common.api.GoogleApiClient.OnConnectionFailedListener;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.LocationSettingsRequest;
import com.google.android.gms.location.LocationSettingsResult;
import com.google.android.gms.location.LocationSettingsStatusCodes;
/**
 * Helper that resolves the device's last known location (and optionally a reverse-geocoded
 * address) via the Google Play Services fused location provider, reporting the result back
 * through a Cordova {@link CallbackContext}.
 *
 * <p>NOTE(review): this class extends {@link Activity} but is constructed directly with a host
 * activity rather than being launched by the framework, so Android will not deliver lifecycle
 * callbacks such as {@link #onActivityResult} to this instance — confirm how the
 * settings-resolution result is expected to arrive.
 */
public class FusedLocationHelper extends Activity implements GoogleApiClient.ConnectionCallbacks,
        GoogleApiClient.OnConnectionFailedListener, ResultCallback<LocationSettingsResult> {

    /** Host activity used for dialogs, Play Services checks, and geocoding. */
    protected Activity mActivity = null;
    protected static final String TAG = "fusedlocation-plugin";
    /** Cordova callback invoked with the location/address result or an error. */
    protected CallbackContext mCallBackWhenGotLocation;
    protected GoogleApiClient mGoogleApiClient;
    protected LocationSettingsRequest mLocationSettingsRequest;
    protected LocationRequest mLocationRequest;
    /** Request code supplied to startResolutionForResult for the settings dialog. */
    protected static final int REQUEST_CHECK_SETTINGS = 0x1;
    /** When true, the result is a reverse-geocoded address instead of raw coordinates. */
    protected boolean mGetAddress;

    public FusedLocationHelper(Activity activity) {
        mActivity = activity;
    }

    /** Fetches the last known location and returns it to {@code cb} as {"lat","lon"} JSON. */
    public void GetLocation(CallbackContext cb) {
        mGetAddress = false;
        mCallBackWhenGotLocation = cb;
        CheckForPlayServices();
        SetupLocationFetching(cb);
    }

    /** Fetches the last known location and returns its reverse-geocoded address to {@code cb}. */
    public void GetAddress(CallbackContext cb) {
        mGetAddress = true;
        mCallBackWhenGotLocation = cb;
        CheckForPlayServices();
        SetupLocationFetching(cb);
    }

    /**
     * Verifies Google Play Services availability, showing the standard resolution dialog when
     * possible and otherwise reporting the failure through the callback.
     */
    protected void CheckForPlayServices() {
        int status = GooglePlayServicesUtil.isGooglePlayServicesAvailable(mActivity);
        if (status != ConnectionResult.SUCCESS) {
            Dialog errorDialog = GooglePlayServicesUtil.getErrorDialog(status, mActivity, 10,
                    new DialogInterface.OnCancelListener() {
                        @Override
                        public void onCancel(DialogInterface dialog) {
                            ErrorHappened("onCancel called on ErrorDialog. ");
                        }
                    });
            if (errorDialog != null) {
                errorDialog.show();
            } else {
                ErrorHappened("CheckForPlayServices failed. Error code: " + status);
            }
        }
    }

    /**
     * Builds the API client, the location request, and the settings request, then starts the
     * asynchronous connection; the flow continues in {@link #onConnected}.
     *
     * @param cb unused here; the callback has already been stored in mCallBackWhenGotLocation
     */
    protected void SetupLocationFetching(CallbackContext cb) {
        buildGoogleApiClient();
        createLocationRequest();
        buildLocationSettingsRequest();
        mGoogleApiClient.connect();
    }

    /** Lazily constructs the GoogleApiClient wired to this instance's connection callbacks. */
    protected synchronized void buildGoogleApiClient() {
        mGoogleApiClient = new GoogleApiClient.Builder(mActivity)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .addApi(LocationServices.API)
                .build();
    }

    /** Creates the balanced-power/accuracy location request used for the settings check. */
    protected void createLocationRequest() {
        mLocationRequest = new LocationRequest();
        mLocationRequest.setPriority(LocationRequest.PRIORITY_BALANCED_POWER_ACCURACY);
    }

    /** Wraps the location request in a LocationSettingsRequest for the settings API. */
    protected void buildLocationSettingsRequest() {
        LocationSettingsRequest.Builder builder = new LocationSettingsRequest.Builder();
        builder.addLocationRequest(mLocationRequest);
        mLocationSettingsRequest = builder.build();
    }

    /** Asks the settings API whether current device settings satisfy the location request. */
    protected void checkLocationSettings() {
        PendingResult<LocationSettingsResult> result =
                LocationServices.SettingsApi.checkLocationSettings(
                        mGoogleApiClient,
                        mLocationSettingsRequest
                );
        result.setResultCallback(this);
    }

    /**
     * Reads the fused provider's last known location and either returns it as JSON or hands it
     * to the geocoder, depending on the requested mode.
     */
    protected void GetLastLocation() {
        Location lastLocation = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
        if (lastLocation == null) {
            ErrorHappened("no location available");
            return;
        }
        if (mGetAddress) {
            // Address mode: skip building the JSON payload entirely.
            GetAddressFromLocation(lastLocation);
            return;
        }
        try {
            JSONObject jsonLocation = new JSONObject();
            jsonLocation.put("lat", String.valueOf(lastLocation.getLatitude()));
            jsonLocation.put("lon", String.valueOf(lastLocation.getLongitude()));
            mCallBackWhenGotLocation.success(jsonLocation);
        } catch (JSONException ex) {
            ErrorHappened("Error generating JSON from location");
        }
    }

    /**
     * Reverse-geocodes {@code lastLocation} and returns the first match's address lines,
     * joined with the platform line separator, through the callback.
     */
    protected void GetAddressFromLocation(Location lastLocation) {
        Geocoder geocoder = new Geocoder(mActivity, Locale.getDefault());
        List<Address> addresses = null;
        try {
            addresses = geocoder.getFromLocation(
                    lastLocation.getLatitude(),
                    lastLocation.getLongitude(),
                    1);
        } catch (IOException ioException) {
            ErrorHappened("Service not available");
            return;
        } catch (IllegalArgumentException illegalArgumentException) {
            ErrorHappened("Invalid location params used");
            return;
        }
        // Handle case where no address was found.
        if (addresses == null || addresses.size() == 0) {
            ErrorHappened("No address found");
        } else {
            Address address = addresses.get(0);
            ArrayList<String> addressFragments = new ArrayList<String>();
            // getMaxAddressLineIndex() returns the largest valid index, so iterate
            // inclusively; the previous "<" bound silently dropped the last address line.
            for (int i = 0; i <= address.getMaxAddressLineIndex(); i++) {
                addressFragments.add(address.getAddressLine(i));
            }
            mCallBackWhenGotLocation.success(TextUtils.join(System.getProperty("line.separator"), addressFragments));
        }
    }

    @Override
    public void onResult(LocationSettingsResult locationSettingsResult) {
        final Status status = locationSettingsResult.getStatus();
        switch (status.getStatusCode()) {
            case LocationSettingsStatusCodes.SUCCESS:
                Log.i(TAG, "All location settings are satisfied.");
                GetLastLocation();
                break;
            case LocationSettingsStatusCodes.RESOLUTION_REQUIRED:
                Log.i(TAG, "Location settings are not satisfied. Show the user a dialog to" +
                        "upgrade location settings ");
                try {
                    // Show the dialog by calling startResolutionForResult(), and check the result
                    // in onActivityResult().
                    status.startResolutionForResult(mActivity, REQUEST_CHECK_SETTINGS);
                } catch (IntentSender.SendIntentException e) {
                    ErrorHappened("PendingIntent unable to execute request.");
                }
                break;
            case LocationSettingsStatusCodes.SETTINGS_CHANGE_UNAVAILABLE:
                ErrorHappened("Location settings are inadequate, and cannot be fixed here. Dialog " +
                        "not created.");
                break;
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Dispatch to the framework first so default handling is preserved.
        super.onActivityResult(requestCode, resultCode, data);
        Log.i(TAG, "onActivityResult called with reqestCode " + requestCode + " and resultCode " + resultCode);
        switch (requestCode) {
            // Check for the integer request code originally supplied to startResolutionForResult().
            case REQUEST_CHECK_SETTINGS:
                switch (resultCode) {
                    case Activity.RESULT_OK:
                        Log.i(TAG, "User agreed to make required location settings changes.");
                        GetLastLocation();
                        break;
                    case Activity.RESULT_CANCELED:
                        ErrorHappened("User chose not to make required location settings changes.");
                        break;
                }
                break;
        }
    }

    @Override
    public void onConnected(Bundle connectionHint) {
        checkLocationSettings();
    }

    @Override
    public void onConnectionFailed(ConnectionResult result) {
        ErrorHappened("onConnectionFailed. Error code: " + result.getErrorCode());
    }

    @Override
    public void onConnectionSuspended(int cause) {
        // The connection to Google Play services was lost for some reason. We call connect() to
        // attempt to re-establish the connection.
        mGoogleApiClient.connect();
    }

    /** Logs the failure and reports it to the Cordova callback as a plugin error. */
    protected void ErrorHappened(String msg) {
        // Log at error level; these are failure paths, not informational events.
        Log.e(TAG, msg);
        mCallBackWhenGotLocation.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, msg));
    }
}
|
|
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.demo;
import com.google.android.exoplayer.AspectRatioFrameLayout;
import com.google.android.exoplayer.ExoPlaybackException;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer.MediaFormat;
import com.google.android.exoplayer.audio.AudioCapabilities;
import com.google.android.exoplayer.audio.AudioCapabilitiesReceiver;
import com.google.android.exoplayer.demo.player.DashRendererBuilder;
import com.google.android.exoplayer.demo.player.DemoPlayer;
import com.google.android.exoplayer.demo.player.DemoPlayer.RendererBuilder;
import com.google.android.exoplayer.demo.player.ExtractorRendererBuilder;
import com.google.android.exoplayer.demo.player.HlsRendererBuilder;
import com.google.android.exoplayer.demo.player.SmoothStreamingRendererBuilder;
import com.google.android.exoplayer.drm.UnsupportedDrmException;
import com.google.android.exoplayer.metadata.id3.GeobFrame;
import com.google.android.exoplayer.metadata.id3.Id3Frame;
import com.google.android.exoplayer.metadata.id3.PrivFrame;
import com.google.android.exoplayer.metadata.id3.TxxxFrame;
import com.google.android.exoplayer.text.CaptionStyleCompat;
import com.google.android.exoplayer.text.Cue;
import com.google.android.exoplayer.text.SubtitleLayout;
import com.google.android.exoplayer.util.DebugTextViewHelper;
import com.google.android.exoplayer.util.MimeTypes;
import com.google.android.exoplayer.util.Util;
import com.google.android.exoplayer.util.VerboseLogUtil;
import android.Manifest.permission;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnKeyListener;
import android.view.View.OnTouchListener;
import android.view.accessibility.CaptioningManager;
import android.widget.Button;
import android.widget.MediaController;
import android.widget.PopupMenu;
import android.widget.PopupMenu.OnMenuItemClickListener;
import android.widget.TextView;
import android.widget.Toast;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.CookiePolicy;
import java.util.List;
import java.util.Locale;
/**
 * An activity that plays media using {@link DemoPlayer}.
 *
 * <p>Handles the full player lifecycle (create/prepare/release across start/resume/pause/stop),
 * surface management, track-selection popup menus, caption rendering, ID3 metadata logging, and
 * the storage permission needed for local files.
 */
public class PlayerActivity extends Activity implements SurfaceHolder.Callback, OnClickListener,
    DemoPlayer.Listener, DemoPlayer.CaptionListener, DemoPlayer.Id3MetadataListener,
    AudioCapabilitiesReceiver.Listener {

  // For use within demo app code.
  public static final String CONTENT_ID_EXTRA = "content_id";
  public static final String CONTENT_TYPE_EXTRA = "content_type";
  public static final String PROVIDER_EXTRA = "provider";

  // For use when launching the demo app using adb.
  private static final String CONTENT_EXT_EXTRA = "type";

  private static final String TAG = "PlayerActivity";
  // Group id for track items so they can be distinguished from custom menu actions.
  private static final int MENU_GROUP_TRACKS = 1;
  private static final int ID_OFFSET = 2;

  // Process-wide cookie manager installed once in onCreate.
  private static final CookieManager defaultCookieManager;
  static {
    defaultCookieManager = new CookieManager();
    defaultCookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ORIGINAL_SERVER);
  }

  private EventLogger eventLogger;
  private MediaController mediaController;
  private View debugRootView;
  private View shutterView;
  private AspectRatioFrameLayout videoFrame;
  private SurfaceView surfaceView;
  private TextView debugTextView;
  private TextView playerStateTextView;
  private SubtitleLayout subtitleLayout;
  private Button videoButton;
  private Button audioButton;
  private Button textButton;
  private Button retryButton;

  private DemoPlayer player;
  private DebugTextViewHelper debugViewHelper;
  // Set when the player must be (re)prepared before playback can proceed.
  private boolean playerNeedsPrepare;

  // Playback position preserved across player release/re-create.
  private long playerPosition;
  private boolean enableBackgroundAudio;

  private Uri contentUri;
  private int contentType;
  private String contentId;
  private String provider;

  private AudioCapabilitiesReceiver audioCapabilitiesReceiver;

  // Activity lifecycle

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.player_activity);
    View root = findViewById(R.id.root);
    // Touch toggles controls; other events are forwarded as clicks on ACTION_UP.
    root.setOnTouchListener(new OnTouchListener() {
      @Override
      public boolean onTouch(View view, MotionEvent motionEvent) {
        if (motionEvent.getAction() == MotionEvent.ACTION_DOWN) {
          toggleControlsVisibility();
        } else if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
          view.performClick();
        }
        return true;
      }
    });
    // Let back/escape/menu keys fall through; route everything else to the media controller.
    root.setOnKeyListener(new OnKeyListener() {
      @Override
      public boolean onKey(View v, int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_BACK || keyCode == KeyEvent.KEYCODE_ESCAPE
            || keyCode == KeyEvent.KEYCODE_MENU) {
          return false;
        }
        return mediaController.dispatchKeyEvent(event);
      }
    });

    shutterView = findViewById(R.id.shutter);
    debugRootView = findViewById(R.id.controls_root);

    videoFrame = (AspectRatioFrameLayout) findViewById(R.id.video_frame);
    surfaceView = (SurfaceView) findViewById(R.id.surface_view);
    surfaceView.getHolder().addCallback(this);
    debugTextView = (TextView) findViewById(R.id.debug_text_view);

    playerStateTextView = (TextView) findViewById(R.id.player_state_view);
    subtitleLayout = (SubtitleLayout) findViewById(R.id.subtitles);

    mediaController = new KeyCompatibleMediaController(this);
    mediaController.setAnchorView(root);
    retryButton = (Button) findViewById(R.id.retry_button);
    retryButton.setOnClickListener(this);
    videoButton = (Button) findViewById(R.id.video_controls);
    audioButton = (Button) findViewById(R.id.audio_controls);
    textButton = (Button) findViewById(R.id.text_controls);

    // Install the shared cookie manager if it is not already the default (identity check).
    CookieHandler currentHandler = CookieHandler.getDefault();
    if (currentHandler != defaultCookieManager) {
      CookieHandler.setDefault(defaultCookieManager);
    }

    audioCapabilitiesReceiver = new AudioCapabilitiesReceiver(this, this);
    audioCapabilitiesReceiver.register();
  }

  @Override
  public void onNewIntent(Intent intent) {
    // A new intent means new content: drop the current player and start from position 0.
    releasePlayer();
    playerPosition = 0;
    setIntent(intent);
  }

  @Override
  public void onStart() {
    super.onStart();
    // On API > 23 the activity may be visible-but-paused (multi-window), so show in onStart.
    if (Util.SDK_INT > 23) {
      onShown();
    }
  }

  @Override
  public void onResume() {
    super.onResume();
    if (Util.SDK_INT <= 23 || player == null) {
      onShown();
    }
  }

  /** Reads playback parameters from the intent and (re)initializes the player if needed. */
  private void onShown() {
    Intent intent = getIntent();
    contentUri = intent.getData();
    contentType = intent.getIntExtra(CONTENT_TYPE_EXTRA,
        inferContentType(contentUri, intent.getStringExtra(CONTENT_EXT_EXTRA)));
    contentId = intent.getStringExtra(CONTENT_ID_EXTRA);
    provider = intent.getStringExtra(PROVIDER_EXTRA);
    configureSubtitleView();
    if (player == null) {
      // Only prepare immediately if no permission request is in flight.
      if (!maybeRequestPermission()) {
        preparePlayer(true);
      }
    } else {
      player.setBackgrounded(false);
    }
  }

  @Override
  public void onPause() {
    super.onPause();
    if (Util.SDK_INT <= 23) {
      onHidden();
    }
  }

  @Override
  public void onStop() {
    super.onStop();
    if (Util.SDK_INT > 23) {
      onHidden();
    }
  }

  /** Releases or backgrounds the player when the activity is no longer visible. */
  private void onHidden() {
    if (!enableBackgroundAudio) {
      releasePlayer();
    } else {
      player.setBackgrounded(true);
    }
    shutterView.setVisibility(View.VISIBLE);
  }

  @Override
  public void onDestroy() {
    super.onDestroy();
    audioCapabilitiesReceiver.unregister();
    releasePlayer();
  }

  // OnClickListener methods

  @Override
  public void onClick(View view) {
    if (view == retryButton) {
      preparePlayer(true);
    }
  }

  // AudioCapabilitiesReceiver.Listener methods

  @Override
  public void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities) {
    if (player == null) {
      return;
    }
    // Rebuild the player so renderers pick up the new audio capabilities, preserving state.
    boolean backgrounded = player.getBackgrounded();
    boolean playWhenReady = player.getPlayWhenReady();
    releasePlayer();
    preparePlayer(playWhenReady);
    player.setBackgrounded(backgrounded);
  }

  // Permission request listener method

  @Override
  public void onRequestPermissionsResult(int requestCode, String[] permissions,
      int[] grantResults) {
    if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
      preparePlayer(true);
    } else {
      Toast.makeText(getApplicationContext(), R.string.storage_permission_denied,
          Toast.LENGTH_LONG).show();
      finish();
    }
  }

  // Permission management methods

  /**
   * Checks whether it is necessary to ask for permission to read storage. If necessary, it also
   * requests permission.
   *
   * @return true if a permission request is made. False if it is not necessary.
   */
  @TargetApi(23)
  private boolean maybeRequestPermission() {
    if (requiresPermission(contentUri)) {
      requestPermissions(new String[] {permission.READ_EXTERNAL_STORAGE}, 0);
      return true;
    } else {
      return false;
    }
  }

  /** Returns whether reading {@code uri} requires the (not yet granted) storage permission. */
  @TargetApi(23)
  private boolean requiresPermission(Uri uri) {
    return Util.SDK_INT >= 23
        && Util.isLocalFileUri(uri)
        && checkSelfPermission(permission.READ_EXTERNAL_STORAGE)
            != PackageManager.PERMISSION_GRANTED;
  }

  // Internal methods

  /** Selects the renderer builder matching the current content type. */
  private RendererBuilder getRendererBuilder() {
    String userAgent = Util.getUserAgent(this, "ExoPlayerDemo");
    switch (contentType) {
      case Util.TYPE_SS:
        return new SmoothStreamingRendererBuilder(this, userAgent, contentUri.toString(),
            new SmoothStreamingTestMediaDrmCallback());
      case Util.TYPE_DASH:
        return new DashRendererBuilder(this, userAgent, contentUri.toString(),
            new WidevineTestMediaDrmCallback(contentId, provider));
      case Util.TYPE_HLS:
        return new HlsRendererBuilder(this, userAgent, contentUri.toString());
      case Util.TYPE_OTHER:
        return new ExtractorRendererBuilder(this, userAgent, contentUri);
      default:
        throw new IllegalStateException("Unsupported type: " + contentType);
    }
  }

  /**
   * Lazily creates the player (wiring listeners, logging, and debug views), prepares it when
   * needed, and attaches the current surface.
   *
   * @param playWhenReady whether playback should start as soon as the player is ready
   */
  private void preparePlayer(boolean playWhenReady) {
    if (player == null) {
      player = new DemoPlayer(getRendererBuilder());
      player.addListener(this);
      player.setCaptionListener(this);
      player.setMetadataListener(this);
      // Restore the position saved by the last releasePlayer() call.
      player.seekTo(playerPosition);
      playerNeedsPrepare = true;
      mediaController.setMediaPlayer(player.getPlayerControl());
      mediaController.setEnabled(true);
      eventLogger = new EventLogger();
      eventLogger.startSession();
      player.addListener(eventLogger);
      player.setInfoListener(eventLogger);
      player.setInternalErrorListener(eventLogger);
      debugViewHelper = new DebugTextViewHelper(player, debugTextView);
      debugViewHelper.start();
    }
    if (playerNeedsPrepare) {
      player.prepare();
      playerNeedsPrepare = false;
      updateButtonVisibilities();
    }
    player.setSurface(surfaceView.getHolder().getSurface());
    player.setPlayWhenReady(playWhenReady);
  }

  /** Tears down the player and its helpers, remembering the position for later restore. */
  private void releasePlayer() {
    if (player != null) {
      debugViewHelper.stop();
      debugViewHelper = null;
      playerPosition = player.getCurrentPosition();
      player.release();
      player = null;
      eventLogger.endSession();
      eventLogger = null;
    }
  }

  // DemoPlayer.Listener implementation

  @Override
  public void onStateChanged(boolean playWhenReady, int playbackState) {
    if (playbackState == ExoPlayer.STATE_ENDED) {
      showControls();
    }
    // Mirror the player state into the on-screen debug text.
    String text = "playWhenReady=" + playWhenReady + ", playbackState=";
    switch(playbackState) {
      case ExoPlayer.STATE_BUFFERING:
        text += "buffering";
        break;
      case ExoPlayer.STATE_ENDED:
        text += "ended";
        break;
      case ExoPlayer.STATE_IDLE:
        text += "idle";
        break;
      case ExoPlayer.STATE_PREPARING:
        text += "preparing";
        break;
      case ExoPlayer.STATE_READY:
        text += "ready";
        break;
      default:
        text += "unknown";
        break;
    }
    playerStateTextView.setText(text);
    updateButtonVisibilities();
  }

  @Override
  public void onError(Exception e) {
    String errorString = null;
    if (e instanceof UnsupportedDrmException) {
      // Special case DRM failures.
      UnsupportedDrmException unsupportedDrmException = (UnsupportedDrmException) e;
      errorString = getString(Util.SDK_INT < 18 ? R.string.error_drm_not_supported
          : unsupportedDrmException.reason == UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME
          ? R.string.error_drm_unsupported_scheme : R.string.error_drm_unknown);
    } else if (e instanceof ExoPlaybackException
        && e.getCause() instanceof DecoderInitializationException) {
      // Special case for decoder initialization failures.
      DecoderInitializationException decoderInitializationException =
          (DecoderInitializationException) e.getCause();
      if (decoderInitializationException.decoderName == null) {
        if (decoderInitializationException.getCause() instanceof DecoderQueryException) {
          errorString = getString(R.string.error_querying_decoders);
        } else if (decoderInitializationException.secureDecoderRequired) {
          errorString = getString(R.string.error_no_secure_decoder,
              decoderInitializationException.mimeType);
        } else {
          errorString = getString(R.string.error_no_decoder,
              decoderInitializationException.mimeType);
        }
      } else {
        errorString = getString(R.string.error_instantiating_decoder,
            decoderInitializationException.decoderName);
      }
    }
    if (errorString != null) {
      Toast.makeText(getApplicationContext(), errorString, Toast.LENGTH_LONG).show();
    }
    // After any error, require a fresh prepare before playback can continue.
    playerNeedsPrepare = true;
    updateButtonVisibilities();
    showControls();
  }

  @Override
  public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
      float pixelWidthAspectRatio) {
    shutterView.setVisibility(View.GONE);
    videoFrame.setAspectRatio(
        height == 0 ? 1 : (width * pixelWidthAspectRatio) / height);
  }

  // User controls

  /** Shows/hides per-track buttons depending on player state and available tracks. */
  private void updateButtonVisibilities() {
    retryButton.setVisibility(playerNeedsPrepare ? View.VISIBLE : View.GONE);
    videoButton.setVisibility(haveTracks(DemoPlayer.TYPE_VIDEO) ? View.VISIBLE : View.GONE);
    audioButton.setVisibility(haveTracks(DemoPlayer.TYPE_AUDIO) ? View.VISIBLE : View.GONE);
    textButton.setVisibility(haveTracks(DemoPlayer.TYPE_TEXT) ? View.VISIBLE : View.GONE);
  }

  private boolean haveTracks(int type) {
    return player != null && player.getTrackCount(type) > 0;
  }

  public void showVideoPopup(View v) {
    PopupMenu popup = new PopupMenu(this, v);
    configurePopupWithTracks(popup, null, DemoPlayer.TYPE_VIDEO);
    popup.show();
  }

  public void showAudioPopup(View v) {
    PopupMenu popup = new PopupMenu(this, v);
    Menu menu = popup.getMenu();
    // Extra checkable item toggling background audio; track items are appended after it.
    menu.add(Menu.NONE, Menu.NONE, Menu.NONE, R.string.enable_background_audio);
    final MenuItem backgroundAudioItem = menu.findItem(0);
    backgroundAudioItem.setCheckable(true);
    backgroundAudioItem.setChecked(enableBackgroundAudio);
    OnMenuItemClickListener clickListener = new OnMenuItemClickListener() {
      @Override
      public boolean onMenuItemClick(MenuItem item) {
        if (item == backgroundAudioItem) {
          enableBackgroundAudio = !item.isChecked();
          return true;
        }
        return false;
      }
    };
    configurePopupWithTracks(popup, clickListener, DemoPlayer.TYPE_AUDIO);
    popup.show();
  }

  public void showTextPopup(View v) {
    PopupMenu popup = new PopupMenu(this, v);
    configurePopupWithTracks(popup, null, DemoPlayer.TYPE_TEXT);
    popup.show();
  }

  public void showVerboseLogPopup(View v) {
    PopupMenu popup = new PopupMenu(this, v);
    Menu menu = popup.getMenu();
    menu.add(Menu.NONE, 0, Menu.NONE, R.string.logging_normal);
    menu.add(Menu.NONE, 1, Menu.NONE, R.string.logging_verbose);
    menu.setGroupCheckable(Menu.NONE, true, true);
    menu.findItem((VerboseLogUtil.areAllTagsEnabled()) ? 1 : 0).setChecked(true);
    popup.setOnMenuItemClickListener(new OnMenuItemClickListener() {
      @Override
      public boolean onMenuItemClick(MenuItem item) {
        if (item.getItemId() == 0) {
          VerboseLogUtil.setEnableAllTags(false);
        } else {
          VerboseLogUtil.setEnableAllTags(true);
        }
        return true;
      }
    });
    popup.show();
  }

  /**
   * Populates {@code popup} with the selectable tracks of {@code trackType}, routing clicks
   * first to {@code customActionClickListener} (if any) and then to track selection.
   */
  private void configurePopupWithTracks(PopupMenu popup,
      final OnMenuItemClickListener customActionClickListener,
      final int trackType) {
    if (player == null) {
      return;
    }
    int trackCount = player.getTrackCount(trackType);
    if (trackCount == 0) {
      return;
    }
    popup.setOnMenuItemClickListener(new OnMenuItemClickListener() {
      @Override
      public boolean onMenuItemClick(MenuItem item) {
        return (customActionClickListener != null
            && customActionClickListener.onMenuItemClick(item))
            || onTrackItemClick(item, trackType);
      }
    });
    Menu menu = popup.getMenu();
    // ID_OFFSET ensures we avoid clashing with Menu.NONE (which equals 0).
    menu.add(MENU_GROUP_TRACKS, DemoPlayer.TRACK_DISABLED + ID_OFFSET, Menu.NONE, R.string.off);
    for (int i = 0; i < trackCount; i++) {
      menu.add(MENU_GROUP_TRACKS, i + ID_OFFSET, Menu.NONE,
          buildTrackName(player.getTrackFormat(trackType, i)));
    }
    menu.setGroupCheckable(MENU_GROUP_TRACKS, true, true);
    menu.findItem(player.getSelectedTrack(trackType) + ID_OFFSET).setChecked(true);
  }

  /** Builds a human-readable menu label for a track from its format properties. */
  private static String buildTrackName(MediaFormat format) {
    if (format.adaptive) {
      return "auto";
    }
    String trackName;
    if (MimeTypes.isVideo(format.mimeType)) {
      trackName = joinWithSeparator(joinWithSeparator(buildResolutionString(format),
          buildBitrateString(format)), buildTrackIdString(format));
    } else if (MimeTypes.isAudio(format.mimeType)) {
      trackName = joinWithSeparator(joinWithSeparator(joinWithSeparator(buildLanguageString(format),
          buildAudioPropertyString(format)), buildBitrateString(format)),
          buildTrackIdString(format));
    } else {
      trackName = joinWithSeparator(joinWithSeparator(buildLanguageString(format),
          buildBitrateString(format)), buildTrackIdString(format));
    }
    return trackName.length() == 0 ? "unknown" : trackName;
  }

  private static String buildResolutionString(MediaFormat format) {
    return format.width == MediaFormat.NO_VALUE || format.height == MediaFormat.NO_VALUE
        ? "" : format.width + "x" + format.height;
  }

  private static String buildAudioPropertyString(MediaFormat format) {
    return format.channelCount == MediaFormat.NO_VALUE || format.sampleRate == MediaFormat.NO_VALUE
        ? "" : format.channelCount + "ch, " + format.sampleRate + "Hz";
  }

  private static String buildLanguageString(MediaFormat format) {
    // "und" is the ISO 639 code for an undetermined language; treat it as absent.
    return TextUtils.isEmpty(format.language) || "und".equals(format.language) ? ""
        : format.language;
  }

  private static String buildBitrateString(MediaFormat format) {
    return format.bitrate == MediaFormat.NO_VALUE ? ""
        : String.format(Locale.US, "%.2fMbit", format.bitrate / 1000000f);
  }

  private static String joinWithSeparator(String first, String second) {
    return first.length() == 0 ? second : (second.length() == 0 ? first : first + ", " + second);
  }

  private static String buildTrackIdString(MediaFormat format) {
    return format.trackId == null ? "" : " (" + format.trackId + ")";
  }

  /** Applies a track selection from a popup menu item; returns whether the click was handled. */
  private boolean onTrackItemClick(MenuItem item, int type) {
    if (player == null || item.getGroupId() != MENU_GROUP_TRACKS) {
      return false;
    }
    player.setSelectedTrack(type, item.getItemId() - ID_OFFSET);
    return true;
  }

  private void toggleControlsVisibility()  {
    if (mediaController.isShowing()) {
      mediaController.hide();
      debugRootView.setVisibility(View.GONE);
    } else {
      showControls();
    }
  }

  private void showControls() {
    mediaController.show(0);
    debugRootView.setVisibility(View.VISIBLE);
  }

  // DemoPlayer.CaptionListener implementation

  @Override
  public void onCues(List<Cue> cues) {
    subtitleLayout.setCues(cues);
  }

  // DemoPlayer.MetadataListener implementation

  @Override
  public void onId3Metadata(List<Id3Frame> id3Frames) {
    // Log each supported ID3 frame type with its relevant fields.
    for (Id3Frame id3Frame : id3Frames) {
      if (id3Frame instanceof TxxxFrame) {
        TxxxFrame txxxFrame = (TxxxFrame) id3Frame;
        Log.i(TAG, String.format("ID3 TimedMetadata %s: description=%s, value=%s", txxxFrame.id,
            txxxFrame.description, txxxFrame.value));
      } else if (id3Frame instanceof PrivFrame) {
        PrivFrame privFrame = (PrivFrame) id3Frame;
        Log.i(TAG, String.format("ID3 TimedMetadata %s: owner=%s", privFrame.id, privFrame.owner));
      } else if (id3Frame instanceof GeobFrame) {
        GeobFrame geobFrame = (GeobFrame) id3Frame;
        Log.i(TAG, String.format("ID3 TimedMetadata %s: mimeType=%s, filename=%s, description=%s",
            geobFrame.id, geobFrame.mimeType, geobFrame.filename, geobFrame.description));
      } else {
        Log.i(TAG, String.format("ID3 TimedMetadata %s", id3Frame.id));
      }
    }
  }

  // SurfaceHolder.Callback implementation

  @Override
  public void surfaceCreated(SurfaceHolder holder) {
    if (player != null) {
      player.setSurface(holder.getSurface());
    }
  }

  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    // Do nothing.
  }

  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    if (player != null) {
      player.blockingClearSurface();
    }
  }

  /** Applies the system captioning style and font scale (API 19+) to the subtitle view. */
  private void configureSubtitleView() {
    CaptionStyleCompat style;
    float fontScale;
    if (Util.SDK_INT >= 19) {
      style = getUserCaptionStyleV19();
      fontScale = getUserCaptionFontScaleV19();
    } else {
      style = CaptionStyleCompat.DEFAULT;
      fontScale = 1.0f;
    }
    subtitleLayout.setStyle(style);
    subtitleLayout.setFractionalTextSize(SubtitleLayout.DEFAULT_TEXT_SIZE_FRACTION * fontScale);
  }

  @TargetApi(19)
  private float getUserCaptionFontScaleV19() {
    CaptioningManager captioningManager =
        (CaptioningManager) getSystemService(Context.CAPTIONING_SERVICE);
    return captioningManager.getFontScale();
  }

  @TargetApi(19)
  private CaptionStyleCompat getUserCaptionStyleV19() {
    CaptioningManager captioningManager =
        (CaptioningManager) getSystemService(Context.CAPTIONING_SERVICE);
    return CaptionStyleCompat.createFromCaptionStyle(captioningManager.getUserStyle());
  }

  /**
   * Makes a best guess to infer the type from a media {@link Uri} and an optional overriding file
   * extension.
   *
   * @param uri The {@link Uri} of the media.
   * @param fileExtension An overriding file extension.
   * @return The inferred type.
   */
  private static int inferContentType(Uri uri, String fileExtension) {
    String lastPathSegment = !TextUtils.isEmpty(fileExtension) ? "." + fileExtension
        : uri.getLastPathSegment();
    return Util.inferContentType(lastPathSegment);
  }

  /**
   * MediaController that also maps media fast-forward/rewind and d-pad left/right keys to
   * seek operations when the player supports them.
   */
  private static final class KeyCompatibleMediaController extends MediaController {

    private MediaController.MediaPlayerControl playerControl;

    public KeyCompatibleMediaController(Context context) {
      super(context);
    }

    @Override
    public void setMediaPlayer(MediaController.MediaPlayerControl playerControl) {
      super.setMediaPlayer(playerControl);
      this.playerControl = playerControl;
    }

    @Override
    public boolean dispatchKeyEvent(KeyEvent event) {
      int keyCode = event.getKeyCode();
      if (playerControl.canSeekForward() && (keyCode == KeyEvent.KEYCODE_MEDIA_FAST_FORWARD
          || keyCode == KeyEvent.KEYCODE_DPAD_RIGHT)) {
        if (event.getAction() == KeyEvent.ACTION_DOWN) {
          playerControl.seekTo(playerControl.getCurrentPosition() + 15000); // milliseconds
          show();
        }
        return true;
      } else if (playerControl.canSeekBackward() && (keyCode == KeyEvent.KEYCODE_MEDIA_REWIND
          || keyCode == KeyEvent.KEYCODE_DPAD_LEFT)) {
        if (event.getAction() == KeyEvent.ACTION_DOWN) {
          playerControl.seekTo(playerControl.getCurrentPosition() - 5000); // milliseconds
          show();
        }
        return true;
      }
      return super.dispatchKeyEvent(event);
    }
  }

}
|
|
package de.danoeh.antennapod.parser.feed.namespace;
import android.text.TextUtils;
import android.util.Log;
import de.danoeh.antennapod.model.feed.FeedFunding;
import de.danoeh.antennapod.parser.feed.HandlerState;
import de.danoeh.antennapod.parser.feed.element.AtomText;
import de.danoeh.antennapod.parser.feed.util.DateUtils;
import de.danoeh.antennapod.parser.feed.util.SyndStringUtils;
import org.xml.sax.Attributes;
import de.danoeh.antennapod.model.feed.FeedItem;
import de.danoeh.antennapod.model.feed.FeedMedia;
import de.danoeh.antennapod.parser.feed.element.SyndElement;
import de.danoeh.antennapod.parser.feed.util.SyndTypeUtils;
/**
 * SAX handler extension that processes elements from the Atom namespace
 * (http://www.w3.org/2005/Atom). Also tolerates Atom elements embedded in
 * RSS 2.0 feeds (see the {@code isFeed}/{@code isFeedItem} patterns).
 */
public class Atom extends Namespace {
    private static final String TAG = "NSAtom";
    public static final String NSTAG = "atom";
    public static final String NSURI = "http://www.w3.org/2005/Atom";

    private static final String FEED = "feed";
    private static final String ID = "id";
    private static final String TITLE = "title";
    private static final String ENTRY = "entry";
    private static final String LINK = "link";
    private static final String UPDATED = "updated";
    private static final String AUTHOR = "author";
    private static final String AUTHOR_NAME = "name";
    private static final String CONTENT = "content";
    private static final String SUMMARY = "summary";
    private static final String IMAGE_LOGO = "logo";
    private static final String IMAGE_ICON = "icon";
    private static final String SUBTITLE = "subtitle";
    private static final String PUBLISHED = "published";
    private static final String TEXT_TYPE = "type";
    // Link
    private static final String LINK_HREF = "href";
    private static final String LINK_REL = "rel";
    private static final String LINK_TYPE = "type";
    private static final String LINK_TITLE = "title";
    private static final String LINK_LENGTH = "length";
    // rel-values
    private static final String LINK_REL_ALTERNATE = "alternate";
    private static final String LINK_REL_ARCHIVES = "archives";
    private static final String LINK_REL_ENCLOSURE = "enclosure";
    private static final String LINK_REL_PAYMENT = "payment";
    private static final String LINK_REL_NEXT = "next";
    // type-values
    private static final String LINK_TYPE_ATOM = "application/atom+xml";
    private static final String LINK_TYPE_HTML = "text/html";
    private static final String LINK_TYPE_XHTML = "application/xml+xhtml";
    private static final String LINK_TYPE_RSS = "application/rss+xml";

    /**
     * Regexp to test whether an Element is a Text Element.
     */
    private static final String isText = TITLE + "|" + CONTENT + "|"
            + SUBTITLE + "|" + SUMMARY;
    // Feed/item containers may come from either the Atom or the RSS 2.0 vocabulary.
    private static final String isFeed = FEED + "|" + Rss20.CHANNEL;
    private static final String isFeedItem = ENTRY + "|" + Rss20.ITEM;

    /**
     * Handles an opening Atom element: creates feed items for {@code entry},
     * returns an {@link AtomText} wrapper for text elements, and interprets
     * {@code link} elements according to their {@code rel} attribute.
     */
    @Override
    public SyndElement handleElementStart(String localName, HandlerState state,
                                          Attributes attributes) {
        if (ENTRY.equals(localName)) {
            state.setCurrentItem(new FeedItem());
            state.getItems().add(state.getCurrentItem());
            state.getCurrentItem().setFeed(state.getFeed());
        } else if (localName.matches(isText)) {
            String type = attributes.getValue(TEXT_TYPE);
            return new AtomText(localName, this, type);
        } else if (LINK.equals(localName)) {
            String href = attributes.getValue(LINK_HREF);
            String rel = attributes.getValue(LINK_REL);
            SyndElement parent = state.getTagstack().peek();
            if (parent.getName().matches(isFeedItem)) {
                // getCurrentItem() may be null for malformed feeds; guard all
                // dereferences, consistent with the enclosure branch below.
                if (rel == null || LINK_REL_ALTERNATE.equals(rel)) {
                    if (state.getCurrentItem() != null) {
                        state.getCurrentItem().setLink(href);
                    }
                } else if (LINK_REL_ENCLOSURE.equals(rel)) {
                    String strSize = attributes.getValue(LINK_LENGTH);
                    long size = 0;
                    try {
                        if (strSize != null) {
                            size = Long.parseLong(strSize);
                        }
                    } catch (NumberFormatException e) {
                        Log.d(TAG, "Length attribute could not be parsed.");
                    }
                    String type = attributes.getValue(LINK_TYPE);
                    if (type == null) {
                        // No explicit MIME type; try to derive one from the URL.
                        type = SyndTypeUtils.getMimeTypeFromUrl(href);
                    }
                    FeedItem currItem = state.getCurrentItem();
                    // Only the first valid enclosure per item becomes its media.
                    if (SyndTypeUtils.enclosureTypeValid(type) && currItem != null && !currItem.hasMedia()) {
                        currItem.setMedia(new FeedMedia(currItem, href, size, type));
                    }
                } else if (LINK_REL_PAYMENT.equals(rel)) {
                    if (state.getCurrentItem() != null) {
                        state.getCurrentItem().setPaymentLink(href);
                    }
                }
            } else if (parent.getName().matches(isFeed)) {
                if (rel == null || LINK_REL_ALTERNATE.equals(rel)) {
                    String type = attributes.getValue(LINK_TYPE);
                    /*
                     * Use as link if a) no type-attribute is given and
                     * feed-object has no link yet b) type of link is
                     * LINK_TYPE_HTML or LINK_TYPE_XHTML
                     */
                    if (state.getFeed() != null &&
                            ((type == null && state.getFeed().getLink() == null) ||
                                    (LINK_TYPE_HTML.equals(type) || LINK_TYPE_XHTML.equals(type)))) {
                        state.getFeed().setLink(href);
                    } else if (LINK_TYPE_ATOM.equals(type) || LINK_TYPE_RSS.equals(type)) {
                        // treat as podlove alternate feed
                        String title = attributes.getValue(LINK_TITLE);
                        if (TextUtils.isEmpty(title)) {
                            title = href;
                        }
                        state.addAlternateFeedUrl(title, href);
                    }
                } else if (LINK_REL_ARCHIVES.equals(rel) && state.getFeed() != null) {
                    String type = attributes.getValue(LINK_TYPE);
                    if (LINK_TYPE_ATOM.equals(type) || LINK_TYPE_RSS.equals(type)) {
                        String title = attributes.getValue(LINK_TITLE);
                        if (TextUtils.isEmpty(title)) {
                            title = href;
                        }
                        state.addAlternateFeedUrl(title, href);
                    } else if (LINK_TYPE_HTML.equals(type) || LINK_TYPE_XHTML.equals(type)) {
                        //A Link such as to a directory such as iTunes
                    }
                } else if (LINK_REL_PAYMENT.equals(rel) && state.getFeed() != null) {
                    state.getFeed().addPayment(new FeedFunding(href, ""));
                } else if (LINK_REL_NEXT.equals(rel) && state.getFeed() != null) {
                    // Paged feed (RFC 5005): remember where the next page lives.
                    state.getFeed().setPaged(true);
                    state.getFeed().setNextPageLink(href);
                }
            }
        }
        return new SyndElement(localName, this);
    }

    /**
     * Handles a closing Atom element: flushes buffered character content into
     * the current feed or item depending on which element just ended and what
     * its parent is.
     */
    @Override
    public void handleElementEnd(String localName, HandlerState state) {
        if (ENTRY.equals(localName)) {
            // An itunes:duration seen inside this entry applies to its media.
            if (state.getCurrentItem() != null &&
                    state.getTempObjects().containsKey(Itunes.DURATION)) {
                FeedItem currentItem = state.getCurrentItem();
                if (currentItem.hasMedia()) {
                    Integer duration = (Integer) state.getTempObjects().get(Itunes.DURATION);
                    currentItem.getMedia().setDuration(duration);
                }
                state.getTempObjects().remove(Itunes.DURATION);
            }
            state.setCurrentItem(null);
        }

        if (state.getTagstack().size() >= 2) {
            AtomText textElement = null;
            String contentRaw;
            if (state.getContentBuf() != null) {
                contentRaw = state.getContentBuf().toString();
            } else {
                contentRaw = "";
            }
            String content = SyndStringUtils.trimAllWhitespace(contentRaw);
            SyndElement topElement = state.getTagstack().peek();
            String top = topElement.getName();
            SyndElement secondElement = state.getSecondTag();
            String second = secondElement.getName();

            if (top.matches(isText)) {
                textElement = (AtomText) topElement;
                textElement.setContent(content);
            }

            if (ID.equals(top)) {
                // Identifiers keep their raw (untrimmed) content.
                if (FEED.equals(second) && state.getFeed() != null) {
                    state.getFeed().setFeedIdentifier(contentRaw);
                } else if (ENTRY.equals(second) && state.getCurrentItem() != null) {
                    state.getCurrentItem().setItemIdentifier(contentRaw);
                }
            } else if (TITLE.equals(top) && textElement != null) {
                if (FEED.equals(second) && state.getFeed() != null) {
                    state.getFeed().setTitle(textElement.getProcessedContent());
                } else if (ENTRY.equals(second) && state.getCurrentItem() != null) {
                    state.getCurrentItem().setTitle(textElement.getProcessedContent());
                }
            } else if (SUBTITLE.equals(top) && FEED.equals(second) && textElement != null &&
                    state.getFeed() != null) {
                state.getFeed().setDescription(textElement.getProcessedContent());
            } else if (CONTENT.equals(top) && ENTRY.equals(second) && textElement != null &&
                    state.getCurrentItem() != null) {
                state.getCurrentItem().setDescriptionIfLonger(textElement.getProcessedContent());
            } else if (SUMMARY.equals(top) && ENTRY.equals(second) && textElement != null
                    && state.getCurrentItem() != null) {
                state.getCurrentItem().setDescriptionIfLonger(textElement.getProcessedContent());
            } else if (UPDATED.equals(top) && ENTRY.equals(second) && state.getCurrentItem() != null &&
                    state.getCurrentItem().getPubDate() == null) {
                // <updated> is only a fallback; <published> (below) wins if present.
                state.getCurrentItem().setPubDate(DateUtils.parseOrNullIfFuture(content));
            } else if (PUBLISHED.equals(top) && ENTRY.equals(second) && state.getCurrentItem() != null) {
                state.getCurrentItem().setPubDate(DateUtils.parseOrNullIfFuture(content));
            } else if (IMAGE_LOGO.equals(top) && state.getFeed() != null && state.getFeed().getImageUrl() == null) {
                state.getFeed().setImageUrl(content);
            } else if (IMAGE_ICON.equals(top) && state.getFeed() != null) {
                // <icon> overwrites any previously seen image URL.
                state.getFeed().setImageUrl(content);
            } else if (AUTHOR_NAME.equals(top) && AUTHOR.equals(second) &&
                    state.getFeed() != null && state.getCurrentItem() == null) {
                // Multiple feed-level authors are concatenated with ", ".
                String currentName = state.getFeed().getAuthor();
                if (currentName == null) {
                    state.getFeed().setAuthor(content);
                } else {
                    state.getFeed().setAuthor(currentName + ", " + content);
                }
            }
        }
    }
}
|
|
package xyz.stepsecret.arrayproject3;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.location.Location;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.widget.SearchView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.animation.GlideAnimation;
import com.bumptech.glide.request.target.SimpleTarget;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapView;
import com.google.android.gms.maps.MapsInitializer;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptor;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import java.util.ArrayList;
import java.util.HashMap;
import cn.pedant.SweetAlert.SweetAlertDialog;
import retrofit.Callback;
import retrofit.RestAdapter;
import retrofit.RetrofitError;
import retrofit.client.Response;
import xyz.stepsecret.arrayproject3.API.Near_API;
import xyz.stepsecret.arrayproject3.Config.ConfigData;
import xyz.stepsecret.arrayproject3.Model.Near_Model;
import xyz.stepsecret.arrayproject3.TabFragments.models.ShopSectionDataModel;
import xyz.stepsecret.arrayproject3.TabFragments.models.ShopSingleItemModel;
import xyz.stepsecret.arrayproject3.TinyDB.TinyDB;
/**
* Created by stepsecret on 14/8/2559.
*/
/**
 * Shows a Google Map with the user's current position and nearby shop markers
 * fetched from the backend ({@link Near_API}). Tapping a marker opens
 * {@link BookBranch} for the associated branch id.
 */
public class MapActivity extends AppCompatActivity implements OnMapReadyCallback, GoogleMap.OnMyLocationButtonClickListener,
        GoogleMap.OnMarkerClickListener, GoogleApiClient.OnConnectionFailedListener, GoogleApiClient.ConnectionCallbacks,
        LocationListener {

    private Toolbar toolbar;
    // The map object delivered by onMapReady(); null until the map is ready.
    public GoogleMap gMap;
    // The MapView widget; its lifecycle methods are forwarded below.
    public MapView mMap;
    private Location mLastLocation;
    private final static int PLAY_SERVICES_RESOLUTION_REQUEST = 1000;
    private GoogleApiClient mGoogleApiClient;
    // NOTE(review): this flag is never set to true anywhere in this class, so the
    // startLocationUpdates() call in onResume() appears unreachable — confirm intent.
    private boolean mRequestingLocationUpdates = false;
    private LocationRequest mLocationRequest;
    private static int UPDATE_INTERVAL = 10000; // 10 sec
    private static int FATEST_INTERVAL = 5000; // 5 sec
    private static int DISPLACEMENT = 10; // 10 meters
    // Marker for the user's own position (replaced on each setMeMark() call).
    public static Marker Your_Marker;
    public static Marker[] ST_Marker;
    public static LatLng camera;
    // Maps each shop marker to its branch id, consumed in onMarkerClick().
    private HashMap<Marker, String> mHashMap = new HashMap<Marker, String>();
    private RestAdapter restAdapter;
    private TinyDB Store_data;
    private SweetAlertDialog pDialog;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_map);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setDisplayShowHomeEnabled(true);
        getSupportActionBar().setTitle(getResources().getString(R.string.map));
        // MapView requires manual lifecycle forwarding (see onResume/onPause/etc. below).
        mMap = (MapView) findViewById(R.id.mapView);
        mMap.onCreate(savedInstanceState);
        mMap.getMapAsync(this);
        Store_data = new TinyDB(this);
        restAdapter = new RestAdapter.Builder()
                .setEndpoint(ConfigData.API).build();
        buildGoogleApiClient();
        createLocationRequest();
    }

    /**
     * Creating google api client object
     * */
    protected synchronized void buildGoogleApiClient() {
        if (mGoogleApiClient == null) {
            mGoogleApiClient = new GoogleApiClient.Builder(this)
                    .addConnectionCallbacks(this)
                    .addOnConnectionFailedListener(this)
                    .addApi(LocationServices.API)
                    .build();
        }
    }

    /**
     * Creating location request object
     * */
    protected void createLocationRequest() {
        mLocationRequest = new LocationRequest();
        mLocationRequest.setInterval(UPDATE_INTERVAL);
        mLocationRequest.setFastestInterval(FATEST_INTERVAL);
        mLocationRequest.setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY);
        mLocationRequest.setSmallestDisplacement(DISPLACEMENT);
    }

    /**
     * Starting the location updates
     * */
    protected void startLocationUpdates() {
        // Bail out silently when location permission has not been granted.
        if (ActivityCompat.checkSelfPermission(this, android.Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, android.Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
            // TODO: Consider calling
            //    ActivityCompat#requestPermissions
            // here to request the missing permissions, and then overriding
            //   public void onRequestPermissionsResult(int requestCode, String[] permissions,
            //                                          int[] grantResults)
            // to handle the case where the user grants the permission. See the documentation
            // for ActivityCompat#requestPermissions for more details.
            return;
        }
        LocationServices.FusedLocationApi.requestLocationUpdates(
                mGoogleApiClient, mLocationRequest, this);
    }

    /**
     * Stopping location updates
     */
    protected void stopLocationUpdates() {
        LocationServices.FusedLocationApi.removeLocationUpdates(
                mGoogleApiClient, this);
    }

    // GoogleApiClient.ConnectionCallbacks: once connected, read the last known
    // location, show a progress dialog, center the camera and fetch nearby shops.
    @Override
    public void onConnected(Bundle connectionHint) {
        if (ActivityCompat.checkSelfPermission(this, android.Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, android.Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
            // TODO: Consider calling
            //    ActivityCompat#requestPermissions
            // here to request the missing permissions, and then overriding
            //   public void onRequestPermissionsResult(int requestCode, String[] permissions,
            //                                          int[] grantResults)
            // to handle the case where the user grants the permission. See the documentation
            // for ActivityCompat#requestPermissions for more details.
            return;
        }
        mLastLocation = LocationServices.FusedLocationApi.getLastLocation(
                mGoogleApiClient);
        if (mLastLocation != null) {
            pDialog = new SweetAlertDialog(this, SweetAlertDialog.PROGRESS_TYPE);
            pDialog.getProgressHelper().setBarColor(Color.parseColor("#A5DC86"));
            pDialog.setTitleText("Loading");
            pDialog.setCancelable(false);
            pDialog.show();
            setCamera();
            NearData(mLastLocation);
        }
    }

    // Animates the camera to the last known location at a fixed zoom level.
    // NOTE(review): assumes onMapReady() already ran (gMap != null) — confirm ordering.
    public void setCamera()
    {
        camera = new LatLng(mLastLocation.getLatitude(), mLastLocation.getLongitude());
        gMap.animateCamera(CameraUpdateFactory.newLatLngZoom(camera, 15));
    }

    // Places (or replaces) the marker representing the user's own position.
    public void setMeMark()
    {
        //mHashMap.clear();
        if (Your_Marker == null)
        {
            Your_Marker = gMap.addMarker(new MarkerOptions()
                    .position(new LatLng(mLastLocation.getLatitude(), mLastLocation.getLongitude()))
                    .icon(BitmapDescriptorFactory.fromResource(R.drawable.me))
                    .title("Your Here"));
            //mHashMap.put(Your_Marker, "99");
        }
        else
        {
            // Remove the stale marker before adding the refreshed one.
            Your_Marker.remove();
            Your_Marker = gMap.addMarker(new MarkerOptions()
                    .position(new LatLng(mLastLocation.getLatitude(), mLastLocation.getLongitude()))
                    .icon(BitmapDescriptorFactory.fromResource(R.drawable.me))
                    .title("Your Here"));
            //mHashMap.put(Your_Marker, "91");
        }
    }

    // Asynchronously loads a shop's logo, then adds a marker for it and records
    // its branch id (lat_long[0]) for click handling.
    // NOTE(review): assumes the lat_long array layout: [0]=branch id, [2]/[3]=title
    // parts, [6]/[7]=lat/lng, [9]=logo filename — confirm against the API response.
    public void setMarker(final String[] lat_long)
    {
        final Marker[] shop = new Marker[1];
        Glide.with(this).load(ConfigData.Logo + lat_long[9])
                .asBitmap()
                .fitCenter()
                .diskCacheStrategy(DiskCacheStrategy.ALL)
                .override(60, 60)
                .into(new SimpleTarget<Bitmap>() {
                    @Override
                    public void onResourceReady(Bitmap bitmap, GlideAnimation<? super Bitmap> glideAnimation) {
                        BitmapDescriptor icon = BitmapDescriptorFactory.fromBitmap(bitmap);
                        shop[0] = gMap.addMarker(new MarkerOptions()
                                .position(new LatLng(Double.parseDouble(lat_long[6]), Double.parseDouble(lat_long[7])))
                                .icon(icon)
                                .title(lat_long[2] + " : " + lat_long[3]));
                        mHashMap.put(shop[0], lat_long[0]);
                    }
                });
    }

    // Fetches nearby shops for the given location and adds one marker per result.
    // Dismisses the progress dialog on both success and failure.
    public void NearData(Location location)
    {
        final Near_API near_api = restAdapter.create(Near_API.class);
        near_api.Get_NEAR_API(Store_data.getString("api_key"), location.getLatitude() + "", location.getLongitude() + "", new Callback<Near_Model>() {
            @Override
            public void success(Near_Model result, Response response) {
                if (!result.getError() && result.getData().length > 0) {
                    String[][] TempData = result.getData();
                    for (int i = 0; i < TempData.length; i++)
                    {
                        //  Log.e(" Map ",""+i);
                        String[] lat_long = TempData[i];
                        setMarker(lat_long);
                    }
                    pDialog.cancel();
                }
                else
                {
                    pDialog.cancel();
                    show_failure(result.getMessage());
                    Log.e(" TAG ", "error");
                }
            }

            @Override
            public void failure(RetrofitError error) {
                pDialog.cancel();
                show_failure(error.getMessage());
                Log.e(" TAG ", "failure ");
            }
        });
    }

    @Override
    public void onConnectionSuspended(int i) {
        // Attempt to re-establish the Google API connection.
        mGoogleApiClient.connect();
    }

    @Override
    public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
        Log.e(" Map ", "Connection failed: ConnectionResult.getErrorCode() = "
                + connectionResult.getErrorCode());
    }

    @Override
    public void onLocationChanged(Location location) {
        mLastLocation = location;
        Toast.makeText(getApplicationContext(), "Location changed!",
                Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onStart() {
        super.onStart();
        if (mGoogleApiClient != null) {
            mGoogleApiClient.connect();
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        // NOTE(review): no null check here, unlike onStart() — confirm
        // mGoogleApiClient is always initialized by onCreate() before onStop().
        mGoogleApiClient.disconnect();
    }

    // MapView lifecycle forwarding (required by the Maps SDK when using MapView
    // directly instead of a MapFragment).
    // NOTE(review): onSaveInstanceState() is not forwarded to mMap — confirm intent.
    @Override
    public void onResume() {
        super.onResume();
        mMap.onResume();
        if (mGoogleApiClient.isConnected() && mRequestingLocationUpdates) {
            startLocationUpdates();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        mMap.onPause();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mMap.onDestroy();
    }

    @Override
    public void onLowMemory() {
        super.onLowMemory();
        mMap.onLowMemory();
    }

    @Override
    public void onMapReady(GoogleMap googleMap) {
        // Do something with Google Map
        MapsInitializer.initialize(this);
        gMap = googleMap;
        gMap.setOnMyLocationButtonClickListener(this);
        gMap.setOnMarkerClickListener(this);
        if (ActivityCompat.checkSelfPermission(this, android.Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, android.Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
            // TODO: Consider calling
            //    ActivityCompat#requestPermissions
            // here to request the missing permissions, and then overriding
            //   public void onRequestPermissionsResult(int requestCode, String[] permissions,
            //                                          int[] grantResults)
            // to handle the case where the user grants the permission. See the documentation
            // for ActivityCompat#requestPermissions for more details.
            return;
        }
        gMap.setMyLocationEnabled(true);
    }

    @Override
    public boolean onMyLocationButtonClick() {
        Log.e(" Shop ", "onMyLocationButtonClick");
        // Return false so that we don't consume the event and the default behavior still occurs
        // (the camera animates to the user's current position).
        return false;
    }

    // Opens the branch detail screen for the tapped shop marker.
    @Override
    public boolean onMarkerClick(Marker marker) {
        String id_branch = mHashMap.get(marker);
        Log.e(" Shop ", "onMarkerClick " + id_branch);
        Intent intent = new Intent(this, BookBranch.class);
        intent.putExtra("id_branch", id_branch);
        startActivity(intent);
        return true;
    }

    @Override
    public void onBackPressed() {
        stopLocationUpdates();
        finish();
    }

    // NOTE(review): this finishes the activity for ANY menu item without checking
    // item.getItemId() (e.g. android.R.id.home) — confirm the toolbar only has
    // the home/up button.
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        stopLocationUpdates();
        finish();
        return true;
    }

    // Shows an error dialog with the given message.
    public void show_failure(String message)
    {
        new SweetAlertDialog(this, SweetAlertDialog.ERROR_TYPE)
                .setTitleText(message)
                .show();
    }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.LocalTimeOffset.Gap;
import org.elasticsearch.common.LocalTimeOffset.Overlap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.unit.TimeValue;
import java.io.IOException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.IsoFields;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalQueries;
import java.time.zone.ZoneOffsetTransition;
import java.time.zone.ZoneRules;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
/**
* A strategy for rounding milliseconds since epoch.
* <p>
* There are two implementations for rounding.
* The first one requires a date time unit and rounds to the supplied date time unit (i.e. quarter of year, day of month).
* The second one allows you to specify an interval to round to.
* <p>
* See <a href="https://davecturner.github.io/2019/04/14/timezone-rounding.html">this</a>
* blog for some background reading. Its super interesting and the links are
* a comedy gold mine. If you like time zones. Or hate them.
*/
public abstract class Rounding implements Writeable {
/**
 * Calendar units that rounding can be performed on. Each constant knows how to
 * round a millis-since-epoch value down to the start of its unit (in UTC) and
 * how far back a {@link LocalTimeOffset} lookup must reach to cover values it
 * may have rounded down past.
 */
public enum DateTimeUnit {
    WEEK_OF_WEEKYEAR(
        (byte) 1,
        "week",
        IsoFields.WEEK_OF_WEEK_BASED_YEAR,
        true,
        TimeUnit.DAYS.toMillis(7)
    ) {
        private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(7);

        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundWeekOfWeekYear(utcMillis);
        }

        @Override
        long extraLocalOffsetLookup() {
            return extraLocalOffsetLookup;
        }
    },
    YEAR_OF_CENTURY(
        (byte) 2,
        "year",
        ChronoField.YEAR_OF_ERA,
        false,
        12 // months per unit (not millis-based)
    ) {
        // A year is at most 366 days long.
        private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(366);

        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundYear(utcMillis);
        }

        @Override
        long extraLocalOffsetLookup() {
            return extraLocalOffsetLookup;
        }
    },
    QUARTER_OF_YEAR(
        (byte) 3,
        "quarter",
        IsoFields.QUARTER_OF_YEAR,
        false,
        3 // months per unit (not millis-based)
    ) {
        // A quarter is at most 92 days long.
        private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(92);

        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundQuarterOfYear(utcMillis);
        }

        @Override
        long extraLocalOffsetLookup() {
            return extraLocalOffsetLookup;
        }
    },
    MONTH_OF_YEAR(
        (byte) 4,
        "month",
        ChronoField.MONTH_OF_YEAR,
        false,
        1 // months per unit (not millis-based)
    ) {
        // A month is at most 31 days long.
        private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(31);

        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundMonthOfYear(utcMillis);
        }

        @Override
        long extraLocalOffsetLookup() {
            return extraLocalOffsetLookup;
        }
    },
    DAY_OF_MONTH(
        (byte) 5,
        "day",
        ChronoField.DAY_OF_MONTH,
        true,
        ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis()
    ) {
        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundFloor(utcMillis, ratio);
        }

        @Override
        long extraLocalOffsetLookup() {
            return ratio;
        }
    },
    HOUR_OF_DAY(
        (byte) 6,
        "hour",
        ChronoField.HOUR_OF_DAY,
        true,
        ChronoField.HOUR_OF_DAY.getBaseUnit().getDuration().toMillis()
    ) {
        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundFloor(utcMillis, ratio);
        }

        @Override
        long extraLocalOffsetLookup() {
            return ratio;
        }
    },
    MINUTES_OF_HOUR(
        (byte) 7,
        "minute",
        ChronoField.MINUTE_OF_HOUR,
        true,
        ChronoField.MINUTE_OF_HOUR.getBaseUnit().getDuration().toMillis()
    ) {
        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundFloor(utcMillis, ratio);
        }

        @Override
        long extraLocalOffsetLookup() {
            return ratio;
        }
    },
    SECOND_OF_MINUTE(
        (byte) 8,
        "second",
        ChronoField.SECOND_OF_MINUTE,
        true,
        ChronoField.SECOND_OF_MINUTE.getBaseUnit().getDuration().toMillis()
    ) {
        @Override
        long roundFloor(long utcMillis) {
            return DateUtils.roundFloor(utcMillis, ratio);
        }

        @Override
        long extraLocalOffsetLookup() {
            return ratio;
        }
    };

    // Wire id used by resolve(byte) for (de)serialization; must stay stable.
    private final byte id;
    private final TemporalField field;
    private final boolean isMillisBased;
    private final String shortName;
    /**
     * ratio to milliseconds if isMillisBased == true or to month otherwise
     */
    protected final long ratio;

    DateTimeUnit(byte id, String shortName, TemporalField field, boolean isMillisBased, long ratio) {
        this.id = id;
        this.shortName = shortName;
        this.field = field;
        this.isMillisBased = isMillisBased;
        this.ratio = ratio;
    }

    /**
     * This rounds down the supplied milliseconds since the epoch down to the next unit. In order to retain performance this method
     * should be as fast as possible and not try to convert dates to java-time objects if possible
     *
     * @param utcMillis the milliseconds since the epoch
     * @return the rounded down milliseconds since the epoch
     */
    abstract long roundFloor(long utcMillis);

    /**
     * When looking up {@link LocalTimeOffset} go this many milliseconds
     * in the past from the minimum millis since epoch that we plan to
     * look up so that we can see transitions that we might have rounded
     * down beyond.
     */
    abstract long extraLocalOffsetLookup();

    /** Returns the stable byte id used on the wire. */
    public byte getId() {
        return id;
    }

    public TemporalField getField() {
        return field;
    }

    /** Resolves a unit from its lowercase short name, e.g. {@code "week"}. */
    public static DateTimeUnit resolve(String name) {
        return DateTimeUnit.valueOf(name.toUpperCase(Locale.ROOT));
    }

    public String shortName() {
        return shortName;
    }

    /**
     * Resolves a unit from its wire id.
     *
     * @throws ElasticsearchException if the id is unknown
     */
    public static DateTimeUnit resolve(byte id) {
        switch (id) {
            case 1: return WEEK_OF_WEEKYEAR;
            case 2: return YEAR_OF_CENTURY;
            case 3: return QUARTER_OF_YEAR;
            case 4: return MONTH_OF_YEAR;
            case 5: return DAY_OF_MONTH;
            case 6: return HOUR_OF_DAY;
            case 7: return MINUTES_OF_HOUR;
            case 8: return SECOND_OF_MINUTE;
            default: throw new ElasticsearchException("Unknown date time unit id [" + id + "]");
        }
    }
}
/** Writes the subclass-specific portion of this rounding's serialized form. */
public abstract void innerWriteTo(StreamOutput out) throws IOException;

@Override
public void writeTo(StreamOutput out) throws IOException {
    // Discriminator byte first so readers can dispatch to the right subclass.
    out.writeByte(id());
    innerWriteTo(out);
}

/** A byte uniquely identifying the concrete {@link Rounding} implementation. */
public abstract byte id();
/**
 * A strategy for rounding milliseconds since epoch, pre-computed by one of
 * the {@code prepare*} methods for repeated use.
 */
public interface Prepared {
    /**
     * Rounds the given value.
     */
    long round(long utcMillis);
    /**
     * Given the rounded value (which was potentially generated by
     * {@link #round(long)}, returns the next rounding value. For
     * example, with interval based rounding, if the interval is
     * 3, {@code nextRoundValue(6) = 9}.
     */
    long nextRoundingValue(long utcMillis);
    /**
     * Given the rounded value, returns the size between this value and the
     * next rounded value in specified units if possible.
     */
    double roundingSize(long utcMillis, DateTimeUnit timeUnit);
}
/**
 * Prepare to round many times.
 *
 * @param minUtcMillis the smallest millis-since-epoch value expected
 * @param maxUtcMillis the largest millis-since-epoch value expected
 */
public abstract Prepared prepare(long minUtcMillis, long maxUtcMillis);

/**
 * Prepare to round many dates over an unknown range. Prefer
 * {@link #prepare(long, long)} if you can find the range because
 * it'll be much more efficient.
 */
public abstract Prepared prepareForUnknown();

/**
 * Prepare rounding using java time classes. Package private for testing.
 */
abstract Prepared prepareJavaTime();
/**
 * Rounds the given value.
 * @deprecated Prefer {@link #prepare} and then {@link Prepared#round(long)}
 */
@Deprecated
public final long round(long utcMillis) {
    // One-shot convenience: prepares for the single value and rounds it.
    return prepare(utcMillis, utcMillis).round(utcMillis);
}

/**
 * Given the rounded value (which was potentially generated by
 * {@link #round(long)}, returns the next rounding value. For
 * example, with interval based rounding, if the interval is
 * 3, {@code nextRoundValue(6) = 9}.
 * @deprecated Prefer {@link #prepare} and then {@link Prepared#nextRoundingValue(long)}
 */
@Deprecated
public final long nextRoundingValue(long utcMillis) {
    return prepare(utcMillis, utcMillis).nextRoundingValue(utcMillis);
}
/**
 * How "offset" this rounding is from the traditional "start" of the period.
 * @deprecated We're in the process of abstracting offset *into* Rounding
 * so keep any usage to migratory shims
 */
@Deprecated
public abstract long offset();

/**
 * Strip the {@code offset} from these bounds.
 */
public abstract Rounding withoutOffset();

// Concrete implementations must be value-comparable.
@Override
public abstract boolean equals(Object obj);

@Override
public abstract int hashCode();

/** Creates a builder for calendar-unit based rounding (weeks, months, ...). */
public static Builder builder(DateTimeUnit unit) {
    return new Builder(unit);
}

/** Creates a builder for fixed-millisecond-interval based rounding. */
public static Builder builder(TimeValue interval) {
    return new Builder(interval);
}
/**
 * Fluent builder for {@link Rounding} instances. The base rounding is either a
 * calendar {@link DateTimeUnit} or a fixed millisecond interval (exactly one of
 * the two), optionally combined with a time zone and a millisecond offset.
 */
public static class Builder {
    private final DateTimeUnit unit;
    // Fixed interval in milliseconds, or -1 when unit-based.
    private final long interval;
    private ZoneId timeZone = ZoneOffset.UTC;
    private long offset = 0;

    public Builder(DateTimeUnit unit) {
        this.unit = unit;
        this.interval = -1; // sentinel: calendar-unit based rounding
    }

    public Builder(TimeValue interval) {
        this.unit = null;
        if (interval.millis() < 1) {
            throw new IllegalArgumentException("Zero or negative time interval not supported");
        }
        this.interval = interval.millis();
    }

    /**
     * Sets the time zone the rounding is performed in.
     *
     * @throws IllegalArgumentException if {@code timeZone} is null
     */
    public Builder timeZone(ZoneId timeZone) {
        if (timeZone == null) {
            throw new IllegalArgumentException("Setting null as timezone is not supported");
        }
        this.timeZone = timeZone;
        return this;
    }

    /**
     * Sets the offset of this rounding from the normal beginning of the interval. Use this
     * to start days at 6am or months on the 15th.
     * @param offset the offset, in milliseconds
     */
    public Builder offset(long offset) {
        this.offset = offset;
        return this;
    }

    public Rounding build() {
        Rounding rounding;
        if (unit != null) {
            rounding = new TimeUnitRounding(unit, timeZone);
        } else {
            rounding = new TimeIntervalRounding(interval, timeZone);
        }
        if (offset != 0) {
            // A non-zero offset wraps the base rounding rather than changing it.
            rounding = new OffsetRounding(rounding, offset);
        }
        return rounding;
    }
}
/**
 * Base class for {@link Prepared} implementations that can speed themselves up
 * by pre-computing an array of "round down" points.
 * NOTE(review): non-static inner class; it does not appear to read the enclosing
 * Rounding instance — could likely be static, confirm ArrayRounding's needs.
 */
private abstract class PreparedRounding implements Prepared {
    /**
     * Attempt to build a {@link Prepared} implementation that relies on pre-calculated
     * "round down" points. If there would be more than {@code max} points then return
     * the original implementation, otherwise return the new, faster implementation.
     */
    protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) {
        long[] values = new long[1];
        long rounded = round(minUtcMillis);
        int i = 0;
        values[i++] = rounded;
        // Walk forward one rounding point at a time until we pass maxUtcMillis,
        // growing the array as needed; give up and keep `this` once max is hit.
        while ((rounded = nextRoundingValue(rounded)) <= maxUtcMillis) {
            if (i >= max) {
                return this;
            }
            /*
             * We expect a time in the last transition (rounded - 1) to round
             * to the last value we calculated. If it doesn't then we're
             * probably doing something wrong here....
             */
            assert values[i - 1] == round(rounded - 1);
            values = ArrayUtil.grow(values, i + 1);
            values[i++] = rounded;
        }
        return new ArrayRounding(values, i, this);
    }
}
/**
* Rounds down to the start of a calendar unit ({@link DateTimeUnit}) in a
* given time zone. Prefers fast fixed-offset implementations when the zone
* is effectively fixed over the prepared range and falls back to java.time
* otherwise.
*/
static class TimeUnitRounding extends Rounding {
static final byte ID = 1;
private final DateTimeUnit unit;
private final ZoneId timeZone;
// True for units whose base duration is longer than one hour (day and up);
// those units round down to local midnight rather than within the day.
private final boolean unitRoundsToMidnight;
TimeUnitRounding(DateTimeUnit unit, ZoneId timeZone) {
this.unit = unit;
this.timeZone = timeZone;
this.unitRoundsToMidnight = this.unit.field.getBaseUnit().getDuration().toMillis() > 3600000L;
}
// Deserialization constructor; inverse of innerWriteTo.
TimeUnitRounding(StreamInput in) throws IOException {
this(DateTimeUnit.resolve(in.readByte()), in.readZoneId());
}
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeByte(unit.getId());
out.writeZoneId(timeZone);
}
@Override
public byte id() {
return ID;
}
/**
* Truncate {@code localDateTime} down to the first instant of {@code unit}
* in local time (no zone rules applied here).
*/
private LocalDateTime truncateLocalDateTime(LocalDateTime localDateTime) {
switch (unit) {
case SECOND_OF_MINUTE:
return localDateTime.withNano(0);
case MINUTES_OF_HOUR:
return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonthValue(), localDateTime.getDayOfMonth(),
localDateTime.getHour(), localDateTime.getMinute(), 0, 0);
case HOUR_OF_DAY:
return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonth(), localDateTime.getDayOfMonth(),
localDateTime.getHour(), 0, 0);
case DAY_OF_MONTH:
LocalDate localDate = localDateTime.query(TemporalQueries.localDate());
return localDate.atStartOfDay();
case WEEK_OF_WEEKYEAR:
// Snap to midnight, then back to the first day of the week.
return LocalDateTime.of(localDateTime.toLocalDate(), LocalTime.MIDNIGHT).with(ChronoField.DAY_OF_WEEK, 1);
case MONTH_OF_YEAR:
return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonthValue(), 1, 0, 0);
case QUARTER_OF_YEAR:
return LocalDateTime.of(localDateTime.getYear(), localDateTime.getMonth().firstMonthOfQuarter(), 1, 0, 0);
case YEAR_OF_CENTURY:
return LocalDateTime.of(LocalDate.of(localDateTime.getYear(), 1, 1), LocalTime.MIDNIGHT);
default:
throw new IllegalArgumentException("NOT YET IMPLEMENTED for unit " + unit);
}
}
@Override
public Prepared prepare(long minUtcMillis, long maxUtcMillis) {
/*
 * 128 is a power of two that isn't huge. We might be able to do
 * better if the limit was based on the actual type of prepared
 * rounding but this'll do for now.
 */
return prepareOffsetOrJavaTimeRounding(minUtcMillis, maxUtcMillis).maybeUseArray(minUtcMillis, maxUtcMillis, 128);
}
// Chooses the cheapest correct prepared implementation for the range:
// fixed-offset when the zone doesn't transition in range, else lookup-based,
// else java.time when the range is too long to build a lookup.
private TimeUnitPreparedRounding prepareOffsetOrJavaTimeRounding(long minUtcMillis, long maxUtcMillis) {
long minLookup = minUtcMillis - unit.extraLocalOffsetLookup();
long maxLookup = maxUtcMillis;
long unitMillis = 0;
if (false == unitRoundsToMidnight) {
/*
* Units that round to midnight can round down from two
* units worth of millis in the future to find the
* nextRoundingValue.
*/
unitMillis = unit.field.getBaseUnit().getDuration().toMillis();
maxLookup += 2 * unitMillis;
}
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(timeZone, minLookup, maxLookup);
if (lookup == null) {
// Range too long, just use java.time
return prepareJavaTime();
}
LocalTimeOffset fixedOffset = lookup.fixedInRange(minLookup, maxLookup);
if (fixedOffset != null) {
// The time zone is effectively fixed
if (unitRoundsToMidnight) {
return new FixedToMidnightRounding(fixedOffset);
}
return new FixedNotToMidnightRounding(fixedOffset, unitMillis);
}
if (unitRoundsToMidnight) {
return new ToMidnightRounding(lookup);
}
return new NotToMidnightRounding(lookup, unitMillis);
}
@Override
public Prepared prepareForUnknown() {
// Without a known range we can only take the fast path when the zone
// has a single fixed offset for all time.
LocalTimeOffset offset = LocalTimeOffset.fixedOffset(timeZone);
if (offset != null) {
if (unitRoundsToMidnight) {
return new FixedToMidnightRounding(offset);
}
return new FixedNotToMidnightRounding(offset, unit.field.getBaseUnit().getDuration().toMillis());
}
return prepareJavaTime();
}
@Override
TimeUnitPreparedRounding prepareJavaTime() {
if (unitRoundsToMidnight) {
return new JavaTimeToMidnightRounding();
}
return new JavaTimeNotToMidnightRounding(unit.field.getBaseUnit().getDuration().toMillis());
}
@Override
public long offset() {
// Offsets are applied by wrapping in OffsetRounding, never here.
return 0;
}
@Override
public Rounding withoutOffset() {
return this;
}
@Override
public int hashCode() {
return Objects.hash(unit, timeZone);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
TimeUnitRounding other = (TimeUnitRounding) obj;
return Objects.equals(unit, other.unit) && Objects.equals(timeZone, other.timeZone);
}
@Override
public String toString() {
return "Rounding[" + unit + " in " + timeZone + "]";
}
/**
* Shared base adding a unit-aware {@code roundingSize} to the prepared
* roundings below.
*/
private abstract class TimeUnitPreparedRounding extends PreparedRounding {
@Override
public double roundingSize(long utcMillis, DateTimeUnit timeUnit) {
if (timeUnit.isMillisBased == unit.isMillisBased) {
return (double) unit.ratio / timeUnit.ratio;
} else {
if (unit.isMillisBased == false) {
// Month-like bucket: measure the actual bucket length at this instant.
return (double) (nextRoundingValue(utcMillis) - utcMillis) / timeUnit.ratio;
} else {
throw new IllegalArgumentException("Cannot use month-based rate unit [" + timeUnit.shortName +
"] with non-month based calendar interval histogram [" + unit.shortName +
"] only week, day, hour, minute and second are supported for this histogram");
}
}
}
}
/**
* Midnight-snapping rounding for an effectively fixed time zone.
*/
private class FixedToMidnightRounding extends TimeUnitPreparedRounding {
private final LocalTimeOffset offset;
FixedToMidnightRounding(LocalTimeOffset offset) {
this.offset = offset;
}
@Override
public long round(long utcMillis) {
return offset.localToUtcInThisOffset(unit.roundFloor(offset.utcToLocalTime(utcMillis)));
}
@Override
public long nextRoundingValue(long utcMillis) {
// TODO this is used in date range's collect so we should optimize it too
return new JavaTimeToMidnightRounding().nextRoundingValue(utcMillis);
}
}
/**
* Sub-day rounding for an effectively fixed time zone.
*/
private class FixedNotToMidnightRounding extends TimeUnitPreparedRounding {
private final LocalTimeOffset offset;
private final long unitMillis;
FixedNotToMidnightRounding(LocalTimeOffset offset, long unitMillis) {
this.offset = offset;
this.unitMillis = unitMillis;
}
@Override
public long round(long utcMillis) {
return offset.localToUtcInThisOffset(unit.roundFloor(offset.utcToLocalTime(utcMillis)));
}
@Override
public final long nextRoundingValue(long utcMillis) {
// With a fixed offset, the next bucket starts exactly one unit later.
return round(utcMillis + unitMillis);
}
}
/**
* Midnight-snapping rounding for a zone with transitions in range. Ambiguous
* or missing local times are resolved via the Strategy callbacks below.
*/
private class ToMidnightRounding extends TimeUnitPreparedRounding implements LocalTimeOffset.Strategy {
private final LocalTimeOffset.Lookup lookup;
ToMidnightRounding(LocalTimeOffset.Lookup lookup) {
this.lookup = lookup;
}
@Override
public long round(long utcMillis) {
LocalTimeOffset offset = lookup.lookup(utcMillis);
return offset.localToUtc(unit.roundFloor(offset.utcToLocalTime(utcMillis)), this);
}
@Override
public long nextRoundingValue(long utcMillis) {
// TODO this is actually used date range's collect so we should optimize it
return new JavaTimeToMidnightRounding().nextRoundingValue(utcMillis);
}
@Override
public long inGap(long localMillis, Gap gap) {
return gap.startUtcMillis();
}
@Override
public long beforeGap(long localMillis, Gap gap) {
return gap.previous().localToUtc(localMillis, this);
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
return overlap.previous().localToUtc(localMillis, this);
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
return overlap.previous().localToUtc(localMillis, this);
}
@Override
protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) {
if (lookup.anyMoveBackToPreviousDay()) {
// The array fast path can't represent zones where rounding may move a
// time back to the previous day, so keep this implementation.
return this;
}
return super.maybeUseArray(minUtcMillis, maxUtcMillis, max);
}
}
/**
* Sub-day rounding for a zone with transitions in range.
*/
private class NotToMidnightRounding extends AbstractNotToMidnightRounding implements LocalTimeOffset.Strategy {
private final LocalTimeOffset.Lookup lookup;
NotToMidnightRounding(LocalTimeOffset.Lookup lookup, long unitMillis) {
super(unitMillis);
this.lookup = lookup;
}
@Override
public long round(long utcMillis) {
LocalTimeOffset offset = lookup.lookup(utcMillis);
long roundedLocalMillis = unit.roundFloor(offset.utcToLocalTime(utcMillis));
return offset.localToUtc(roundedLocalMillis, this);
}
@Override
public long inGap(long localMillis, Gap gap) {
// Round from just before the start of the gap
return gap.previous().localToUtc(unit.roundFloor(gap.firstMissingLocalTime() - 1), this);
}
@Override
public long beforeGap(long localMillis, Gap gap) {
return inGap(localMillis, gap);
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
// Convert the overlap at this offset because that'll produce the largest result.
return overlap.localToUtcInThisOffset(localMillis);
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
if (overlap.firstNonOverlappingLocalTime() - overlap.firstOverlappingLocalTime() >= unitMillis) {
return overlap.localToUtcInThisOffset(localMillis);
}
return overlap.previous().localToUtc(localMillis, this); // This is mostly for Asia/Lord_Howe
}
}
/**
* Midnight-snapping rounding implemented directly with java.time; correct in
* any zone but slower than the implementations above.
*/
private class JavaTimeToMidnightRounding extends TimeUnitPreparedRounding {
@Override
public long round(long utcMillis) {
LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone);
LocalDateTime localMidnight = truncateLocalDateTime(localDateTime);
return firstTimeOnDay(localMidnight);
}
@Override
public long nextRoundingValue(long utcMillis) {
LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone);
LocalDateTime earlierLocalMidnight = truncateLocalDateTime(localDateTime);
LocalDateTime localMidnight = nextRelevantMidnight(earlierLocalMidnight);
return firstTimeOnDay(localMidnight);
}
@Override
protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) {
// We don't have the right information needed to know if this is safe for this time zone so we always use java rounding
return this;
}
// Resolves a local midnight to an epoch-millis instant, honoring DST gaps.
private long firstTimeOnDay(LocalDateTime localMidnight) {
assert localMidnight.toLocalTime().equals(LocalTime.of(0, 0, 0)) : "firstTimeOnDay should only be called at midnight";
// Now work out what localMidnight actually means
final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(localMidnight);
if (currentOffsets.isEmpty() == false) {
// There is at least one midnight on this day, so choose the first
final ZoneOffset firstOffset = currentOffsets.get(0);
final OffsetDateTime offsetMidnight = localMidnight.atOffset(firstOffset);
return offsetMidnight.toInstant().toEpochMilli();
} else {
// There were no midnights on this day, so we must have entered the day via an offset transition.
// Use the time of the transition as it is the earliest time on the right day.
ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(localMidnight);
return zoneOffsetTransition.getInstant().toEpochMilli();
}
}
// Advances a local midnight by exactly one unit (day/week/month/quarter/year).
private LocalDateTime nextRelevantMidnight(LocalDateTime localMidnight) {
assert localMidnight.toLocalTime().equals(LocalTime.MIDNIGHT) : "nextRelevantMidnight should only be called at midnight";
switch (unit) {
case DAY_OF_MONTH:
return localMidnight.plus(1, ChronoUnit.DAYS);
case WEEK_OF_WEEKYEAR:
return localMidnight.plus(7, ChronoUnit.DAYS);
case MONTH_OF_YEAR:
return localMidnight.plus(1, ChronoUnit.MONTHS);
case QUARTER_OF_YEAR:
return localMidnight.plus(3, ChronoUnit.MONTHS);
case YEAR_OF_CENTURY:
return localMidnight.plus(1, ChronoUnit.YEARS);
default:
throw new IllegalArgumentException("Unknown round-to-midnight unit: " + unit);
}
}
}
/**
* Sub-day rounding implemented directly with java.time; correct in any zone
* but slower than the implementations above.
*/
private class JavaTimeNotToMidnightRounding extends AbstractNotToMidnightRounding {
JavaTimeNotToMidnightRounding(long unitMillis) {
super(unitMillis);
}
@Override
public long round(long utcMillis) {
Instant instant = Instant.ofEpochMilli(utcMillis);
final ZoneRules rules = timeZone.getRules();
while (true) {
final Instant truncatedTime = truncateAsLocalTime(instant, rules);
final ZoneOffsetTransition previousTransition = rules.previousTransition(instant);
if (previousTransition == null) {
// truncateAsLocalTime cannot have failed if there were no previous transitions
return truncatedTime.toEpochMilli();
}
Instant previousTransitionInstant = previousTransition.getInstant();
if (truncatedTime != null && previousTransitionInstant.compareTo(truncatedTime) < 1) {
return truncatedTime.toEpochMilli();
}
// There was a transition in between the input time and the truncated time. Return to the transition time and
// round that down instead.
instant = previousTransitionInstant.minusNanos(1_000_000);
}
}
// Truncates to the unit in local time; returns null when the truncated local
// time doesn't exist (fell into a DST gap) or precedes all valid offsets.
private Instant truncateAsLocalTime(Instant instant, final ZoneRules rules) {
assert unitRoundsToMidnight == false : "truncateAsLocalTime should not be called if unitRoundsToMidnight";
LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, timeZone);
final LocalDateTime truncatedLocalDateTime = truncateLocalDateTime(localDateTime);
final List<ZoneOffset> currentOffsets = rules.getValidOffsets(truncatedLocalDateTime);
if (currentOffsets.isEmpty() == false) {
// at least one possibilities - choose the latest one that's still no later than the input time
for (int offsetIndex = currentOffsets.size() - 1; offsetIndex >= 0; offsetIndex--) {
final Instant result = truncatedLocalDateTime.atOffset(currentOffsets.get(offsetIndex)).toInstant();
if (result.isAfter(instant) == false) {
return result;
}
}
assert false : "rounded time not found for " + instant + " with " + this;
return null;
} else {
// The chosen local time didn't happen. This means we were given a time in an hour (or a minute) whose start
// is missing due to an offset transition, so the time cannot be truncated.
return null;
}
}
}
/**
* Base for sub-day roundings: derives nextRoundingValue from round by
* stepping forward one unit, or two when a transition swallowed the first.
*/
private abstract class AbstractNotToMidnightRounding extends TimeUnitPreparedRounding {
protected final long unitMillis;
AbstractNotToMidnightRounding(long unitMillis) {
this.unitMillis = unitMillis;
}
@Override
public final long nextRoundingValue(long utcMillis) {
final long roundedAfterOneIncrement = round(utcMillis + unitMillis);
if (utcMillis < roundedAfterOneIncrement) {
return roundedAfterOneIncrement;
} else {
// One unit forward still rounded back to (or before) us; try two.
return round(utcMillis + 2 * unitMillis);
}
}
}
}
/**
* Rounds down to multiples of a fixed interval (in local time) in a given
* time zone. Like TimeUnitRounding, prefers fixed-offset implementations
* when the zone is effectively fixed and falls back to java.time otherwise.
*/
static class TimeIntervalRounding extends Rounding {
static final byte ID = 2;
private final long interval;
private final ZoneId timeZone;
TimeIntervalRounding(long interval, ZoneId timeZone) {
if (interval < 1)
throw new IllegalArgumentException("Zero or negative time interval not supported");
this.interval = interval;
this.timeZone = timeZone;
}
// Deserialization constructor; inverse of innerWriteTo.
TimeIntervalRounding(StreamInput in) throws IOException {
this(in.readVLong(), in.readZoneId());
}
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeVLong(interval);
out.writeZoneId(timeZone);
}
@Override
public byte id() {
return ID;
}
@Override
public Prepared prepare(long minUtcMillis, long maxUtcMillis) {
// Look up one extra interval before the range so round() of minUtcMillis
// can resolve the offset of its bucket start.
long minLookup = minUtcMillis - interval;
long maxLookup = maxUtcMillis;
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(timeZone, minLookup, maxLookup);
if (lookup == null) {
// Range too long to precompute offsets; fall back to java.time.
return prepareJavaTime();
}
LocalTimeOffset fixedOffset = lookup.fixedInRange(minLookup, maxLookup);
if (fixedOffset != null) {
return new FixedRounding(fixedOffset);
}
return new VariableRounding(lookup);
}
@Override
public Prepared prepareForUnknown() {
// Without a known range we can only take the fast path when the zone
// has a single fixed offset for all time.
LocalTimeOffset offset = LocalTimeOffset.fixedOffset(timeZone);
if (offset != null) {
return new FixedRounding(offset);
}
return prepareJavaTime();
}
@Override
Prepared prepareJavaTime() {
return new JavaTimeRounding();
}
@Override
public long offset() {
// Offsets are applied by wrapping in OffsetRounding, never here.
return 0;
}
@Override
public Rounding withoutOffset() {
return this;
}
@Override
public int hashCode() {
return Objects.hash(interval, timeZone);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
TimeIntervalRounding other = (TimeIntervalRounding) obj;
return Objects.equals(interval, other.interval) && Objects.equals(timeZone, other.timeZone);
}
@Override
public String toString() {
return "Rounding[" + interval + " in " + timeZone + "]";
}
/**
* Floor-divide {@code value} by {@code interval}; unlike plain {@code /}
* this rounds toward negative infinity for negative values.
*/
private long roundKey(long value, long interval) {
if (value < 0) {
return (value - interval + 1) / interval;
} else {
return value / interval;
}
}
/**
* Shared base adding an interval-based {@code roundingSize} to the
* prepared roundings below.
*/
private abstract class TimeIntervalPreparedRounding implements Prepared {
@Override
public double roundingSize(long utcMillis, DateTimeUnit timeUnit) {
if (timeUnit.isMillisBased) {
return (double) interval / timeUnit.ratio;
} else {
throw new IllegalArgumentException("Cannot use month-based rate unit [" + timeUnit.shortName +
"] with fixed interval based histogram, only week, day, hour, minute and second are supported for " +
"this histogram");
}
}
}
/**
 * Rounds to down inside of a time zone with an "effectively fixed"
 * time zone. A time zone can be "effectively fixed" if:
 * <ul>
 * <li>It is UTC</li>
 * <li>It is a fixed offset from UTC at all times (UTC-5, America/Phoenix)</li>
 * <li>It is fixed over the entire range of dates that will be rounded</li>
 * </ul>
 */
private class FixedRounding extends TimeIntervalPreparedRounding {
private final LocalTimeOffset offset;
FixedRounding(LocalTimeOffset offset) {
this.offset = offset;
}
@Override
public long round(long utcMillis) {
return offset.localToUtcInThisOffset(roundKey(offset.utcToLocalTime(utcMillis), interval) * interval);
}
@Override
public long nextRoundingValue(long utcMillis) {
// TODO this is used in date range's collect so we should optimize it too
return new JavaTimeRounding().nextRoundingValue(utcMillis);
}
}
/**
 * Rounds down inside of any time zone, even if it is not
 * "effectively fixed". See {@link FixedRounding} for a description of
 * "effectively fixed".
 */
private class VariableRounding extends TimeIntervalPreparedRounding implements LocalTimeOffset.Strategy {
private final LocalTimeOffset.Lookup lookup;
VariableRounding(LocalTimeOffset.Lookup lookup) {
this.lookup = lookup;
}
@Override
public long round(long utcMillis) {
LocalTimeOffset offset = lookup.lookup(utcMillis);
return offset.localToUtc(roundKey(offset.utcToLocalTime(utcMillis), interval) * interval, this);
}
@Override
public long nextRoundingValue(long utcMillis) {
// TODO this is used in date range's collect so we should optimize it too
return new JavaTimeRounding().nextRoundingValue(utcMillis);
}
@Override
public long inGap(long localMillis, Gap gap) {
return gap.startUtcMillis();
}
@Override
public long beforeGap(long localMillis, Gap gap) {
return gap.previous().localToUtc(localMillis, this);
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
// Convert the overlap at this offset because that'll produce the largest result.
return overlap.localToUtcInThisOffset(localMillis);
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
return overlap.previous().localToUtc(roundKey(overlap.firstNonOverlappingLocalTime() - 1, interval) * interval, this);
}
}
/**
 * Rounds down inside of any time zone using {@link LocalDateTime}
 * directly. It'll be slower than {@link VariableRounding} and much
 * slower than {@link FixedRounding}. We use it when we don't have an
 * "effectively fixed" time zone and we can't get a
 * {@link LocalTimeOffset.Lookup}. We might not be able to get one
 * because:
 * <ul>
 * <li>We don't know how to look up the minimum and maximum dates we
 * are going to round.</li>
 * <li>We expect to round over thousands and thousands of years worth
 * of dates with the same {@link Prepared} instance.</li>
 * </ul>
 */
private class JavaTimeRounding extends TimeIntervalPreparedRounding {
@Override
public long round(long utcMillis) {
final Instant utcInstant = Instant.ofEpochMilli(utcMillis);
final LocalDateTime rawLocalDateTime = LocalDateTime.ofInstant(utcInstant, timeZone);
// a millisecond value with the same local time, in UTC, as `utcMillis` has in `timeZone`
final long localMillis = utcMillis + timeZone.getRules().getOffset(utcInstant).getTotalSeconds() * 1000;
assert localMillis == rawLocalDateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
final long roundedMillis = roundKey(localMillis, interval) * interval;
final LocalDateTime roundedLocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(roundedMillis), ZoneOffset.UTC);
// Now work out what roundedLocalDateTime actually means
final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(roundedLocalDateTime);
if (currentOffsets.isEmpty() == false) {
// There is at least one instant with the desired local time. In general the desired result is
// the latest rounded time that's no later than the input time, but this could involve rounding across
// a timezone transition, which may yield the wrong result
final ZoneOffsetTransition previousTransition = timeZone.getRules().previousTransition(utcInstant.plusMillis(1));
for (int offsetIndex = currentOffsets.size() - 1; 0 <= offsetIndex; offsetIndex--) {
final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(offsetIndex));
final Instant offsetInstant = offsetTime.toInstant();
if (previousTransition != null && offsetInstant.isBefore(previousTransition.getInstant())) {
/*
 * Rounding down across the transition can yield the
 * wrong result. It's best to return to the transition
 * time and round that down.
 */
return round(previousTransition.getInstant().toEpochMilli() - 1);
}
if (utcInstant.isBefore(offsetTime.toInstant()) == false) {
return offsetInstant.toEpochMilli();
}
}
final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(0));
final Instant offsetInstant = offsetTime.toInstant();
assert false : this + " failed to round " + utcMillis + " down: " + offsetInstant + " is the earliest possible";
return offsetInstant.toEpochMilli(); // TODO or throw something?
} else {
// The desired time isn't valid because within a gap, so just return the start of the gap
ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(roundedLocalDateTime);
return zoneOffsetTransition.getInstant().toEpochMilli();
}
}
@Override
public long nextRoundingValue(long time) {
// Step one interval forward in local time, then map back to the zone.
int offsetSeconds = timeZone.getRules().getOffset(Instant.ofEpochMilli(time)).getTotalSeconds();
long millis = time + interval + offsetSeconds * 1000;
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC)
.withZoneSameLocal(timeZone)
.toInstant().toEpochMilli();
}
}
}
/**
 * Wraps another {@link Rounding} and shifts every rounded value by a fixed
 * number of milliseconds, so buckets can start e.g. at 6am instead of midnight.
 */
static class OffsetRounding extends Rounding {
    static final byte ID = 3;

    private final Rounding delegate;
    private final long offset;

    OffsetRounding(Rounding delegate, long offset) {
        this.delegate = delegate;
        this.offset = offset;
    }

    OffsetRounding(StreamInput in) throws IOException {
        // Versions before 7.6.0 will never send this type of rounding.
        this(Rounding.read(in), in.readZLong());
    }

    @Override
    public void innerWriteTo(StreamOutput out) throws IOException {
        delegate.writeTo(out);
        out.writeZLong(offset);
    }

    @Override
    public byte id() {
        return ID;
    }

    @Override
    public Prepared prepare(long minUtcMillis, long maxUtcMillis) {
        // Prepare the delegate over the shifted range so its fast paths line up.
        return shiftedBy(delegate.prepare(minUtcMillis - offset, maxUtcMillis - offset));
    }

    @Override
    public Prepared prepareForUnknown() {
        return shiftedBy(delegate.prepareForUnknown());
    }

    @Override
    Prepared prepareJavaTime() {
        return shiftedBy(delegate.prepareJavaTime());
    }

    /** Wrap a prepared delegate so every result is shifted by {@code offset}. */
    private Prepared shiftedBy(Prepared inner) {
        return new Prepared() {
            @Override
            public long round(long utcMillis) {
                return inner.round(utcMillis - offset) + offset;
            }

            @Override
            public long nextRoundingValue(long utcMillis) {
                return inner.nextRoundingValue(utcMillis - offset) + offset;
            }

            @Override
            public double roundingSize(long utcMillis, DateTimeUnit timeUnit) {
                return inner.roundingSize(utcMillis, timeUnit);
            }
        };
    }

    @Override
    public long offset() {
        return offset;
    }

    @Override
    public Rounding withoutOffset() {
        return delegate;
    }

    @Override
    public int hashCode() {
        return Objects.hash(delegate, offset);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        OffsetRounding that = (OffsetRounding) obj;
        return that.offset == offset && that.delegate.equals(delegate);
    }

    @Override
    public String toString() {
        return delegate + " offset by " + offset;
    }
}
/**
 * Deserialize a {@link Rounding} previously serialized with {@code writeTo}.
 *
 * @throws ElasticsearchException if the stream carries an unknown rounding id
 */
public static Rounding read(StreamInput in) throws IOException {
    final byte id = in.readByte();
    if (id == TimeUnitRounding.ID) {
        return new TimeUnitRounding(in);
    }
    if (id == TimeIntervalRounding.ID) {
        return new TimeIntervalRounding(in);
    }
    if (id == OffsetRounding.ID) {
        return new OffsetRounding(in);
    }
    throw new ElasticsearchException("unknown rounding id [" + id + "]");
}
/**
 * Implementation of {@link Prepared} using pre-calculated "round down" points.
 */
private static class ArrayRounding implements Prepared {
    /** Ascending round-down points; only the first {@code max} entries are valid. */
    private final long[] values;
    private final int max;
    /** Fallback used for operations the array can't answer. */
    private final Prepared delegate;

    private ArrayRounding(long[] values, int max, Prepared delegate) {
        this.values = values;
        this.max = max;
        this.delegate = delegate;
    }

    @Override
    public long round(long utcMillis) {
        assert values[0] <= utcMillis : "utcMillis must be after " + values[0];
        int found = Arrays.binarySearch(values, 0, max, utcMillis);
        assert found != -1 : "The insertion point is before the array! This should have tripped the assertion above.";
        assert -1 - found <= values.length : "This insertion point is after the end of the array.";
        if (found < 0) {
            // No exact hit: binarySearch returned (-insertionPoint - 1), so the
            // greatest value <= utcMillis sits just before the insertion point.
            found = -2 - found;
        }
        return values[found];
    }

    @Override
    public long nextRoundingValue(long utcMillis) {
        return delegate.nextRoundingValue(utcMillis);
    }

    @Override
    public double roundingSize(long utcMillis, DateTimeUnit timeUnit) {
        return delegate.roundingSize(utcMillis, timeUnit);
    }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.llap.registry.impl;
import com.google.common.annotations.VisibleForTesting;
import org.apache.curator.framework.recipes.atomic.AtomicValue;
import org.apache.curator.framework.recipes.atomic.DistributedAtomicLong;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import org.apache.curator.framework.recipes.cache.ChildData;
import org.apache.curator.framework.recipes.cache.PathChildrenCache;
import org.apache.curator.utils.CloseableUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.llap.LlapUtil;
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
import org.apache.hadoop.hive.llap.registry.LlapServiceInstance;
import org.apache.hadoop.hive.llap.registry.LlapServiceInstanceSet;
import org.apache.hadoop.hive.llap.registry.ServiceRegistry;
import org.apache.hadoop.hive.registry.impl.ServiceInstanceBase;
import org.apache.hadoop.hive.registry.impl.ZkRegistryBase;
import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
import org.apache.hadoop.registry.client.binding.RegistryUtils.ServiceRecordMarshal;
import org.apache.hadoop.registry.client.types.AddressTypes;
import org.apache.hadoop.registry.client.types.Endpoint;
import org.apache.hadoop.registry.client.types.ProtocolTypes;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LlapZookeeperRegistryImpl
extends ZkRegistryBase<LlapServiceInstance> implements ServiceRegistry<LlapServiceInstance> {
private static final Logger LOG = LoggerFactory.getLogger(LlapZookeeperRegistryImpl.class);
/**
* IPC endpoint names.
*/
private static final String IPC_SERVICES = "services";
private static final String IPC_MNG = "llapmng";
private static final String IPC_SHUFFLE = "shuffle";
private static final String IPC_LLAP = "llap";
private static final String IPC_OUTPUTFORMAT = "llapoutputformat";
private static final String IPC_EXTERNAL_LLAP = "externalllap";
// Prefix for the ZooKeeper namespace used by this registry.
private final static String NAMESPACE_PREFIX = "llap-";
// Prefix for the per-daemon slot znodes created under the workers path in register().
private static final String SLOT_PREFIX = "slot-";
// SASL login context used when the daemon itself connects to ZooKeeper.
private static final String SASL_LOGIN_CONTEXT_NAME = "LlapZooKeeperClient";
// ZK path/node names for coordinating config changes; presumably tracks a shared
// "window end" value via lockWindowEnd -- TODO confirm against their users.
private static final String CONFIG_CHANGE_PATH = "config-change";
private static final String CONFIG_CHANGE_NODE = "window-end";
// Znode marking this daemon's slot; created in register().
private SlotZnode slotZnode;
// Service record registered in ZooKeeper for this daemon; populated in register().
private ServiceRecord daemonZkRecord;
// to be used by clients of ServiceRegistry TODO: this is unnecessary
private DynamicServiceInstanceSet instances;
private DistributedAtomicLong lockWindowEnd;
/**
* Creates a ZooKeeper-backed LLAP service registry.
*
* @param instanceName registry id used to scope this cluster's registrations
* @param conf configuration supplying the ZK namespace and Kerberos settings
*/
public LlapZookeeperRegistryImpl(String instanceName, Configuration conf) {
super(instanceName, conf,
HiveConf.getVar(conf, ConfVars.LLAP_ZK_REGISTRY_NAMESPACE), NAMESPACE_PREFIX,
USER_SCOPE_PATH_PREFIX, WORKER_PREFIX, WORKER_GROUP,
// Use the SASL login context only when running inside the LLAP daemon.
LlapProxy.isDaemon() ? SASL_LOGIN_CONTEXT_NAME : null,
HiveConf.getVar(conf, ConfVars.LLAP_KERBEROS_PRINCIPAL),
HiveConf.getVar(conf, ConfVars.LLAP_KERBEROS_KEYTAB_FILE),
ConfVars.LLAP_VALIDATE_ACLS);
LOG.info("Llap Zookeeper Registry is enabled with registryid: " + instanceName);
}
/**
 * Endpoint for the LLAP daemon RPC service on this host.
 */
public Endpoint getRpcEndpoint() {
    int port = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_RPC_PORT);
    InetSocketAddress address = new InetSocketAddress(hostname, port);
    return RegistryTypeUtils.ipcEndpoint(IPC_LLAP, address);
}
/**
 * Endpoint for the YARN shuffle service on this host.
 */
public Endpoint getShuffleEndpoint() {
    int port = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_YARN_SHUFFLE_PORT);
    // HTTP today, but might not be
    return RegistryTypeUtils.inetAddrEndpoint(IPC_SHUFFLE, ProtocolTypes.PROTOCOL_TCP, hostname, port);
}
/**
 * Web endpoint for the daemon's services UI; uses http or https depending on
 * whether SSL is enabled for the LLAP web services.
 */
public Endpoint getServicesEndpoint() {
    int port = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_WEB_PORT);
    String scheme = HiveConf.getBoolVar(conf, ConfVars.LLAP_DAEMON_WEB_SSL) ? "https" : "http";
    try {
        URL serviceUrl = new URL(scheme, hostname, port, "");
        return RegistryTypeUtils.webEndpoint(IPC_SERVICES, serviceUrl.toURI());
    } catch (MalformedURLException e) {
        throw new RuntimeException(e);
    } catch (URISyntaxException e) {
        throw new RuntimeException("llap service URI for " + hostname + " is invalid", e);
    }
}
/**
 * Endpoint for the LLAP management RPC service on this host.
 */
public Endpoint getMngEndpoint() {
    int port = HiveConf.getIntVar(conf, ConfVars.LLAP_MANAGEMENT_RPC_PORT);
    return RegistryTypeUtils.ipcEndpoint(IPC_MNG, new InetSocketAddress(hostname, port));
}
/**
 * Endpoint for the LLAP output format service on this host.
 */
public Endpoint getOutputFormatEndpoint() {
    int port = HiveConf.getIntVar(conf, ConfVars.LLAP_DAEMON_OUTPUT_SERVICE_PORT);
    return RegistryTypeUtils.ipcEndpoint(IPC_OUTPUTFORMAT, new InetSocketAddress(hostname, port));
}
/**
 * Endpoint for external (cloud) client RPC, advertised on the public hostname.
 */
private Endpoint getExternalRpcEndpoint() {
    InetSocketAddress address = new InetSocketAddress(
        LlapUtil.getPublicHostname(),
        HiveConf.getIntVar(conf, ConfVars.LLAP_EXTERNAL_CLIENT_CLOUD_RPC_PORT));
    return RegistryTypeUtils.ipcEndpoint(IPC_EXTERNAL_LLAP, address);
}
  /**
   * Registers this daemon in ZooKeeper: builds the service record with all endpoints,
   * publishes LLAP config values into it, creates the per-instance slot znode, and
   * writes the record under the worker path.
   *
   * @return the unique worker identity used for this registration
   * @throws IOException if the slot znode cannot be created within the timeout or
   *         any other ZK failure occurs (non-IOException causes are wrapped)
   */
  @Override
  public String register() throws IOException {
    daemonZkRecord = new ServiceRecord();
    Endpoint rpcEndpoint = getRpcEndpoint();
    daemonZkRecord.addInternalEndpoint(rpcEndpoint);
    daemonZkRecord.addInternalEndpoint(getMngEndpoint());
    daemonZkRecord.addInternalEndpoint(getShuffleEndpoint());
    daemonZkRecord.addExternalEndpoint(getServicesEndpoint());
    daemonZkRecord.addInternalEndpoint(getOutputFormatEndpoint());
    // The external client endpoint only exists for cloud deployments.
    Endpoint externalRpcEndpoint = null;
    if (LlapUtil.isCloudDeployment(conf)) {
      externalRpcEndpoint = getExternalRpcEndpoint();
      daemonZkRecord.addExternalEndpoint(externalRpcEndpoint);
    }
    // Publish all LLAP-prefixed config entries into the record (filtering happens
    // inside populateConfigValues).
    populateConfigValues(this.conf);
    // Capacity values are published under registry-specific keys, separate from
    // the raw config entries above.
    Map<String, String> capacityValues = new HashMap<>(2);
    capacityValues.put(LlapRegistryService.LLAP_DAEMON_NUM_ENABLED_EXECUTORS,
        HiveConf.getVarWithoutType(conf, ConfVars.LLAP_DAEMON_NUM_EXECUTORS));
    capacityValues.put(LlapRegistryService.LLAP_DAEMON_TASK_SCHEDULER_ENABLED_WAIT_QUEUE_SIZE,
        HiveConf.getVarWithoutType(conf, ConfVars.LLAP_DAEMON_TASK_SCHEDULER_WAIT_QUEUE_SIZE));
    populateConfigValues(capacityValues.entrySet());
    String uniqueId = UNIQUE_ID.toString();
    long znodeCreationTimeout = 120; // seconds; used with TimeUnit.SECONDS below
    initializeWithoutRegisteringInternal();
    // Create a znode under the rootNamespace parent for this instance of the server
    try {
      slotZnode = new SlotZnode(
          zooKeeperClient, workersPath, SLOT_PREFIX, WORKER_PREFIX, uniqueId);
      if (!slotZnode.start(znodeCreationTimeout, TimeUnit.SECONDS)) {
        throw new Exception(
            "Max znode creation wait time: " + znodeCreationTimeout + "s exhausted");
      }
    } catch (Exception e) {
      // Clean up partially-created state before propagating; only IOExceptions are
      // rethrown as-is, everything else is wrapped.
      LOG.error("Unable to create a znode for this server instance", e);
      CloseableUtils.closeQuietly(slotZnode);
      super.stop();
      throw (e instanceof IOException) ? (IOException)e : new IOException(e);
    }
    registerServiceRecord(daemonZkRecord, uniqueId);
    if (LlapUtil.isCloudDeployment(conf)) {
      LOG.info("Registered node. Created a znode on ZooKeeper for LLAP instance: rpc: {}, external client rpc : {} "
          + "shuffle: {}, webui: {}, mgmt: {}, znodePath: {}", rpcEndpoint, externalRpcEndpoint,
          getShuffleEndpoint(), getServicesEndpoint(), getMngEndpoint(), getRegistrationZnodePath());
    } else {
      LOG.info("Registered node. Created a znode on ZooKeeper for LLAP instance: rpc: {}, "
          + "shuffle: {}, webui: {}, mgmt: {}, znodePath: {}", rpcEndpoint, getShuffleEndpoint(),
          getServicesEndpoint(), getMngEndpoint(), getRegistrationZnodePath());
    }
    return uniqueId;
  }
private void populateConfigValues(Iterable<Map.Entry<String, String>> attributes) {
for (Map.Entry<String, String> kv : attributes) {
if (kv.getKey().startsWith(HiveConf.PREFIX_LLAP)
|| kv.getKey().startsWith(HiveConf.PREFIX_HIVE_LLAP)) {
// TODO: read this somewhere useful, like the task scheduler
daemonZkRecord.set(kv.getKey(), kv.getValue());
}
}
}
  /**
   * Merges the given LLAP attributes into this daemon's service record and pushes the
   * refreshed record to ZooKeeper.
   *
   * @param attributes configuration entries to publish (non-LLAP keys are ignored)
   * @throws IOException if updating the ZK service record fails
   */
  @Override
  public void updateRegistration(Iterable<Map.Entry<String, String>> attributes) throws IOException {
    populateConfigValues(attributes);
    updateServiceRecord(this.daemonZkRecord, doCheckAcls, true);
  }
  /**
   * Intentionally a no-op for this registry; cleanup is handled by the znode creation
   * model used in {@link #register()}. NOTE(review): presumably the znodes are removed
   * when the ZK session ends — confirm against SlotZnode/registerServiceRecord.
   */
  @Override
  public void unregister() throws IOException {
    // Nothing for the zkCreate models
  }
/**
* A dynamically changing instance in an Llap Service. Can become inactive if failing or can be
* blacklisted (set to 0 capacity) if too slow (See: BlacklistingLlapMetricsListener).
*/
@VisibleForTesting
public class DynamicServiceInstance
extends ServiceInstanceBase implements LlapServiceInstance {
private final int mngPort;
private final int shufflePort;
private final int outputFormatPort;
private final String serviceAddress;
private String externalHost;
private int externalClientsRpcPort;
private final Resource resource;
public DynamicServiceInstance(ServiceRecord srv) throws IOException {
super(srv, IPC_LLAP);
final Endpoint shuffle = srv.getInternalEndpoint(IPC_SHUFFLE);
final Endpoint mng = srv.getInternalEndpoint(IPC_MNG);
final Endpoint outputFormat = srv.getInternalEndpoint(IPC_OUTPUTFORMAT);
final Endpoint services = srv.getExternalEndpoint(IPC_SERVICES);
this.mngPort =
Integer.parseInt(RegistryTypeUtils.getAddressField(mng.addresses.get(0),
AddressTypes.ADDRESS_PORT_FIELD));
this.shufflePort =
Integer.parseInt(RegistryTypeUtils.getAddressField(shuffle.addresses.get(0),
AddressTypes.ADDRESS_PORT_FIELD));
this.outputFormatPort =
Integer.valueOf(RegistryTypeUtils.getAddressField(outputFormat.addresses.get(0),
AddressTypes.ADDRESS_PORT_FIELD));
this.serviceAddress =
RegistryTypeUtils.getAddressField(services.addresses.get(0), AddressTypes.ADDRESS_URI);
if (LlapUtil.isCloudDeployment(conf)) {
final Endpoint externalRpc = srv.getExternalEndpoint(IPC_EXTERNAL_LLAP);
this.externalHost = RegistryTypeUtils.getAddressField(externalRpc.addresses.get(0),
AddressTypes.ADDRESS_HOSTNAME_FIELD);
this.externalClientsRpcPort = Integer.parseInt(
RegistryTypeUtils.getAddressField(externalRpc.addresses.get(0),
AddressTypes.ADDRESS_PORT_FIELD));
}
String memStr = srv.get(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname, "");
String coreStr = srv.get(LlapRegistryService.LLAP_DAEMON_NUM_ENABLED_EXECUTORS, "");
try {
this.resource = Resource.newInstance(Integer.parseInt(memStr), Integer.parseInt(coreStr));
} catch (NumberFormatException ex) {
throw new IOException("Invalid resource configuration for a LLAP node: memory "
+ memStr + ", vcores " + coreStr);
}
}
@Override
public int getShufflePort() {
return shufflePort;
}
@Override
public String getServicesAddress() {
return serviceAddress;
}
@Override
public String getExternalHostname() {
ensureCloudEnv(LlapZookeeperRegistryImpl.this.conf);
return externalHost;
}
@Override
public int getExternalClientsRpcPort() {
ensureCloudEnv(LlapZookeeperRegistryImpl.this.conf);
return externalClientsRpcPort;
}
@Override
public Resource getResource() {
return resource;
}
@Override
public String toString() {
return "DynamicServiceInstance [id=" + getWorkerIdentity() + ", host=" + getHost() + ":" + getRpcPort() +
" with resources=" + getResource() + ", shufflePort=" + getShufflePort() +
", servicesAddress=" + getServicesAddress() + ", mgmtPort=" + getManagementPort() + "]";
}
@Override
public int getManagementPort() {
return mngPort;
}
@Override
public int getOutputFormatPort() {
return outputFormatPort;
}
}
// TODO: this class is completely unnecessary... 1-on-1 mapping with parent.
// Remains here as the legacy of the original higher-level interface (getInstance).
private static class DynamicServiceInstanceSet implements LlapServiceInstanceSet {
private final PathChildrenCache instancesCache;
private final LlapZookeeperRegistryImpl parent;
private final ServiceRecordMarshal encoder;
public DynamicServiceInstanceSet(PathChildrenCache cache,
LlapZookeeperRegistryImpl parent, ServiceRecordMarshal encoder) {
this.instancesCache = cache;
this.parent = parent;
this.encoder = encoder;
parent.populateCache(instancesCache, false);
}
@Override
public Collection<LlapServiceInstance> getAll() {
return parent.getAllInternal();
}
@Override
public Collection<LlapServiceInstance> getAllInstancesOrdered(boolean consistentIndexes) {
return parent.getAllInstancesOrdered(consistentIndexes, instancesCache);
}
@Override
public LlapServiceInstance getInstance(String name) {
Collection<LlapServiceInstance> instances = getAll();
for(LlapServiceInstance instance : instances) {
if (instance.getWorkerIdentity().equals(name)) {
return instance;
}
}
return null;
}
@Override
public Set<LlapServiceInstance> getByHost(String host) {
return parent.getByHostInternal(host);
}
@Override
public int size() {
return parent.sizeInternal();
}
@Override
public ApplicationId getApplicationId() {
for (ChildData childData : instancesCache.getCurrentData()) {
byte[] data = getWorkerData(childData, WORKER_PREFIX);
if (data == null) continue;
ServiceRecord sr = null;
try {
sr = encoder.fromBytes(childData.getPath(), data);
} catch (IOException e) {
LOG.error("Unable to decode data for zkpath: {}." +
" Ignoring from current instances list..", childData.getPath());
continue;
}
String containerStr = sr.get(HiveConf.ConfVars.LLAP_DAEMON_CONTAINER_ID.varname);
if (containerStr == null || containerStr.isEmpty()) continue;
return ContainerId.fromString(containerStr).getApplicationAttemptId().getApplicationId();
}
return null;
}
}
  /** Decodes a slot znode's payload: the worker identity string written by SlotZnode. */
  private static String extractWorkerIdFromSlot(ChildData childData) {
    return new String(childData.getData(), SlotZnode.CHARSET);
  }
  // The real implementation for the instanceset... instanceset has its own copy of the
  // ZK cache yet completely depends on the parent in every other aspect and is thus unneeded.
  /**
   * Returns live instances ordered by their slot number. Worker znodes provide the
   * instances; slot znodes provide the ordering (slot payload -> worker identity).
   * Workers without a known slot are dropped. When {@code consistentIndexes} is set,
   * gaps in the slot sequence are filled with {@link InactiveServiceInstance} dummies
   * so positions stay stable across calls.
   *
   * @param consistentIndexes whether to pad slot gaps with inactive placeholders
   * @param instancesCache the ZK child cache holding both worker and slot znodes
   * @return instances sorted by slot (a view over a TreeMap's values)
   */
  Collection<LlapServiceInstance> getAllInstancesOrdered(
      boolean consistentIndexes, PathChildrenCache instancesCache) {
    Map<String, Long> slotByWorker = new HashMap<String, Long>();
    Set<LlapServiceInstance> unsorted = Sets.newHashSet();
    // First pass: partition cache entries into worker instances and slot mappings.
    for (ChildData childData : instancesCache.getCurrentData()) {
      if (childData == null) continue;
      byte[] data = childData.getData();
      if (data == null) continue;
      String nodeName = extractNodeName(childData);
      if (nodeName.startsWith(WORKER_PREFIX)) {
        LlapServiceInstance instances = getInstanceByPath(childData.getPath());
        if (instances != null) {
          unsorted.add(instances);
        }
      } else if (nodeName.startsWith(SLOT_PREFIX)) {
        // Slot number comes from the znode name; worker id from the znode payload.
        slotByWorker.put(extractWorkerIdFromSlot(childData),
            Long.parseLong(nodeName.substring(SLOT_PREFIX.length())));
      } else {
        LOG.info("Ignoring unknown node {}", childData.getPath());
      }
    }

    // Second pass: order the workers by slot; workers with no slot entry are skipped.
    TreeMap<Long, LlapServiceInstance> sorted = new TreeMap<>();
    long maxSlot = Long.MIN_VALUE;
    for (LlapServiceInstance worker : unsorted) {
      Long slot = slotByWorker.get(worker.getWorkerIdentity());
      if (slot == null) {
        LOG.info("Unknown slot for {}", worker.getWorkerIdentity());
        continue;
      }
      maxSlot = Math.max(maxSlot, slot);
      sorted.put(slot, worker);
    }

    if (consistentIndexes) {
      // Add dummy instances to all slots where LLAPs are MIA... I can haz insert_iterator?
      TreeMap<Long, LlapServiceInstance> dummies = new TreeMap<>();
      Iterator<Long> keyIter = sorted.keySet().iterator();
      long expected = 0;
      Long ts = null;
      while (keyIter.hasNext()) {
        Long slot = keyIter.next();
        assert slot >= expected;
        while (slot > expected) {
          if (ts == null) {
            ts = System.nanoTime(); // Inactive nodes restart every call!
          }
          dummies.put(expected, new InactiveServiceInstance("inactive-" + expected + "-" + ts));
          ++expected;
        }
        ++expected;
      }
      sorted.putAll(dummies);
    }
    return sorted.values();
  }
private static String extractNodeName(ChildData childData) {
String nodeName = childData.getPath();
int ix = nodeName.lastIndexOf("/");
if (ix >= 0) {
nodeName = nodeName.substring(ix + 1);
}
return nodeName;
}
@Override
public LlapServiceInstanceSet getInstances(
String component, long clusterReadyTimeoutMs) throws IOException {
PathChildrenCache instancesCache = ensureInstancesCache(clusterReadyTimeoutMs);
// lazily create instances
if (instances == null) {
this.instances = new DynamicServiceInstanceSet(instancesCache, this, encoder);
}
return instances;
}
  /**
   * Convenience lookup of the YARN application id backing this LLAP cluster; delegates
   * to the instance set with no wait timeout.
   *
   * @throws IOException if the instance set cannot be obtained
   */
  @Override
  public ApplicationId getApplicationId() throws IOException {
    return getInstances("LLAP", 0).getApplicationId();
  }
  /**
   * Shuts down this registry: closes the slot znode first (quietly, ignoring errors),
   * then performs the superclass ZK teardown.
   */
  @Override
  public void stop() {
    CloseableUtils.closeQuietly(slotZnode);
    super.stop();
  }
  /**
   * Factory hook: materializes a {@link DynamicServiceInstance} from a raw ZK service record.
   *
   * @throws IOException if the record lacks a valid resource configuration
   */
  @Override
  protected LlapServiceInstance createServiceInstance(ServiceRecord srv) throws IOException {
    return new DynamicServiceInstance(srv);
  }
  /**
   * Resolves the user name used in this registry's ZK path.
   *
   * @return the LLAP_ZK_REGISTRY_USER config value if set, else the current user
   */
  @Override
  protected String getZkPathUser(Configuration conf) {
    // External LLAP clients would need to set LLAP_ZK_REGISTRY_USER to the LLAP daemon user (hive),
    // rather than relying on LlapRegistryService.currentUser().
    return HiveConf.getVar(conf, ConfVars.LLAP_ZK_REGISTRY_USER, LlapRegistryService.currentUser());
  }
/**
* Locks the Llap Cluster for configuration change for the given time window.
* @param windowStart The beginning of the time window when no other configuration change is allowed.
* @param windowEnd The end of the time window when no other configuration change is allowed.
* @return The result of the change (success if the lock is succeeded, and the next possible
* configuration change time
*/
public ConfigChangeLockResult lockForConfigChange(long windowStart, long windowEnd) {
if (windowEnd < windowStart) {
throw new IllegalArgumentException(
"WindowStart=" + windowStart + " can not be smaller than WindowEnd=" + windowEnd);
}
try {
if (lockWindowEnd == null) {
// Create the node with the /llap-sasl/hiveuser/hostname/config-change/next-change path without retry
lockWindowEnd = new DistributedAtomicLong(zooKeeperClient,
String.join("/", workersPath.substring(0, workersPath.lastIndexOf('/')), CONFIG_CHANGE_PATH,
CONFIG_CHANGE_NODE), (i, j, sleeper) -> false);
lockWindowEnd.initialize(0L);
}
AtomicValue<Long> current = lockWindowEnd.get();
if (!current.succeeded()) {
LOG.debug("Can not get the current configuration lock time");
return new ConfigChangeLockResult(false, -1L);
}
if (current.postValue() > windowStart) {
LOG.debug("Can not lock window {}-{}. Current value is {}.", windowStart, windowEnd, current.postValue());
return new ConfigChangeLockResult(false, current.postValue());
}
current = lockWindowEnd.compareAndSet(current.postValue(), windowEnd);
if (!current.succeeded()) {
LOG.debug("Can not lock window {}-{}. Current value is changed to {}.", windowStart, windowEnd,
current.postValue());
return new ConfigChangeLockResult(false, current.postValue());
}
return new ConfigChangeLockResult(true, current.postValue());
} catch (Throwable t) {
LOG.info("Can not reserve configuration change lock", t);
return new ConfigChangeLockResult(false, -1L);
}
}
/**
* The return data of a config change. Successful or not successful and the next time a config
* change can be attempted.
*/
public static class ConfigChangeLockResult {
private final boolean success;
private final long nextConfigChangeTime;
@VisibleForTesting
public ConfigChangeLockResult(boolean success, long nextConfigChangeTime) {
this.success = success;
this.nextConfigChangeTime = nextConfigChangeTime;
}
public boolean isSuccess() {
return success;
}
public long getNextConfigChangeTime() {
return nextConfigChangeTime;
}
@Override
public String toString() {
return "ConfigChangeLockResult [" + success + "," + nextConfigChangeTime + "]";
}
}
}
|
|
/*******************************************************************************
* Copyright 2020 Cognizant Technology Solutions
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.cognizant.devops.platformdal.healthutil;
import java.util.Iterator;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.cognizant.devops.platformcommons.config.ApplicationConfigProvider;
import com.cognizant.devops.platformcommons.constants.AgentCommonConstant;
import com.cognizant.devops.platformcommons.constants.PlatformServiceConstants;
import com.cognizant.devops.platformcommons.constants.ServiceStatusConstants;
import com.cognizant.devops.platformcommons.core.util.JsonUtils;
import com.cognizant.devops.platformcommons.core.util.SystemStatus;
import com.cognizant.devops.platformcommons.dal.elasticsearch.ElasticSearchDBHandler;
import com.cognizant.devops.platformcommons.dal.neo4j.GraphDBHandler;
import com.cognizant.devops.platformcommons.dal.neo4j.GraphResponse;
import com.cognizant.devops.platformcommons.dal.neo4j.NodeData;
import com.cognizant.devops.platformcommons.exception.InsightsCustomException;
import com.cognizant.devops.platformdal.agentConfig.AgentConfig;
import com.cognizant.devops.platformdal.agentConfig.AgentConfigDAL;
import com.cognizant.devops.platformdal.dal.PostgresMetadataHandler;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
public class HealthUtil {
static Logger log = LogManager.getLogger(HealthUtil.class);
private static final String VERSION = "version";
private static final String HOST_ENDPOINT = "endPoint";
private static final String AGENT_NODES = "agentNodes";
private static final String HEALTH_STATUS = "healthStatus";
private static final String LAST_RUN_TIME = "lastRunTime";
/**
* Method to fetch Client Response for Services and components
*
* @param hostEndPoint
* @param apiUrl
* @param displayType
* @param serviceType
* @param isRequiredAuthentication
* @param username
* @param password
* @param authToken
* @return JsonObject
*/
public JsonObject getClientResponse(String hostEndPoint, String apiUrl, String displayType, String serviceType,
boolean isRequiredAuthentication, String username, String password, String authToken) {
JsonObject returnResponse = null;
String strResponse = "";
JsonObject json = null;
String version = "";
String serviceResponse;
ElasticSearchDBHandler apiCallElasticsearch = new ElasticSearchDBHandler();
try {
if (isRequiredAuthentication) {
serviceResponse = SystemStatus.jerseyGetClientWithAuthentication(apiUrl, username, password, authToken);
} else {
serviceResponse = apiCallElasticsearch.search(apiUrl);
}
if (serviceResponse != null && !("").equalsIgnoreCase(serviceResponse)) {
strResponse = "Response successfully recieved from " + apiUrl;
log.info("response: {} ",serviceResponse);
if (serviceType.equalsIgnoreCase(ServiceStatusConstants.Neo4j)) {
json = JsonUtils.parseStringAsJsonObject(serviceResponse);
version = json.get("neo4j_version").getAsString();
String totalDBSize = getNeo4jDBSize(hostEndPoint, username, password, authToken);
returnResponse = buildSuccessResponse(strResponse, hostEndPoint, displayType, version);
returnResponse.addProperty("totalDBSize", totalDBSize);
} else if (serviceType.equalsIgnoreCase(ServiceStatusConstants.RabbitMq)) {
json = JsonUtils.parseStringAsJsonObject(serviceResponse);
version = "RabbitMq version " + json.get("rabbitmq_version").getAsString() + "\n Erlang version "
+ json.get("erlang_version").getAsString();
returnResponse = buildSuccessResponse(strResponse, hostEndPoint, displayType, version);
} else if (serviceType.equalsIgnoreCase(ServiceStatusConstants.ES)) {
json = JsonUtils.parseStringAsJsonObject(serviceResponse);
JsonObject versionElasticsearch = (JsonObject) json.get(VERSION);
if (versionElasticsearch != null) {
version = versionElasticsearch.get("number").getAsString();
}
returnResponse = buildSuccessResponse(strResponse, hostEndPoint, displayType, version);
} else if (serviceType.equalsIgnoreCase(ServiceStatusConstants.PgSQL)) {
PostgresMetadataHandler pgdbHandler = new PostgresMetadataHandler();
version = pgdbHandler.getPostgresDBVersion();
returnResponse = buildSuccessResponse(strResponse, hostEndPoint, displayType, version);
}
} else {
strResponse = "Response not received from service " + apiUrl;
returnResponse = buildFailureResponse(strResponse, hostEndPoint, displayType, version);
}
} catch (Exception e) {
log.error("Error while capturing health check at {} ",apiUrl, e);
log.error(e.getMessage());
strResponse = "Error while capturing health check at " + apiUrl;
returnResponse = buildFailureResponse(strResponse, hostEndPoint, displayType, version);
}
return returnResponse;
}
/**
* Method to build Success Response
*
* @param message
* @param apiUrl
* @param type
* @param version
* @return JsonObject
*/
public JsonObject buildSuccessResponse(String message, String apiUrl, String type, String version) {
JsonObject jsonResponse = new JsonObject();
jsonResponse.addProperty(PlatformServiceConstants.STATUS, PlatformServiceConstants.SUCCESS);
jsonResponse.addProperty(PlatformServiceConstants.MESSAGE, message);
jsonResponse.addProperty(HOST_ENDPOINT, apiUrl);
jsonResponse.addProperty(ServiceStatusConstants.type, type);
jsonResponse.addProperty(VERSION, version);
return jsonResponse;
}
/**
* Method to build Failure Response
*
* @param message
* @param apiUrl
* @param type
* @param version
* @return JsonObject
*/
public JsonObject buildFailureResponse(String message, String apiUrl, String type, String version) {
JsonObject jsonResponse = new JsonObject();
jsonResponse.addProperty(PlatformServiceConstants.STATUS, PlatformServiceConstants.FAILURE);
jsonResponse.addProperty(PlatformServiceConstants.MESSAGE, message);
jsonResponse.addProperty(HOST_ENDPOINT, apiUrl);
jsonResponse.addProperty(ServiceStatusConstants.type, type);
jsonResponse.addProperty(VERSION, version);
return jsonResponse;
}
/**
* Method to fetch status of Components
*
* @param serviceType
* @return JsonObject
*/
public JsonObject getComponentStatus(String serviceType) {
JsonObject returnObject = null;
try {
if (serviceType.equalsIgnoreCase("PlatformEngine")) {
returnObject = getServiceResponse("HEALTH:ENGINE", 1);
} else if (serviceType.equalsIgnoreCase("PlatformWebhookSubscriber")) {
returnObject = getServiceResponse("HEALTH:WEBHOOKSUBSCRIBER", 1);
} else if (serviceType.equalsIgnoreCase("PlatformWebhookEngine")) {
returnObject = getServiceResponse("HEALTH:WEBHOOKENGINE", 1);
} else if (serviceType.equalsIgnoreCase("PlatformAuditEngine")) {
returnObject = getServiceResponse("HEALTH:AUDITENGINE", 1);
} else if (serviceType.equalsIgnoreCase("PlatformDataArchivalEngine")) {
returnObject = getServiceResponse("HEALTH:DATAARCHIVALENGINE", 1);
} else if (serviceType.equalsIgnoreCase("PlatformWorkflow")) {
returnObject = getServiceResponse("HEALTH:INSIGHTS_WORKFLOW", 1);
} else if (serviceType.equalsIgnoreCase("PlatformService")) {
returnObject = getServiceResponse("HEALTH:INSIGHTS_PLATFORMSERVICE", 1);
}else if (serviceType.equalsIgnoreCase("Agents")) {
returnObject = getAgentResponse("HEALTH:LATEST",100);
}
} catch (Exception e) {
log.error(e.getMessage());
}
return returnObject;
}
/**
* Method to load Health Data
*
* @param label
* @param agentId
* @param limitOfRow
* @return GraphResponse
*/
public GraphResponse loadHealthData(String label, String agentId, int limitOfRow) {
String query = "";
if (agentId.equalsIgnoreCase("")) {
query = "MATCH (n:" + label
+ ") where n.inSightsTime IS NOT NULL RETURN n order by n.inSightsTime DESC LIMIT " + limitOfRow;
} else if (!agentId.equalsIgnoreCase("")) {
String queueName = getAgentHealthQueueName(agentId);
// To handle case where Agent delete from Postgres but data present in Neo4j
if (queueName == null) {
queueName = label;
}
query = "MATCH (n:" + queueName + ") where n.inSightsTime IS NOT NULL and n.agentId ='" + agentId
+ "' RETURN n order by n.inSightsTime DESC LIMIT " + limitOfRow;
}
log.info("query ====== {} ", query);
GraphResponse graphResponse = null;
try {
GraphDBHandler dbHandler = new GraphDBHandler();
graphResponse = dbHandler.executeCypherQuery(query);
} catch (Exception e) {
log.error(e.getMessage());
graphResponse = new GraphResponse();
}
return graphResponse;
}
/**
* Method to build Agent Response
*
* @param status
* @param message
* @param graphResponse
* @return JsonObject
*/
private JsonObject buildAgentResponse(String status, String message, GraphResponse graphResponse) {
String toolcategory = "";
String toolName = "";
String insightTimeX = "";
String agentstatus = "";
String agentId = "";
JsonObject jsonResponse = new JsonObject();
JsonArray agentNode = new JsonArray();
if (status.equalsIgnoreCase(PlatformServiceConstants.SUCCESS)) {
jsonResponse.addProperty(ServiceStatusConstants.type, ServiceStatusConstants.Agents);
Iterator<NodeData> agentnodeIterator = graphResponse.getNodes().iterator();
while (agentnodeIterator.hasNext()) {
NodeData node = agentnodeIterator.next();
toolcategory = node.getPropertyMap().get(AgentCommonConstant.CATEGORY);
toolName = node.getPropertyMap().get(AgentCommonConstant.TOOLNAME);
if (node.getPropertyMap().containsKey(AgentCommonConstant.AGENTID)) {
agentId = node.getPropertyMap().get(AgentCommonConstant.AGENTID);
} else {
agentId = "";
}
agentstatus = node.getPropertyMap().get(PlatformServiceConstants.STATUS);
insightTimeX = node.getPropertyMap().get(PlatformServiceConstants.INSIGHTSTIMEX);
JsonObject jsonResponse2 = new JsonObject();
jsonResponse2.addProperty(PlatformServiceConstants.INSIGHTSTIMEX, insightTimeX);
jsonResponse2.addProperty(AgentCommonConstant.TOOLNAME, toolName);
jsonResponse2.addProperty(AgentCommonConstant.AGENTID, agentId);
jsonResponse2.addProperty(PlatformServiceConstants.INSIGHTSTIMEX, insightTimeX);
jsonResponse2.addProperty(PlatformServiceConstants.STATUS, agentstatus);
jsonResponse2.addProperty(AgentCommonConstant.CATEGORY, toolcategory);
agentNode.add(jsonResponse2);
}
jsonResponse.add(AGENT_NODES, agentNode);
} else {
jsonResponse.addProperty(PlatformServiceConstants.STATUS, PlatformServiceConstants.FAILURE);
jsonResponse.addProperty(PlatformServiceConstants.MESSAGE, message);
jsonResponse.addProperty(AgentCommonConstant.CATEGORY, toolcategory);
jsonResponse.addProperty(ServiceStatusConstants.type, ServiceStatusConstants.Agents);
jsonResponse.addProperty(AgentCommonConstant.TOOLNAME, toolName);
jsonResponse.addProperty(PlatformServiceConstants.INSIGHTSTIMEX, insightTimeX);
jsonResponse.addProperty(VERSION, "");
jsonResponse.add(AGENT_NODES, agentNode);
}
return jsonResponse;
}
/**
* Method to get Neo4h DB size
*
* @param hostEndPoint
* @param username
* @param password
* @param authToken
* @return String
*/
private String getNeo4jDBSize(String hostEndPoint, String username, String password, String authToken) {
long totalStoreSize = 0L;
String returnSize = "";
try {
if(ApplicationConfigProvider.getInstance().getGraph().getVersion().contains("4.")) {
GraphDBHandler dbHandler = new GraphDBHandler();
String storeSizeQuery = "CALL apoc.monitor.store() YIELD logSize, totalStoreSize RETURN sum(logSize+totalStoreSize)";
JsonObject storeSizeResponse = dbHandler.executeCypherQueryForJsonResponse(storeSizeQuery);
log.debug("Neo4j database store size Response ====== {} ",storeSizeResponse);
JsonArray dataArray = storeSizeResponse.get("results").getAsJsonArray().get(0).getAsJsonObject().get("data").getAsJsonArray();
totalStoreSize = dataArray.get(0).getAsJsonObject().get("row").getAsJsonArray().get(0).getAsLong();
} else {
String apiUrlForSize = hostEndPoint
+ "/db/manage/server/jmx/domain/org.neo4j/instance%3Dkernel%230%2Cname%3DStore+sizes";
String serviceNeo4jResponse = SystemStatus.jerseyGetClientWithAuthentication(apiUrlForSize, username,
password, authToken);
log.debug("serviceNeo4jResponse ====== {} ",serviceNeo4jResponse);
JsonElement object = JsonUtils.parseString(serviceNeo4jResponse);
if (object.isJsonArray()) {
if (object.getAsJsonArray().get(0).getAsJsonObject().get("attributes").isJsonArray()) {
JsonArray beans = object.getAsJsonArray().get(0).getAsJsonObject().get("attributes")
.getAsJsonArray();
for (JsonElement jsonElement : beans) {
if (jsonElement.getAsJsonObject().get("name").getAsString()
.equalsIgnoreCase("TotalStoreSize")) {
totalStoreSize = jsonElement.getAsJsonObject().get("value").getAsLong();
}
}
}
}
}
log.debug(" info totalStoreSize ==== {}",totalStoreSize);
if (totalStoreSize > 0) {
returnSize = humanReadableByteCount(totalStoreSize, Boolean.FALSE);
}
} catch (Exception e) {
log.error(e.getMessage());
log.error(" Error while geeting neo4j Size");
}
return returnSize;
}
/**
* Method to generate Human Readable byte count
*
* @param bytes
* @param si
* @return String
*/
public static String humanReadableByteCount(long bytes, boolean si) {
int unit = si ? 1000 : 1024;
if (bytes < unit)
return bytes + " B";
int exp = (int) (Math.log(bytes) / Math.log(unit));
String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp - 1) + (si ? "" : "i");
return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
}
/**
* Method to get Agent Health Queue name
*
* @param agentId
* @return String
*/
public String getAgentHealthQueueName(String agentId) {
String healthRoutingKey = null;
try {
AgentConfigDAL agentConfigDal = new AgentConfigDAL();
AgentConfig agentConfig = agentConfigDal.getAgentConfigurations(agentId);
JsonObject config = JsonUtils.parseStringAsJsonObject(agentConfig.getAgentJson());
JsonObject json = config.get("publish").getAsJsonObject();
healthRoutingKey = json.get("health").getAsString().replace(".", ":");
} catch (Exception e) {
log.error(" No DB record found for agentId {} ", agentId);
}
return healthRoutingKey;
}
/**
* Method to get Service Response
*
* @param labels
* @param noOfRows
* @return JsonObject
*/
private JsonObject getServiceResponse(String labels,int noOfRows) {
String successResponse = "";
String version = "";
String status = "";
JsonObject returnObject = null;
GraphResponse graphResponse = loadHealthData(labels,"", noOfRows);
if (graphResponse != null) {
if (!graphResponse.getNodes().isEmpty()) {
successResponse = graphResponse.getNodes().get(0).getPropertyMap().get("message");
version = graphResponse.getNodes().get(0).getPropertyMap().get(VERSION);
status = graphResponse.getNodes().get(0).getPropertyMap().get("status");
if (status.equalsIgnoreCase(PlatformServiceConstants.SUCCESS)) {
returnObject = buildSuccessResponse(successResponse, "-",
ServiceStatusConstants.Service, version);
} else {
returnObject = buildFailureResponse(successResponse, "-",
ServiceStatusConstants.Service, version);
}
} else {
successResponse = "Node list is empty in response not received from Neo4j";
returnObject = buildFailureResponse(successResponse, "-",
ServiceStatusConstants.Service, version);
}
} else {
successResponse = "Response not received from Neo4j";
returnObject = buildFailureResponse(successResponse, "-", ServiceStatusConstants.Service,
version);
}
return returnObject;
}
/**
* Method to fetch response of Agents
*
* @param labels
* @param noOfRows
* @return JsonObject
*/
private JsonObject getAgentResponse(String labels, int noOfRows) {
String successResponse = "";
String status = "";
GraphResponse graphResponse = loadHealthData(labels, "", noOfRows);
if (graphResponse != null) {
if (!graphResponse.getNodes().isEmpty()) {
status = PlatformServiceConstants.SUCCESS;
} else {
successResponse = "Node list is empty in response not received from Neo4j";
status = PlatformServiceConstants.FAILURE;
}
} else {
successResponse = "Response not received from Neo4j";
status = PlatformServiceConstants.FAILURE;
}
log.debug("message {} ", successResponse);
return buildAgentResponse(status, successResponse, graphResponse);
}
/**
* Method to fetch data component HTML
*
* @return String
*/
public JsonObject getDataComponentStatus() {
JsonObject dataComponentStatus = new JsonObject();
try {
String username = null;
String password = null;
String authToken = null;
String hostEndPoint = "";
String apiUrl = "";
hostEndPoint = ServiceStatusConstants.POSTGRESQL_HOST;
apiUrl = hostEndPoint;
JsonObject postgreStatus = getClientResponse(hostEndPoint, apiUrl, ServiceStatusConstants.DB,
ServiceStatusConstants.PgSQL, Boolean.FALSE, username, password, authToken);
dataComponentStatus.add(ServiceStatusConstants.PgSQL, postgreStatus);
hostEndPoint = ServiceStatusConstants.NEO4J_HOST;
apiUrl = hostEndPoint;
if(ApplicationConfigProvider.getInstance().getGraph().getVersion().contains("3.5")) {
apiUrl += "/db/data/";
}
authToken = ApplicationConfigProvider.getInstance().getGraph().getAuthToken();
JsonObject neo4jStatus = getClientResponse(hostEndPoint, apiUrl, ServiceStatusConstants.DB,
ServiceStatusConstants.Neo4j, Boolean.TRUE, username, password, authToken);
dataComponentStatus.add(ServiceStatusConstants.Neo4j, neo4jStatus);
hostEndPoint = ServiceStatusConstants.ES_HOST;
apiUrl = hostEndPoint;
JsonObject esStatus = getClientResponse(hostEndPoint, apiUrl, ServiceStatusConstants.DB,
ServiceStatusConstants.ES, Boolean.FALSE, username, password, authToken);
dataComponentStatus.add(ServiceStatusConstants.ES, esStatus);
hostEndPoint = ServiceStatusConstants.RABBIT_MQ;
apiUrl = hostEndPoint + "/api/overview";
authToken = null;
username = ApplicationConfigProvider.getInstance().getMessageQueue().getUser();
password = ApplicationConfigProvider.getInstance().getMessageQueue().getPassword();
JsonObject rabbitMq = getClientResponse(hostEndPoint, apiUrl, ServiceStatusConstants.DB,
ServiceStatusConstants.RabbitMq, Boolean.TRUE, username, password, authToken);
dataComponentStatus.add(ServiceStatusConstants.RabbitMq, rabbitMq);
} catch (Exception e) {
log.error("Worlflow Detail ==== Error creating HTML body for data components");
}
log.debug(" dataComponentStatus {} ", dataComponentStatus);
return dataComponentStatus;
}
/**
 * Collects the health status of the core platform components
 * (PlatformService, PlatformEngine and PlatformWorkflow).
 *
 * @return JsonObject mapping each component name to its status JSON;
 *         empty (but never null) if status collection fails
 */
public JsonObject getServiceStatus() {
    JsonObject serviceStatus = new JsonObject();
    try {
        JsonObject jsonPlatformServiceStatus = getComponentStatus("PlatformService");
        serviceStatus.add(ServiceStatusConstants.PlatformService, jsonPlatformServiceStatus);
        JsonObject jsonPlatformEngineStatus = getComponentStatus("PlatformEngine");
        serviceStatus.add(ServiceStatusConstants.PlatformEngine, jsonPlatformEngineStatus);
        JsonObject jsonPlatformWorkflowStatus = getComponentStatus("PlatformWorkflow");
        serviceStatus.add(ServiceStatusConstants.PlatformWorkflow, jsonPlatformWorkflowStatus);
    } catch (Exception e) {
        // Pass the exception to the logger so the root cause is not lost.
        log.error("Workflow Detail ==== Error creating HTML body for services ", e);
    }
    log.debug(" serviceStatus {} ", serviceStatus);
    return serviceStatus;
}
/**
 * Returns the health status of all agents.
 *
 * @return JsonObject describing agent health, as produced by
 *         {@code getComponentStatus("Agents")}
 */
public JsonObject getAgentsStatus() {
    // Delegate to the generic component-status lookup for the "Agents" category.
    JsonObject agentsStatus = getComponentStatus("Agents");
    return agentsStatus;
}
/**
 * Returns all registered agents together with their latest recorded health
 * information (last run time and health status).
 *
 * <p>Note: {@link #getAgentHealth(JsonObject, String)} adds its properties to
 * the passed-in node and returns the same object, so {@code agentHealthNode}
 * aliases {@code node}; the copy below is kept for robustness against that
 * method changing.
 *
 * @return JsonObject containing an array of agent nodes, each with
 *         toolName, agentId, lastRunTime and healthStatus
 * @throws InsightsCustomException if the agent configurations cannot be read
 */
public JsonObject getRegisteredAgentsAndHealth() throws InsightsCustomException {
    AgentConfigDAL agentConfigDAL = new AgentConfigDAL();
    JsonObject agentDetails = new JsonObject();
    JsonArray agentNodes = new JsonArray();
    try {
        List<AgentConfig> agentConfigList = agentConfigDAL.getAllDataAgentConfigurations();
        for (AgentConfig agentConfig : agentConfigList) {
            JsonObject node = new JsonObject();
            node.addProperty("toolName", agentConfig.getToolName());
            node.addProperty("agentId", agentConfig.getAgentKey());
            JsonObject agentHealthNode = getAgentHealth(node, agentConfig.getAgentKey());
            copyPropertyOrDefault(agentHealthNode, node, LAST_RUN_TIME);
            copyPropertyOrDefault(agentHealthNode, node, HEALTH_STATUS);
            agentNodes.add(node);
        }
        agentDetails.add(AGENT_NODES, agentNodes);
    } catch (Exception e) {
        log.error("Error getting all agent config ", e);
        throw new InsightsCustomException(e.toString());
    }
    return agentDetails;
}

/**
 * Copies the string value of {@code key} from {@code source} into
 * {@code target}, writing an empty string when the key is absent.
 */
private void copyPropertyOrDefault(JsonObject source, JsonObject target, String key) {
    if (source.has(key)) {
        target.addProperty(key, source.get(key).getAsString());
    } else {
        target.addProperty(key, "");
    }
}
/**
 * Fetches the most recent HEALTH:LATEST record for the given agent from the
 * graph database and, if one exists, adds its run time and status to the
 * supplied JSON node.
 *
 * <p>Properties are added to {@code agentJson} in place and the same object
 * is returned; on any failure the node is returned unchanged.
 *
 * @param agentJson node to enrich with LAST_RUN_TIME and HEALTH_STATUS
 * @param agentId   agent key used to look up the health record
 * @return the (possibly enriched) {@code agentJson}
 */
public JsonObject getAgentHealth(JsonObject agentJson, String agentId) {
    GraphDBHandler graphDBHandler = new GraphDBHandler();
    JsonObject response;
    try {
        // NOTE(review): agentId is concatenated directly into the Cypher query.
        // If agentId can ever originate from untrusted input this is an
        // injection risk; prefer a parameterized query if the handler
        // supports one — TODO confirm.
        response = graphDBHandler.executeCypherQueryForJsonResponse("MATCH (n:HEALTH:LATEST) where n.agentId='"+agentId+"' return n order by n.inSightsTime desc limit 1");
        // Drill into the standard Neo4j transactional response shape:
        // results[0].data[0].row[0] holds the node's properties.
        JsonArray responseArray = response.get("results").getAsJsonArray();
        JsonArray responseData = responseArray.get(0).getAsJsonObject().get("data").getAsJsonArray();
        if(responseData.size() > 0) {
            agentJson.addProperty(LAST_RUN_TIME, responseData.get(0).getAsJsonObject().get("row").getAsJsonArray().get(0).getAsJsonObject().get(PlatformServiceConstants.INSIGHTSTIMEX).getAsString());
            agentJson.addProperty(HEALTH_STATUS, responseData.get(0).getAsJsonObject().get("row").getAsJsonArray().get(0).getAsJsonObject().get(PlatformServiceConstants.STATUS).getAsString());
        }
    } catch (Exception e) {
        // Best effort: a missing or malformed health record must not break callers.
        log.error("Error getting agent health ", e);
    }
    return agentJson;
}
}
|
|
/*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.algorithm;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.PriorityQueue;
import org.deidentifier.arx.algorithm.FLASHPhaseConfiguration.PhaseAnonymityProperty;
import org.deidentifier.arx.framework.check.NodeChecker;
import org.deidentifier.arx.framework.check.groupify.HashGroupify;
import org.deidentifier.arx.framework.lattice.DependentAction;
import org.deidentifier.arx.framework.lattice.SolutionSpace;
import org.deidentifier.arx.framework.lattice.Transformation;
import org.deidentifier.arx.metric.InformationLoss;
import org.deidentifier.arx.metric.InformationLossWithBound;
import cern.colt.GenericSorting;
import cern.colt.Swapper;
import cern.colt.function.IntComparator;
import cern.colt.list.LongArrayList;
import com.carrotsearch.hppc.IntArrayList;
import de.linearbits.jhpl.JHPLIterator.LongIterator;
import de.linearbits.jhpl.PredictiveProperty;
/**
 * This class implements the FLASH algorithm.
 *
 * <p>FLASH traverses the generalization lattice level by level. Depending on the
 * configuration it runs a binary phase (greedy paths towards the top node,
 * binary-searched for the lowest anonymous transformation), a linear
 * depth-first phase, or both. Which transformations are skipped, checked,
 * evaluated and tagged is controlled entirely by the triggers in the
 * {@link FLASHConfiguration}.
 *
 * @author Fabian Prasser
 * @author Florian Kohlmayer
 */
public class FLASHAlgorithmImpl extends AbstractAlgorithm {

    /** Configuration for the algorithm's phases. */
    protected final FLASHConfiguration config;

    /** Are the pointers for a node with id 'index' already sorted?. Lazily filled cache, one entry per transformation. */
    private final int[][] sortedSuccessors;

    /** The strategy. Defines the traversal order used for sorting nodes and successors. */
    private final FLASHStrategy strategy;

    /** List of nodes that may be used for pruning transformations with insufficient utility. Null when pruning is disabled. */
    private final List<Integer> potentiallyInsufficientUtility;

    /** The number of checked transformations. Used only for progress reporting. */
    private int checked = 0;

    /**
     * Creates a new instance.
     *
     * @param solutionSpace the generalization lattice; must not contain more
     *                      than Integer.MAX_VALUE transformations because
     *                      identifiers are cached in int-indexed arrays
     * @param checker
     * @param strategy
     * @param config
     */
    public FLASHAlgorithmImpl(SolutionSpace solutionSpace,
                              NodeChecker checker,
                              FLASHStrategy strategy,
                              FLASHConfiguration config) {
        super(solutionSpace, checker);
        if (solutionSpace.getSize() > Integer.MAX_VALUE) {
            throw new IllegalArgumentException();
        }
        this.checked = 0;
        this.solutionSpace.setAnonymityPropertyPredictable(config.isAnonymityPropertyPredicable());
        this.strategy = strategy;
        this.sortedSuccessors = new int[(int)solutionSpace.getSize()][];
        this.config = config;
        // Pruning candidates are only tracked when the configuration allows it
        this.potentiallyInsufficientUtility = this.config.isPruneInsufficientUtility() ?
                new LinkedList<Integer>() : null;
    }

    @Override
    public void traverse() {

        // Determine configuration for the outer loop
        FLASHPhaseConfiguration outerLoopConfiguration;
        if (config.isBinaryPhaseRequired()) {
            outerLoopConfiguration = config.getBinaryPhaseConfiguration();
        } else {
            outerLoopConfiguration = config.getLinearPhaseConfiguration();
        }

        // Set some triggers
        checker.getHistory().setStorageStrategy(config.getSnapshotStorageStrategy());

        // Initialize: the queue orders candidate nodes according to the strategy
        PriorityQueue<Integer> queue = new PriorityQueue<Integer>(solutionSpace.getTop().getLevel() + 1, strategy);
        Transformation bottom = solutionSpace.getBottom();
        Transformation top = solutionSpace.getTop();

        // Check bottom for speed and remember the result to prevent repeated checks
        NodeChecker.Result result = checker.check(bottom);
        bottom.setProperty(solutionSpace.getPropertyForceSnapshot());
        bottom.setData(result);

        // For each node in the lattice, bottom-up level by level
        for (int level = bottom.getLevel(); level <= top.getLevel(); level++) {
            for (int id : getSortedUnprocessedNodes(level, outerLoopConfiguration.getTriggerSkip())) {

                // Run the correct phase
                Transformation transformation = solutionSpace.getTransformation(id);
                if (config.isBinaryPhaseRequired()) {
                    binarySearch(transformation, queue);
                } else {
                    linearSearch(transformation);
                }
            }
        }

        // Potentially allows to better estimate utility in the lattice
        computeUtilityForMonotonicMetrics(bottom);
        computeUtilityForMonotonicMetrics(top);

        // Remove the associated result information to leave the lattice in a consistent state
        bottom.setData(null);

        // Clear list of pruning candidates
        if (potentiallyInsufficientUtility != null) {
            potentiallyInsufficientUtility.clear();
        }
    }

    /**
     * Implements the FLASH algorithm (without outer loop).
     *
     * <p>Repeatedly pops the strategy-wise best node from the queue, builds a
     * greedy path to the top and binary-searches it. When a linear phase is
     * also configured, the lowest anonymous transformation found on each path
     * is handed to {@link #linearSearch(Transformation)}.
     *
     * @param transformation
     * @param queue
     */
    private void binarySearch(Transformation transformation, PriorityQueue<Integer> queue) {

        // Obtain node action
        DependentAction triggerSkip = config.getBinaryPhaseConfiguration().getTriggerSkip();

        // Add to queue
        queue.add((int)transformation.getIdentifier());

        // While queue is not empty
        while (!queue.isEmpty()) {

            // Remove head and process
            transformation = solutionSpace.getTransformation(queue.poll());
            if (!skip(triggerSkip, transformation)) {

                // First phase
                List<Transformation> path = findPath(transformation, triggerSkip);
                transformation = checkPath(path, triggerSkip, queue);

                // Second phase
                if (config.isLinearPhaseRequired() && (transformation != null)) {

                    // Run linear search on head
                    linearSearch(transformation);
                }
            }
        }
    }

    /**
     * Checks and tags the given transformation.
     *
     * <p>Either evaluates the metric without an explicit check (when the
     * evaluate trigger applies) or performs a full check. Afterwards the
     * optimum is tracked, the phase's tag trigger is applied and pruning is
     * attempted. The order of these steps matters: tagging must happen after
     * the check result is available, and pruning relies on the tags.
     *
     * @param transformation
     * @param configuration
     */
    private void checkAndTag(Transformation transformation, FLASHPhaseConfiguration configuration) {

        // Check or evaluate
        if (configuration.getTriggerEvaluate().appliesTo(transformation)) {
            InformationLossWithBound<?> loss = checker.getMetric().getInformationLoss(transformation, (HashGroupify)null);
            transformation.setInformationLoss(loss.getInformationLoss());
            transformation.setLowerBound(loss.getLowerBound());
            if (loss.getLowerBound() == null) {
                transformation.setLowerBound(checker.getMetric().getLowerBound(transformation));
            }
        } else if (configuration.getTriggerCheck().appliesTo(transformation)) {
            transformation.setChecked(checker.check(transformation));
            progress((double)++checked / (double)solutionSpace.getSize());
        }

        // Store optimum
        trackOptimum(transformation);

        // Tag
        configuration.getTriggerTag().apply(transformation);

        // Potentially prune some parts of the search space
        prune(transformation);
    }

    /**
     * Checks a path binary.
     *
     * <p>Binary-searches the path for the lowest transformation that has the
     * phase's anonymity property. Successors of non-anonymous nodes are fed
     * back into the queue for later processing.
     *
     * @param path The path
     * @param triggerSkip
     * @param queue
     * @return the lowest anonymous transformation found on the path, or null
     */
    private Transformation checkPath(List<Transformation> path, DependentAction triggerSkip, PriorityQueue<Integer> queue) {

        // Obtain anonymity property for this phase (full anonymity or k-anonymity)
        PredictiveProperty anonymityProperty = config.getBinaryPhaseConfiguration().getAnonymityProperty() == PhaseAnonymityProperty.ANONYMITY ?
                solutionSpace.getPropertyAnonymous() : solutionSpace.getPropertyKAnonymous();

        // Init
        int low = 0;
        int high = path.size() - 1;
        Transformation lastAnonymousTransformation = null;

        // While not done
        while (low <= high) {

            // Init
            // NOTE(review): classic midpoint; (low + high) cannot realistically
            // overflow here since path lengths are bounded by the lattice
            // height, but (low + high) >>> 1 would be safer — TODO confirm.
            final int mid = (low + high) / 2;
            final Transformation transformation = path.get(mid);

            // Skip
            if (!skip(triggerSkip, transformation)) {

                // Check and tag
                checkAndTag(transformation, config.getBinaryPhaseConfiguration());

                // Add nodes to queue
                if (!transformation.hasProperty(anonymityProperty)) {
                    for (final int up : getSortedSuccessors(transformation)) {
                        if (!skip(triggerSkip, solutionSpace.getTransformation(up))) {
                            queue.add(up);
                        }
                    }
                }

                // Binary search: anonymous -> search lower half, else upper half
                if (transformation.hasProperty(anonymityProperty)) {
                    lastAnonymousTransformation = transformation;
                    high = mid - 1;
                } else {
                    low = mid + 1;
                }
            } else {
                high = mid - 1;
            }
        }
        return lastAnonymousTransformation;
    }

    /**
     * Greedily finds a path to the top node.
     *
     * <p>At each step the strategy-wise best non-skipped successor is chosen;
     * the walk ends when no such successor exists.
     *
     * @param current The node to start the path with. Will be included
     * @param triggerSkip All nodes to which this trigger applies will be skipped
     * @return The path as a list
     */
    private List<Transformation> findPath(Transformation current, DependentAction triggerSkip) {
        List<Transformation> path = new ArrayList<Transformation>();
        path.add(current);
        boolean found = true;
        while (found) {
            found = false;
            for (final int id : getSortedSuccessors(current)) {
                Transformation next = solutionSpace.getTransformation(id);
                if (!skip(triggerSkip, next)) {
                    current = next;
                    path.add(next);
                    found = true;
                    break;
                }
            }
        }
        return path;
    }

    /**
     * Returns all transformations that do not have the given property and sorts the resulting array
     * according to the strategy.
     *
     * @param level The level which is to be sorted
     * @param triggerSkip The trigger to be used for limiting the number of nodes to be sorted
     * @return A sorted array of nodes remaining on this level
     */
    private int[] getSortedUnprocessedNodes(int level, DependentAction triggerSkip) {

        // Create
        IntArrayList list = new IntArrayList();
        for (LongIterator iter = solutionSpace.unsafeGetLevel(level); iter.hasNext();) {
            long id = iter.next();
            if (!skip(triggerSkip, solutionSpace.getTransformation(id))) {
                list.add((int)id);
            }
        }

        // Copy & sort
        int[] array = new int[list.size()];
        System.arraycopy(list.buffer, 0, array, 0, list.elementsCount);
        sort(array);
        return array;
    }

    /**
     * Implements a depth-first search with predictive tagging.
     *
     * <p>Recursive: descends into every non-skipped successor. Every visited
     * node is marked as successors-pruned afterwards so it is not revisited.
     *
     * @param transformation
     */
    private void linearSearch(Transformation transformation) {

        // Obtain node action
        DependentAction triggerSkip = config.getLinearPhaseConfiguration().getTriggerSkip();

        // Skip this node
        if (!skip(triggerSkip, transformation)) {

            // Check and tag
            checkAndTag(transformation, config.getLinearPhaseConfiguration());

            // DFS
            for (final int child : getSortedSuccessors(transformation)) {
                Transformation childTransformation = solutionSpace.getTransformation(child);
                if (!skip(triggerSkip, childTransformation)) {
                    linearSearch(childTransformation);
                }
            }
        }

        // Mark as successors pruned
        transformation.setProperty(solutionSpace.getPropertySuccessorsPruned());
    }

    /**
     * We may be able to prune some transformations based on weak lower bounds on
     * the monotonic share of a node's information loss.
     *
     * <p>While no global optimum is known, nodes are collected as candidates.
     * Once an optimum exists, a node (or candidate) whose lower bound is not
     * better than the optimum's information loss is tagged as having
     * insufficient utility and its successors are pruned.
     *
     * @param node
     */
    private void prune(Transformation node) {

        // Check if pruning is enabled
        if (potentiallyInsufficientUtility == null) {
            return;
        }

        // There is no need to do anything, if we do not have a lower bound
        if (node.getLowerBound() == null) {
            return;
        }

        // Extract some data
        Transformation optimalTransformation = getGlobalOptimum();

        // There is no need to do anything, if the transformation that was just checked was already pruned
        if ((node != optimalTransformation) && node.hasProperty(solutionSpace.getPropertySuccessorsPruned())) {
            return;
        }

        // If we haven't yet found an optimum, we simply add the node to the list of pruning candidates
        if (optimalTransformation == null) {
            potentiallyInsufficientUtility.add((int)node.getIdentifier());
            return;
        }

        // Extract some data
        InformationLoss<?> optimalInfoLoss = optimalTransformation.getInformationLoss();

        // If the current node is not the new optimum, we simply check it
        if (node != optimalTransformation) {

            // Prune it
            if (optimalInfoLoss.compareTo(node.getLowerBound()) <= 0) {
                node.setProperty(solutionSpace.getPropertyInsufficientUtility());
                node.setProperty(solutionSpace.getPropertySuccessorsPruned());
                // Else, we store it as a future pruning candidate
            } else {
                potentiallyInsufficientUtility.add((int)node.getIdentifier());
            }

            // If the current node is our new optimum, we check all candidates
        } else {

            // For each candidate
            Iterator<Integer> iterator = potentiallyInsufficientUtility.iterator();
            while (iterator.hasNext()) {
                Integer current = iterator.next();

                // Remove the candidate, if it was already pruned in the meantime
                Transformation currentTransformation = solutionSpace.getTransformation(current);
                if (currentTransformation.hasProperty(solutionSpace.getPropertySuccessorsPruned())) {
                    iterator.remove();

                    // Else, check if we can prune it
                } else if (optimalInfoLoss.compareTo(currentTransformation.getLowerBound()) <= 0) {
                    currentTransformation.setProperty(solutionSpace.getPropertyInsufficientUtility());
                    currentTransformation.setProperty(solutionSpace.getPropertySuccessorsPruned());
                    iterator.remove();
                }
            }

            // The current optimum is a future pruning candidate
            if (!node.hasProperty(solutionSpace.getPropertySuccessorsPruned())) {
                potentiallyInsufficientUtility.add((int)node.getIdentifier());
            }
        }
    }

    /**
     * Returns whether a node should be skipped.
     *
     * <p>A node is skipped when the trigger applies or — with pruning enabled
     * and a known optimum — when its (possibly lazily computed) lower bound
     * proves it cannot beat the optimum. In the latter case the node is also
     * tagged so the decision is not recomputed.
     *
     * @param trigger the action deciding whether the node is skipped outright
     * @param transformation the node to test
     * @return true if the node should be skipped
     */
    private boolean skip(DependentAction trigger, Transformation transformation) {

        // If the trigger applies, skip
        if (trigger.appliesTo(transformation)) {
            return true;
        }

        // Check if pruning is enabled
        if (potentiallyInsufficientUtility == null) {
            return false;
        }

        // Check, if we can prune based on a monotonic sub-metric
        if (!checker.getConfiguration().isPracticalMonotonicity() && (getGlobalOptimum() != null)) {

            // We skip, if we already know that this node has insufficient utility
            if (transformation.hasProperty(solutionSpace.getPropertyInsufficientUtility())) {
                return true;
            }

            // Check whether a lower bound exists
            InformationLoss<?> lowerBound = transformation.getLowerBound();
            if (lowerBound == null) {
                lowerBound = checker.getMetric().getLowerBound(transformation);
                if (lowerBound != null) {
                    transformation.setLowerBound(lowerBound);
                }
            }

            // Check whether this node has insufficient utility, if a lower bound exists
            if (lowerBound != null) {
                if (getGlobalOptimum().getInformationLoss().compareTo(lowerBound) <= 0) {
                    transformation.setProperty(solutionSpace.getPropertyInsufficientUtility());
                    transformation.setProperty(solutionSpace.getPropertySuccessorsPruned());
                    return true;
                }
            }
        }

        // We need to process this node
        return false;
    }

    /**
     * Sorts a given array of transformation identifiers in place according to the strategy.
     *
     * @param array
     */
    private void sort(final int[] array) {
        GenericSorting.mergeSort(0, array.length, new IntComparator(){
            @Override
            public int compare(int arg0, int arg1) {
                return strategy.compare(array[arg0], array[arg1]);
            }
        }, new Swapper(){
            @Override
            public void swap(int arg0, int arg1) {
                int temp = array[arg0];
                array[arg0] = array[arg1];
                array[arg1] = temp;
            }
        });
    }

    /**
     * Sorts pointers to successor nodes according to the strategy.
     *
     * <p>Results are cached per transformation id in {@link #sortedSuccessors}.
     *
     * @param transformation
     * @return the sorted successor identifiers of the given transformation
     */
    private int[] getSortedSuccessors(final Transformation transformation) {
        int identifier = (int)transformation.getIdentifier();
        if (sortedSuccessors[identifier] == null) {
            LongArrayList list = transformation.getSuccessors();
            int[] result = new int[list.size()];
            for (int i=0; i<list.size(); i++) {
                result[i] = (int)list.getQuick(i);
            }
            sort(result);
            sortedSuccessors[identifier] = result;
        }
        return sortedSuccessors[identifier];
    }
}
|
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mobilitydata.gtfsvalidator.processor;
import static org.mobilitydata.gtfsvalidator.processor.FieldNameConverter.byKeyMapName;
import static org.mobilitydata.gtfsvalidator.processor.FieldNameConverter.byKeyMethodName;
import static org.mobilitydata.gtfsvalidator.processor.FieldNameConverter.fieldNameField;
import static org.mobilitydata.gtfsvalidator.processor.FieldNameConverter.hasMethodName;
import static org.mobilitydata.gtfsvalidator.processor.GtfsEntityClasses.TABLE_PACKAGE_NAME;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimaps;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeName;
import com.squareup.javapoet.TypeSpec;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;
import javax.lang.model.element.Modifier;
import org.mobilitydata.gtfsvalidator.annotation.Generated;
import org.mobilitydata.gtfsvalidator.notice.DuplicateKeyNotice;
import org.mobilitydata.gtfsvalidator.notice.MoreThanOneEntityNotice;
import org.mobilitydata.gtfsvalidator.notice.NoticeContainer;
import org.mobilitydata.gtfsvalidator.parsing.CsvHeader;
import org.mobilitydata.gtfsvalidator.table.GtfsTableContainer;
/**
* Generates code for a container for a loaded GTFS table.
*
* <p>E.g., GtfsStopTableContainer class is generated for "stops.txt".
*/
public class TableContainerGenerator {
private final GtfsFileDescriptor fileDescriptor;
private final GtfsEntityClasses classNames;
public TableContainerGenerator(GtfsFileDescriptor fileDescriptor) {
this.fileDescriptor = fileDescriptor;
this.classNames = new GtfsEntityClasses(fileDescriptor);
}
private static void addListMultimapWithGetters(
TypeSpec.Builder typeSpec, GtfsFieldDescriptor indexField, TypeName entityTypeName) {
addListMultimapWithGetters(typeSpec, indexField, null, entityTypeName);
}
private static void addListMultimapWithGetters(
TypeSpec.Builder typeSpec,
GtfsFieldDescriptor indexField,
@Nullable GtfsFieldDescriptor sequenceField,
TypeName entityTypeName) {
TypeName keyMapType =
ParameterizedTypeName.get(
ClassName.get(ListMultimap.class), TypeName.get(indexField.javaType()), entityTypeName);
String methodName = byKeyMethodName(indexField.name());
String fieldName = byKeyMapName(indexField.name());
typeSpec.addField(
FieldSpec.builder(keyMapType, fieldName, Modifier.PRIVATE)
.initializer("$T.create()", ParameterizedTypeName.get(ArrayListMultimap.class))
.build());
String sortedBy =
sequenceField != null
? " sorted by " + FieldNameConverter.gtfsColumnName(sequenceField.name())
: "";
typeSpec.addMethod(
MethodSpec.methodBuilder(methodName)
.addModifiers(Modifier.PUBLIC)
.addParameter(TypeName.get(indexField.javaType()), "key")
.returns(ParameterizedTypeName.get(ClassName.get(List.class), entityTypeName))
.addStatement("return $L.get(key)", fieldName)
.addJavadoc("@return List of " + entityTypeName + sortedBy)
.build());
typeSpec.addMethod(
MethodSpec.methodBuilder(methodName + "Map")
.addModifiers(Modifier.PUBLIC)
.returns(keyMapType)
.addStatement("return $L", fieldName)
.addJavadoc(
"@return ListMultimap keyed on "
+ FieldNameConverter.gtfsColumnName(indexField.name())
+ " with values that are Lists of "
+ entityTypeName
+ sortedBy)
.build());
}
private static void addMapWithGetter(
TypeSpec.Builder typeSpec, GtfsFieldDescriptor indexField, TypeName entityTypeName) {
String methodName = byKeyMethodName(indexField.name());
String fieldName = byKeyMapName(indexField.name());
TypeName keyMapType =
ParameterizedTypeName.get(
ClassName.get(Map.class), TypeName.get(indexField.javaType()), entityTypeName);
typeSpec.addField(
FieldSpec.builder(keyMapType, fieldName, Modifier.PRIVATE)
.initializer("new $T<>()", ParameterizedTypeName.get(HashMap.class))
.build());
typeSpec.addMethod(
MethodSpec.methodBuilder(methodName)
.addModifiers(Modifier.PUBLIC)
.addParameter(TypeName.get(indexField.javaType()), "key")
.returns(ParameterizedTypeName.get(ClassName.get(Optional.class), entityTypeName))
.addStatement("return Optional.ofNullable($L.getOrDefault(key, null))", fieldName)
.build());
}
private static void addMapByCompositeKey(
TypeSpec.Builder typeSpec,
GtfsFieldDescriptor firstKey,
GtfsFieldDescriptor sequenceKey,
TypeName entityTypeName) {
String methodName = byKeyMethodName(firstKey.name(), sequenceKey.name());
String fieldName = byKeyMapName(firstKey.name(), sequenceKey.name());
TypeName keyMapType =
ParameterizedTypeName.get(
ClassName.get(Map.class), ClassName.get("", "CompositeKey"), entityTypeName);
typeSpec.addField(
FieldSpec.builder(keyMapType, fieldName, Modifier.PRIVATE)
.initializer("new $T<>()", ParameterizedTypeName.get(HashMap.class))
.build());
typeSpec.addMethod(
MethodSpec.methodBuilder(methodName)
.addModifiers(Modifier.PUBLIC)
.addParameter(TypeName.get(firstKey.javaType()), firstKey.name())
.addParameter(TypeName.get(sequenceKey.javaType()), sequenceKey.name())
.returns(entityTypeName)
.addStatement(
"return $L.get(new CompositeKey($L, $L))",
fieldName,
firstKey.name(),
sequenceKey.name())
.build());
}
public JavaFile generateGtfsContainerJavaFile() {
return JavaFile.builder(TABLE_PACKAGE_NAME, generateGtfsContainerClass()).build();
}
public TypeSpec generateGtfsContainerClass() {
TypeName gtfsEntityType = classNames.entityImplementationTypeName();
TypeSpec.Builder typeSpec =
TypeSpec.classBuilder(classNames.tableContainerSimpleName())
.superclass(
ParameterizedTypeName.get(ClassName.get(GtfsTableContainer.class), gtfsEntityType))
.addAnnotation(Generated.class)
.addModifiers(Modifier.PUBLIC, Modifier.FINAL);
typeSpec.addMethod(
MethodSpec.methodBuilder("getEntityClass")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(ParameterizedTypeName.get(ClassName.get(Class.class), gtfsEntityType))
.addStatement("return $T.class", gtfsEntityType)
.build());
typeSpec.addMethod(
MethodSpec.methodBuilder("gtfsFilename")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(String.class)
.addStatement("return $T.FILENAME", classNames.tableLoaderTypeName())
.build());
typeSpec.addMethod(
MethodSpec.methodBuilder("isRequired")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(boolean.class)
.addStatement("return $L", fileDescriptor.required())
.build());
typeSpec.addField(
ParameterizedTypeName.get(ClassName.get(List.class), gtfsEntityType),
"entities",
Modifier.PRIVATE);
typeSpec.addMethod(
MethodSpec.methodBuilder("getEntities")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(ParameterizedTypeName.get(ClassName.get(List.class), gtfsEntityType))
.addStatement("return entities")
.build());
if (fileDescriptor.singleRow()) {
typeSpec.addMethod(
MethodSpec.methodBuilder("getSingleEntity")
.addModifiers(Modifier.PUBLIC)
.returns(classNames.entityImplementationTypeName())
.addStatement("return entities.isEmpty() ? null : entities.get(0)")
.build());
} else if (hasCompositeKey()) {
addListMultimapWithGetters(
typeSpec,
fileDescriptor.firstKey().get(),
fileDescriptor.sequenceKey().get(),
classNames.entityImplementationTypeName());
addMapByCompositeKey(
typeSpec,
fileDescriptor.firstKey().get(),
fileDescriptor.sequenceKey().get(),
classNames.entityImplementationTypeName());
} else if (fileDescriptor.primaryKey().isPresent()) {
addMapWithGetter(
typeSpec, fileDescriptor.primaryKey().get(), classNames.entityImplementationTypeName());
}
for (GtfsFieldDescriptor indexField : fileDescriptor.indices()) {
addListMultimapWithGetters(typeSpec, indexField, classNames.entityImplementationTypeName());
}
typeSpec.addMethod(generateConstructorWithEntities());
typeSpec.addMethod(generateConstructorWithStatus());
typeSpec.addMethod(generateSetupIndicesMethod());
typeSpec.addMethod(generateForHeaderAndEntitiesMethod());
typeSpec.addMethod(generateForEntitiesMethod());
typeSpec.addMethod(generateGetKeyColumnNames());
typeSpec.addMethod(generateByPrimaryKey());
if (hasCompositeKey()) {
typeSpec.addType(compositeKeyClass());
}
return typeSpec.build();
}
private boolean hasCompositeKey() {
return fileDescriptor.sequenceKey().isPresent() && fileDescriptor.firstKey().isPresent();
}
private TypeSpec compositeKeyClass() {
TypeSpec.Builder keySpec = TypeSpec.classBuilder("CompositeKey").addModifiers(Modifier.STATIC);
TypeName firstKeyType = TypeName.get(fileDescriptor.firstKey().get().javaType());
TypeName sequenceKeyType = TypeName.get(fileDescriptor.sequenceKey().get().javaType());
keySpec.addField(firstKeyType, "firstKey", Modifier.FINAL, Modifier.PRIVATE);
keySpec.addField(sequenceKeyType, "sequenceKey", Modifier.FINAL, Modifier.PRIVATE);
keySpec.addMethod(
MethodSpec.constructorBuilder()
.addParameter(firstKeyType, "firstKey")
.addParameter(sequenceKeyType, "sequenceKey")
.addStatement("this.firstKey = firstKey")
.addStatement("this.sequenceKey = sequenceKey")
.build());
keySpec.addMethod(
MethodSpec.methodBuilder("equals")
.addModifiers(Modifier.PUBLIC)
.addAnnotation(Override.class)
.addParameter(Object.class, "obj")
.returns(boolean.class)
.beginControlFlow("if (obj == this)")
.addStatement("return true")
.endControlFlow()
.beginControlFlow("if (obj instanceof CompositeKey)")
.addStatement("CompositeKey other = (CompositeKey) obj")
.addStatement(
"return $T.equals(firstKey, other.firstKey) && sequenceKey == other.sequenceKey",
Objects.class)
.endControlFlow()
.addStatement("return false")
.build());
keySpec.addMethod(
MethodSpec.methodBuilder("hashCode")
.addModifiers(Modifier.PUBLIC)
.addAnnotation(Override.class)
.returns(int.class)
.addStatement("return $T.hash(firstKey, sequenceKey)", Objects.class)
.build());
return keySpec.build();
}
private MethodSpec generateGetKeyColumnNames() {
return MethodSpec.methodBuilder("getKeyColumnNames")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(ParameterizedTypeName.get(ImmutableList.class, String.class))
.addStatement("return $T.KEY_COLUMN_NAMES", classNames.tableLoaderTypeName())
.build();
}
private MethodSpec generateByPrimaryKey() {
MethodSpec.Builder method =
MethodSpec.methodBuilder("byPrimaryKey")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(
ParameterizedTypeName.get(
ClassName.get(Optional.class), classNames.entityImplementationTypeName()))
.addParameter(String.class, "id")
.addParameter(String.class, "subId");
if (fileDescriptor.primaryKey().isPresent()) {
method.addStatement(
"return Optional.ofNullable($L.getOrDefault(id, null))",
byKeyMapName(fileDescriptor.primaryKey().get().name()));
} else if (hasCompositeKey()) {
GtfsFieldDescriptor firstKey = fileDescriptor.firstKey().get();
GtfsFieldDescriptor sequenceKey = fileDescriptor.sequenceKey().get();
method
.beginControlFlow("try")
.addStatement(
"return Optional.ofNullable($L.getOrDefault(new CompositeKey(id,"
+ " $T.parseInt(subId)), null))",
byKeyMapName(firstKey.name(), sequenceKey.name()),
TypeName.get(sequenceKey.javaType()).box())
.nextControlFlow("catch (NumberFormatException e)")
.addStatement("return Optional.empty()")
.endControlFlow();
} else if (fileDescriptor.singleRow()) {
method.addStatement(
"return entities.isEmpty() ? Optional.empty() : Optional.of(entities.get(0))");
} else {
method.addStatement("return Optional.empty()");
}
return method.build();
}
private MethodSpec generateConstructorWithEntities() {
return MethodSpec.constructorBuilder()
.addModifiers(Modifier.PRIVATE)
.addParameter(CsvHeader.class, "header")
.addParameter(
ParameterizedTypeName.get(
ClassName.get(List.class), classNames.entityImplementationTypeName()),
"entities")
.addStatement("super(TableStatus.PARSABLE_HEADERS_AND_ROWS, header)")
.addStatement("this.entities = entities")
.build();
}
private MethodSpec generateConstructorWithStatus() {
return MethodSpec.constructorBuilder()
.addModifiers(Modifier.PUBLIC)
.addParameter(GtfsTableContainer.TableStatus.class, "tableStatus")
.addStatement("super(tableStatus, $T.EMPTY)", CsvHeader.class)
.addStatement("this.entities = new $T<>()", ArrayList.class)
.build();
}
private MethodSpec generateForHeaderAndEntitiesMethod() {
TypeName tableContainerTypeName = classNames.tableContainerTypeName();
return MethodSpec.methodBuilder("forHeaderAndEntities")
.returns(tableContainerTypeName)
.addJavadoc("Creates a table with given header and entities")
.addModifiers(Modifier.PUBLIC, Modifier.STATIC)
.addParameter(CsvHeader.class, "header")
.addParameter(
ParameterizedTypeName.get(
ClassName.get(List.class), classNames.entityImplementationTypeName()),
"entities")
.addParameter(NoticeContainer.class, "noticeContainer")
.addStatement(
"$T table = new $T(header, entities)", tableContainerTypeName, tableContainerTypeName)
.addStatement("table.setupIndices(noticeContainer)")
.addStatement("return table")
.build();
}
private MethodSpec generateForEntitiesMethod() {
TypeName tableContainerTypeName = classNames.tableContainerTypeName();
return MethodSpec.methodBuilder("forEntities")
.returns(tableContainerTypeName)
.addJavadoc(
"Creates a table with given entities and empty header. This method is intended to be"
+ " used in tests.")
.addModifiers(Modifier.PUBLIC, Modifier.STATIC)
.addParameter(
ParameterizedTypeName.get(
ClassName.get(List.class), classNames.entityImplementationTypeName()),
"entities")
.addParameter(NoticeContainer.class, "noticeContainer")
.addStatement(
"return forHeaderAndEntities($T.EMPTY, entities, noticeContainer)", CsvHeader.class)
.build();
}
  /**
   * Generates the private {@code setupIndices(NoticeContainer)} method that populates the
   * container's lookup maps and reports duplicate-key validation notices.
   *
   * <p>Exactly one of three indexing strategies is emitted, depending on the table schema:
   * single-row tables (no index, just an extra-row check), composite-key tables (uniqueness on
   * first key + sequence key, plus a first-key multimap sorted by the sequence key), or simple
   * primary-key tables. Secondary indices are appended afterwards for all schemas.
   */
  private MethodSpec generateSetupIndicesMethod() {
    TypeName gtfsEntityType = classNames.entityImplementationTypeName();
    TypeName loaderType = classNames.tableLoaderTypeName();
    MethodSpec.Builder method =
        MethodSpec.methodBuilder("setupIndices")
            .addModifiers(Modifier.PRIVATE)
            .addParameter(NoticeContainer.class, "noticeContainer")
            .returns(void.class);
    if (fileDescriptor.singleRow()) {
      // Single-row tables keep no index; just flag any extra rows.
      method
          .beginControlFlow("if (entities.size() > 1)")
          .addStatement(
              "noticeContainer.addValidationNotice(new $T(gtfsFilename(), entities.size()))",
              MoreThanOneEntityNotice.class)
          .endControlFlow();
    } else if (hasCompositeKey()) {
      // Composite key: enforce uniqueness on (firstKey, sequenceKey), then build a multimap
      // keyed by the first key whose per-key lists are sorted by the sequence key.
      GtfsFieldDescriptor firstKey = fileDescriptor.firstKey().get();
      GtfsFieldDescriptor sequenceKey = fileDescriptor.sequenceKey().get();
      String byCompositeKeyMap = byKeyMapName(firstKey.name(), sequenceKey.name());
      method
          .beginControlFlow("for ($T newEntity : entities)", gtfsEntityType)
          .addStatement(
              "CompositeKey key = new CompositeKey(newEntity.$L(), newEntity.$L())",
              fileDescriptor.firstKey().get().name(),
              fileDescriptor.sequenceKey().get().name())
          .addStatement(
              "$T oldEntity = $L.getOrDefault(key, null)",
              classNames.entityImplementationTypeName(),
              byCompositeKeyMap)
          .beginControlFlow("if (oldEntity != null)")
          // A previous entity holds the same composite key: report the duplicate
          // (old and new rows are equal on both key fields, so oldEntity's values are used).
          .addStatement(
              "noticeContainer.addValidationNotice(new $T("
                  + "gtfsFilename(), newEntity.csvRowNumber(), "
                  + "oldEntity.csvRowNumber(), "
                  + "$T.$L, oldEntity.$L(), "
                  + "$T.$L, oldEntity.$L()))",
              DuplicateKeyNotice.class,
              loaderType,
              fieldNameField(firstKey.name()),
              firstKey.name(),
              loaderType,
              fieldNameField(sequenceKey.name()),
              sequenceKey.name())
          .nextControlFlow("else")
          .addStatement("$L.put(key, newEntity)", byCompositeKeyMap)
          .endControlFlow()
          .endControlFlow();
      String byFirstKeyMap = byKeyMapName(firstKey.name());
      method.beginControlFlow("for ($T entity : entities)", gtfsEntityType);
      method.addStatement("$L.put(entity.$L(), entity)", byFirstKeyMap, firstKey.name());
      method.endControlFlow();
      // Keep each first-key group ordered by its sequence field (boxed type for compare()).
      method
          .beginControlFlow(
              "for (List<$T> entityList: $T.asMap($L).values())",
              gtfsEntityType,
              Multimaps.class,
              byFirstKeyMap)
          .addStatement(
              "entityList.sort((entity1, entity2) -> $T.compare(entity1.$L(), entity2.$L()))",
              TypeName.get(sequenceKey.javaType()).box(),
              sequenceKey.name(),
              sequenceKey.name())
          .endControlFlow();
    } else if (fileDescriptor.primaryKey().isPresent()) {
      // Simple primary key: rows without the key are skipped; duplicates are reported.
      GtfsFieldDescriptor primaryKey = fileDescriptor.primaryKey().get();
      String byKeyMap = byKeyMapName(primaryKey.name());
      method.beginControlFlow("for ($T newEntity : entities)", gtfsEntityType);
      method
          .beginControlFlow("if (!newEntity.$L())", hasMethodName(primaryKey.name()))
          .addStatement("continue")
          .endControlFlow()
          .addStatement(
              "$T oldEntity = $L.getOrDefault(newEntity.$L(), null)",
              classNames.entityImplementationTypeName(),
              byKeyMap,
              primaryKey.name())
          .beginControlFlow("if (oldEntity != null)")
          .addStatement(
              "noticeContainer.addValidationNotice(new $T(gtfsFilename(),"
                  + " newEntity.csvRowNumber(), oldEntity.csvRowNumber(), $T.$L, newEntity.$L()))",
              DuplicateKeyNotice.class,
              loaderType,
              fieldNameField(primaryKey.name()),
              primaryKey.name())
          .nextControlFlow("else")
          .addStatement("$L.put(newEntity.$L(), newEntity)", byKeyMap, primaryKey.name())
          .endControlFlow();
      method.endControlFlow();
    }
    // Non-unique secondary indices (multimaps) for every field declared as indexed.
    if (!fileDescriptor.indices().isEmpty()) {
      method.beginControlFlow("for ($T entity : entities)", gtfsEntityType);
      for (GtfsFieldDescriptor indexField : fileDescriptor.indices()) {
        method.addStatement(
            "$L.put(entity.$L(), entity)", byKeyMapName(indexField.name()), indexField.name());
      }
      method.endControlFlow();
    }
    return method.build();
  }
}
|
|
/*
GNU LESSER GENERAL PUBLIC LICENSE
Copyright (C) 2006 The Lobo Project
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Contact info: lobochief@users.sourceforge.net
*/
/*
* Created on May 21, 2005
*/
package org.cobraparser.html.renderer;
import java.awt.Color;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.event.MouseEvent;
import org.cobraparser.html.domimpl.ModelNode;
import org.cobraparser.html.style.RenderState;
/**
 * A renderable representing a blank (whitespace) run in a line box. It has fixed dimensions,
 * forwards mouse events to the shared {@link HtmlController}, and paints only background,
 * text decorations, overlay and selection highlight.
 */
final class RBlank extends BaseBoundableRenderable {
  // TODO: Is there a need for RBlank's at all?
  public final int ascentPlusLeading;
  private final FontMetrics fontMetrics;

  public RBlank(final ModelNode me, final FontMetrics fm, final RenderableContainer container, final int ascentPlusLeading,
      final int width, final int height) {
    super(container, me);
    this.fontMetrics = fm;
    this.ascentPlusLeading = ascentPlusLeading;
    // Dimensions are fixed at construction time and never re-laid-out.
    this.width = width;
    this.height = height;
  }

  @Override
  protected void invalidateLayoutLocal() {
    // Nothing cached locally; dimensions are immutable.
  }

  public boolean onMouseClick(final MouseEvent event, final int x, final int y) {
    final ModelNode node = this.modelNode;
    // With no backing model node the event is simply not consumed here.
    return node == null || HtmlController.getInstance().onMouseClick(node, event, x, y);
  }

  public boolean onDoubleClick(final MouseEvent event, final int x, final int y) {
    final ModelNode node = this.modelNode;
    return node == null || HtmlController.getInstance().onDoubleClick(node, event, x, y);
  }

  public boolean onMousePressed(final MouseEvent event, final int x, final int y) {
    final ModelNode node = this.modelNode;
    return node == null || HtmlController.getInstance().onMouseDown(node, event, x, y);
  }

  public boolean onMouseReleased(final MouseEvent event, final int x, final int y) {
    final ModelNode node = this.modelNode;
    return node == null || HtmlController.getInstance().onMouseUp(node, event, x, y);
  }

  public boolean onMouseDisarmed(final MouseEvent event) {
    final ModelNode node = this.modelNode;
    return node == null || HtmlController.getInstance().onMouseDisarmed(node, event);
  }

  public final void paint(final Graphics g) {
    final RenderState rs = this.modelNode.getRenderState();
    if (rs.getVisibility() != RenderState.VISIBILITY_VISIBLE) {
      // Hidden: draw nothing at all.
      return;
    }
    fillWith(g, rs.getTextBackgroundColor());
    paintTextDecorations(g, rs);
    fillWith(g, rs.getOverlayColor());
  }

  /** Fills the whole blank area with {@code color}; no-op when {@code color} is null. */
  private void fillWith(final Graphics g, final Color color) {
    if (color != null) {
      final Color saved = g.getColor();
      try {
        g.setColor(color);
        g.fillRect(0, 0, this.width, this.height);
      } finally {
        g.setColor(saved);
      }
    }
  }

  /** Draws underline / line-through / overline as requested by the render state mask. */
  private void paintTextDecorations(final Graphics g, final RenderState rs) {
    final int mask = rs.getTextDecorationMask();
    if (mask == 0) {
      return;
    }
    if ((mask & RenderState.MASK_TEXTDECORATION_UNDERLINE) != 0) {
      final int y = this.ascentPlusLeading + 2;
      g.drawLine(0, y, this.width, y);
    }
    if ((mask & RenderState.MASK_TEXTDECORATION_LINE_THROUGH) != 0) {
      final FontMetrics fm = this.fontMetrics;
      final int y = fm.getLeading() + ((fm.getAscent() + fm.getDescent()) / 2);
      g.drawLine(0, y, this.width, y);
    }
    if ((mask & RenderState.MASK_TEXTDECORATION_OVERLINE) != 0) {
      final int y = this.fontMetrics.getLeading();
      g.drawLine(0, y, this.width, y);
    }
    if ((mask & RenderState.MASK_TEXTDECORATION_BLINK) != 0) {
      // TODO: blink is not rendered.
    }
  }

  /**
   * Decides whether this renderable is inside the current selection sweep. At a selection
   * endpoint the in/out state toggles; elsewhere the sweep continues only while inside.
   */
  private boolean inSelectedRange(final boolean inSelection, final RenderableSpot startPoint, final RenderableSpot endPoint) {
    final boolean boundary = (this == startPoint.renderable) || (this == endPoint.renderable);
    return boundary ? !inSelection : inSelection;
  }

  public boolean paintSelection(final Graphics g, final boolean inSelection, final RenderableSpot startPoint, final RenderableSpot endPoint) {
    if (!inSelectedRange(inSelection, startPoint, endPoint)) {
      return false;
    }
    // XOR paint so repainting the same area restores the original pixels.
    g.setColor(SELECTION_COLOR);
    g.setXORMode(SELECTION_XOR);
    g.fillRect(0, 0, this.width, this.height);
    g.setPaintMode();
    return true;
  }

  public boolean extractSelectionText(final StringBuffer buffer, final boolean inSelection, final RenderableSpot startPoint,
      final RenderableSpot endPoint) {
    if (!inSelectedRange(inSelection, startPoint, endPoint)) {
      return false;
    }
    // A blank contributes a single space to the extracted selection text.
    buffer.append(' ');
    return true;
  }

  public RenderableSpot getLowestRenderableSpot(final int x, final int y) {
    // A blank has no children; it is always its own lowest renderable.
    return new RenderableSpot(this, x, y);
  }

  public boolean isContainedByNode() {
    return true;
  }

  public boolean onRightClick(final MouseEvent event, final int x, final int y) {
    final ModelNode node = this.modelNode;
    return node == null || HtmlController.getInstance().onContextMenu(node, event, x, y);
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.aggregation.last;
import org.apache.druid.collections.SerializablePair;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.query.aggregation.Aggregator;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.TestDoubleColumnSelectorImpl;
import org.apache.druid.query.aggregation.TestLongColumnSelector;
import org.apache.druid.query.aggregation.TestObjectColumnSelector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.Comparator;
public class DoubleLastAggregationTest extends InitializedNullHandlingTest
{
private DoubleLastAggregatorFactory doubleLastAggFactory;
private DoubleLastAggregatorFactory combiningAggFactory;
private ColumnSelectorFactory colSelectorFactory;
private TestLongColumnSelector timeSelector;
private TestLongColumnSelector customTimeSelector;
private TestDoubleColumnSelectorImpl valueSelector;
private TestObjectColumnSelector objectSelector;
private double[] doubles = {1.1897d, 0.001d, 86.23d, 166.228d};
private long[] times = {8224, 6879, 2436, 7888};
private long[] customTimes = {1, 4, 3, 2};
private SerializablePair[] pairs = {
new SerializablePair<>(52782L, 134.3d),
new SerializablePair<>(65492L, 1232.212d),
new SerializablePair<>(69134L, 18.1233d),
new SerializablePair<>(11111L, 233.5232d)
};
@Before
public void setup()
{
doubleLastAggFactory = new DoubleLastAggregatorFactory("billy", "nilly", null);
combiningAggFactory = (DoubleLastAggregatorFactory) doubleLastAggFactory.getCombiningFactory();
timeSelector = new TestLongColumnSelector(times);
customTimeSelector = new TestLongColumnSelector(customTimes);
valueSelector = new TestDoubleColumnSelectorImpl(doubles);
objectSelector = new TestObjectColumnSelector<>(pairs);
colSelectorFactory = EasyMock.createMock(ColumnSelectorFactory.class);
EasyMock.expect(colSelectorFactory.makeColumnValueSelector(ColumnHolder.TIME_COLUMN_NAME)).andReturn(timeSelector);
EasyMock.expect(colSelectorFactory.makeColumnValueSelector("customTime")).andReturn(customTimeSelector);
EasyMock.expect(colSelectorFactory.makeColumnValueSelector("nilly")).andReturn(valueSelector);
EasyMock.expect(colSelectorFactory.makeColumnValueSelector("billy")).andReturn(objectSelector);
EasyMock.replay(colSelectorFactory);
}
@Test
public void testDoubleLastAggregator()
{
Aggregator agg = doubleLastAggFactory.factorize(colSelectorFactory);
aggregate(agg);
aggregate(agg);
aggregate(agg);
aggregate(agg);
Pair<Long, Double> result = (Pair<Long, Double>) agg.get();
Assert.assertEquals(times[0], result.lhs.longValue());
Assert.assertEquals(doubles[0], result.rhs, 0.0001);
Assert.assertEquals((long) doubles[0], agg.getLong());
Assert.assertEquals(doubles[0], agg.getDouble(), 0.0001);
}
@Test
public void testDoubleLastAggregatorWithTimeColumn()
{
Aggregator agg = new DoubleLastAggregatorFactory("billy", "nilly", "customTime").factorize(colSelectorFactory);
aggregate(agg);
aggregate(agg);
aggregate(agg);
aggregate(agg);
Pair<Long, Double> result = (Pair<Long, Double>) agg.get();
Assert.assertEquals(customTimes[1], result.lhs.longValue());
Assert.assertEquals(doubles[1], result.rhs, 0.0001);
Assert.assertEquals((long) doubles[1], agg.getLong());
Assert.assertEquals(doubles[1], agg.getDouble(), 0.0001);
}
@Test
public void testDoubleLastBufferAggregator()
{
BufferAggregator agg = doubleLastAggFactory.factorizeBuffered(
colSelectorFactory);
ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSizeWithNulls()]);
agg.init(buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
Pair<Long, Double> result = (Pair<Long, Double>) agg.get(buffer, 0);
Assert.assertEquals(times[0], result.lhs.longValue());
Assert.assertEquals(doubles[0], result.rhs, 0.0001);
Assert.assertEquals((long) doubles[0], agg.getLong(buffer, 0));
Assert.assertEquals(doubles[0], agg.getDouble(buffer, 0), 0.0001);
}
@Test
public void testDoubleLastBufferAggregatorWithTimeColumn()
{
BufferAggregator agg = new DoubleLastAggregatorFactory("billy", "nilly", "customTime").factorizeBuffered(
colSelectorFactory);
ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSizeWithNulls()]);
agg.init(buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
Pair<Long, Double> result = (Pair<Long, Double>) agg.get(buffer, 0);
Assert.assertEquals(customTimes[1], result.lhs.longValue());
Assert.assertEquals(doubles[1], result.rhs, 0.0001);
Assert.assertEquals((long) doubles[1], agg.getLong(buffer, 0));
Assert.assertEquals(doubles[1], agg.getDouble(buffer, 0), 0.0001);
}
@Test
public void testCombine()
{
SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621);
SerializablePair pair2 = new SerializablePair<>(1467240000L, 785.4);
Assert.assertEquals(pair2, doubleLastAggFactory.combine(pair1, pair2));
}
@Test
public void testComparatorWithNulls()
{
SerializablePair pair1 = new SerializablePair<>(1467225000L, 3.621);
SerializablePair pair2 = new SerializablePair<>(1467240000L, null);
Comparator comparator = doubleLastAggFactory.getComparator();
Assert.assertEquals(1, comparator.compare(pair1, pair2));
Assert.assertEquals(0, comparator.compare(pair1, pair1));
Assert.assertEquals(0, comparator.compare(pair2, pair2));
Assert.assertEquals(-1, comparator.compare(pair2, pair1));
}
@Test
public void testDoubleLastCombiningAggregator()
{
Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
aggregate(agg);
aggregate(agg);
aggregate(agg);
aggregate(agg);
Pair<Long, Double> result = (Pair<Long, Double>) agg.get();
Pair<Long, Double> expected = (Pair<Long, Double>) pairs[2];
Assert.assertEquals(expected.lhs, result.lhs);
Assert.assertEquals(expected.rhs, result.rhs, 0.0001);
Assert.assertEquals(expected.rhs.longValue(), agg.getLong());
Assert.assertEquals(expected.rhs, agg.getDouble(), 0.0001);
}
@Test
public void testDoubleLastCombiningBufferAggregator()
{
BufferAggregator agg = combiningAggFactory.factorizeBuffered(
colSelectorFactory);
ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSizeWithNulls()]);
agg.init(buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
aggregate(agg, buffer, 0);
Pair<Long, Double> result = (Pair<Long, Double>) agg.get(buffer, 0);
Pair<Long, Double> expected = (Pair<Long, Double>) pairs[2];
Assert.assertEquals(expected.lhs, result.lhs);
Assert.assertEquals(expected.rhs, result.rhs, 0.0001);
Assert.assertEquals(expected.rhs.longValue(), agg.getLong(buffer, 0));
Assert.assertEquals(expected.rhs, agg.getDouble(buffer, 0), 0.0001);
}
@Test
public void testSerde() throws Exception
{
DefaultObjectMapper mapper = new DefaultObjectMapper();
String doubleSpecJson = "{\"type\":\"doubleLast\",\"name\":\"billy\",\"fieldName\":\"nilly\"}";
AggregatorFactory deserialized = mapper.readValue(doubleSpecJson, AggregatorFactory.class);
Assert.assertEquals(doubleLastAggFactory, deserialized);
Assert.assertArrayEquals(doubleLastAggFactory.getCacheKey(), deserialized.getCacheKey());
}
private void aggregate(
Aggregator agg
)
{
agg.aggregate();
timeSelector.increment();
customTimeSelector.increment();
valueSelector.increment();
objectSelector.increment();
}
private void aggregate(
BufferAggregator agg,
ByteBuffer buff,
int position
)
{
agg.aggregate(buff, position);
timeSelector.increment();
customTimeSelector.increment();
valueSelector.increment();
objectSelector.increment();
}
}
|
|
package com.viesis.viescraft.common.entity.airships;
import java.util.List;
import com.viesis.viescraft.api.EnumsVC;
import com.viesis.viescraft.api.util.LogHelper;
import com.viesis.viescraft.client.InitSoundEventsVC;
import com.viesis.viescraft.configs.ViesCraftConfig;
import com.viesis.viescraft.init.InitItemsVC;
import com.viesis.viescraft.network.NetworkHandler;
import com.viesis.viescraft.network.server.airship.MessageDropBombBig;
import com.viesis.viescraft.network.server.airship.MessageDropBombScatter;
import com.viesis.viescraft.network.server.airship.MessageDropBombSmall;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.MoverType;
import net.minecraft.entity.passive.EntityWaterMob;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EntitySelectors;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.world.World;
import net.minecraftforge.items.ItemStackHandler;
public class EntityAirshipCore extends EntityAirshipBaseVC {
// Base handling constants; effective values are adjusted at runtime by frame tier
// and active speed modules (see controlAirship()).
float finalAirshipSpeedTurn = 0.18F;
float finalAirshipSpeedForward = 0.01F;
float finalAirshipSpeedUp = 0.004F;
float finalAirshipSpeedDown = 0.004F;
// Cooldown counter between bomb drops — presumably ticks; see bombDropCooldown() (TODO confirm).
int bombDropTimer;
/**
 * Bare constructor; state is expected to be populated afterwards
 * (e.g. via NBT load or the full constructor below).
 */
public EntityAirshipCore(World worldIn)
{
super(worldIn);
}
/**
 * Full constructor used when (re)creating an airship with complete state: position, tier of
 * every part, active module slot, stored resources, bomb loadout, all visual customization,
 * per-module learned/selected flags, inventory NBT and custom name.
 *
 * <p>The entity is placed at (x, y + 0.5, z) with zero initial motion.
 */
public EntityAirshipCore(World worldIn, double x, double y, double z,
int coreTierIn, int frameTierIn, int engineTierIn, int balloonTierIn,
int moduleSlot1In,
int fuelIn, int fuelTotalIn, int redstoneIn, int redstoneTotalIn,
int bombType1, int bombType2, int bombType3,
int coreModelVisualFrameIn,
int coreModelVisualEngineIn,
int coreModelVisualBalloonIn,
int frameSkinTextureIn,
boolean frameSkinTransparentIn,
boolean frameSkinColorIn,
int frameSkinColorRedIn, int frameSkinColorGreenIn, int frameSkinColorBlueIn,
int engineParticleVisualIn,
int engineDisplayTypeVisualIn,
int engineDisplayItemStackVisualIn, int engineDisplayItemStackMetaVisualIn,
int engineDisplayHeadVisualIn,
int engineDisplaySupporterHeadVisualIn,
int engineDisplayHolidayVisualIn,
int balloonPatternTextureIn,
boolean balloonPatternTransparentIn,
boolean balloonPatternColorIn,
int balloonPatternColorRedIn, int balloonPatternColorGreenIn, int balloonPatternColorBlueIn,
boolean learnedModuleAltitudeIn, int selectedModuleAltitudeIn,
boolean learnedModuleSpeedIn, int selectedModuleSpeedIn,
boolean learnedModuleStorageIn, int selectedModuleStorageIn,
boolean learnedModuleFuelIn, int selectedModuleFuelIn,
boolean learnedModuleMusicIn, int selectedModuleMusicIn,
boolean learnedModuleCruiseIn, int selectedModuleCruiseIn,
boolean learnedModuleWaterIn, int selectedModuleWaterIn,
boolean learnedModuleFuelInfiniteIn, int selectedModuleFuelInfiniteIn,
boolean learnedModuleBombIn, int selectedModuleBombIn,
NBTTagCompound compoundIn, String customNameIn)
{
this(worldIn);
this.setPosition(x, y + 0.5D, z);
// Part tiers.
this.mainTierCore = coreTierIn;
this.mainTierFrame = frameTierIn;
this.mainTierEngine = engineTierIn;
this.mainTierBalloon = balloonTierIn;
this.moduleActiveSlot1 = moduleSlot1In;
// Stored resources.
this.storedFuel = fuelIn;
this.storedFuelTotal = fuelTotalIn;
this.storedRedstone = redstoneIn;
this.storedRedstoneTotal = redstoneTotalIn;
// Bomb loadout; slot 1 is always the initially active bomb type.
this.storedBombType1 = bombType1;
this.storedBombType2 = bombType2;
this.storedBombType3 = bombType3;
this.bombTypeActive = 1;
// Visual customization: models, frame skin, engine display, balloon pattern.
this.coreModelVisualFrame = coreModelVisualFrameIn;
this.coreModelVisualEngine = coreModelVisualEngineIn;
this.coreModelVisualBalloon = coreModelVisualBalloonIn;
this.frameSkinTexture = frameSkinTextureIn;
this.frameSkinTransparent = frameSkinTransparentIn;
this.frameSkinColor = frameSkinColorIn;
this.frameSkinColorRed = frameSkinColorRedIn;
this.frameSkinColorGreen = frameSkinColorGreenIn;
this.frameSkinColorBlue = frameSkinColorBlueIn;
this.engineParticleVisual = engineParticleVisualIn;
this.engineDisplayTypeVisual = engineDisplayTypeVisualIn;
this.engineDisplayItemstackVisual = engineDisplayItemStackVisualIn;
this.engineDisplayItemstackMetaVisual = engineDisplayItemStackMetaVisualIn;
this.engineDisplayHeadVisual = engineDisplayHeadVisualIn;
this.engineDisplaySupporterHeadVisual = engineDisplaySupporterHeadVisualIn;
this.engineDisplayHolidayVisual = engineDisplayHolidayVisualIn;
this.balloonPatternTexture = balloonPatternTextureIn;
this.balloonPatternTransparent = balloonPatternTransparentIn;
this.balloonPatternColor = balloonPatternColorIn;
this.balloonPatternColorRed = balloonPatternColorRedIn;
this.balloonPatternColorGreen = balloonPatternColorGreenIn;
this.balloonPatternColorBlue = balloonPatternColorBlueIn;
// Per-module learned/selected state.
this.learnedModuleAltitude = learnedModuleAltitudeIn;
this.selectedModuleAltitude = selectedModuleAltitudeIn;
this.learnedModuleSpeed = learnedModuleSpeedIn;
this.selectedModuleSpeed = selectedModuleSpeedIn;
this.learnedModuleStorage = learnedModuleStorageIn;
this.selectedModuleStorage = selectedModuleStorageIn;
this.learnedModuleFuel = learnedModuleFuelIn;
this.selectedModuleFuel = selectedModuleFuelIn;
this.learnedModuleMusic = learnedModuleMusicIn;
this.selectedModuleMusic = selectedModuleMusicIn;
this.learnedModuleCruise = learnedModuleCruiseIn;
this.selectedModuleCruise = selectedModuleCruiseIn;
this.learnedModuleWater = learnedModuleWaterIn;
this.selectedModuleWater = selectedModuleWaterIn;
this.learnedModuleFuelInfinite = learnedModuleFuelInfiniteIn;
this.selectedModuleFuelInfinite = selectedModuleFuelInfiniteIn;
this.learnedModuleBomb = learnedModuleBombIn;
this.selectedModuleBomb = selectedModuleBombIn;
// Inventory restored from NBT; `size` comes from the base class — TODO confirm it is
// already set correctly before this constructor runs.
this.inventory = new ItemStackHandler(size);
this.inventory.deserializeNBT(compoundIn);
this.customName = customNameIn;
// Spawn at rest.
this.motionX = 0.0D;
this.motionY = 0.0D;
this.motionZ = 0.0D;
this.prevPosX = x;
this.prevPosY = y;
this.prevPosZ = z;
}
@Override
public void entityInit()
{
// No extra data parameters beyond those registered by the base class.
super.entityInit();
}
//==================================//
// TODO Item/Name //
//==================================//
/**
 * Serializes this airship into its item form: the item damage encodes the core tier and the
 * NBT tag carries every piece of persistent state (tiers, visuals, resources, modules,
 * inventory and custom name) so the airship can be fully reconstructed when placed again.
 */
@Override
public ItemStack getItemAirship()
{
    ItemStack stack = new ItemStack(InitItemsVC.ITEM_AIRSHIP, 1, this.mainTierCore);
    NBTTagCompound tag = new NBTTagCompound();
    stack.setTagCompound(tag);
    // Part tiers.
    tag.setInteger(rf.MAIN_TIER_FRAME_TAG, this.mainTierFrame);
    tag.setInteger(rf.MAIN_TIER_ENGINE_TAG, this.mainTierEngine);
    tag.setInteger(rf.MAIN_TIER_BALLOON_TAG, this.mainTierBalloon);
    // Visual models.
    tag.setInteger(rf.CORE_MODEL_VISUAL_FRAME_TAG, this.coreModelVisualFrame);
    tag.setInteger(rf.CORE_MODEL_VISUAL_ENGINE_TAG, this.coreModelVisualEngine);
    tag.setInteger(rf.CORE_MODEL_VISUAL_BALLOON_TAG, this.coreModelVisualBalloon);
    // Frame skin.
    tag.setInteger(rf.FRAME_SKIN_TEXTURE_TAG, this.frameSkinTexture);
    tag.setBoolean(rf.FRAME_SKIN_TRANSPARENT_TAG, this.frameSkinTransparent);
    tag.setBoolean(rf.FRAME_SKIN_COLOR_TAG, this.frameSkinColor);
    tag.setInteger(rf.FRAME_SKIN_COLOR_RED_TAG, this.frameSkinColorRed);
    tag.setInteger(rf.FRAME_SKIN_COLOR_GREEN_TAG, this.frameSkinColorGreen);
    tag.setInteger(rf.FRAME_SKIN_COLOR_BLUE_TAG, this.frameSkinColorBlue);
    // Engine display.
    tag.setInteger(rf.ENGINE_PARTICLE_VISUAL_TAG, this.engineParticleVisual);
    tag.setInteger(rf.ENGINE_DISPLAY_TYPE_VISUAL_TAG, this.engineDisplayTypeVisual);
    tag.setInteger(rf.ENGINE_DISPLAY_ITEMSTACK_VISUAL_TAG, this.engineDisplayItemstackVisual);
    tag.setInteger(rf.ENGINE_DISPLAY_ITEMSTACK_META_VISUAL_TAG, this.engineDisplayItemstackMetaVisual);
    tag.setInteger(rf.ENGINE_DISPLAY_HEAD_VISUAL_TAG, this.engineDisplayHeadVisual);
    tag.setInteger(rf.ENGINE_DISPLAY_SUPPORTER_HEAD_VISUAL_TAG, this.engineDisplaySupporterHeadVisual);
    tag.setInteger(rf.ENGINE_DISPLAY_HOLIDAY_VISUAL_TAG, this.engineDisplayHolidayVisual);
    // Balloon pattern.
    tag.setInteger(rf.BALLOON_PATTERN_TEXTURE_TAG, this.balloonPatternTexture);
    tag.setBoolean(rf.BALLOON_PATTERN_TRANSPARENT_TAG, this.balloonPatternTransparent);
    tag.setBoolean(rf.BALLOON_PATTERN_COLOR_TAG, this.balloonPatternColor);
    tag.setInteger(rf.BALLOON_PATTERN_COLOR_RED_TAG, this.balloonPatternColorRed);
    tag.setInteger(rf.BALLOON_PATTERN_COLOR_GREEN_TAG, this.balloonPatternColorGreen);
    tag.setInteger(rf.BALLOON_PATTERN_COLOR_BLUE_TAG, this.balloonPatternColorBlue);
    // Stored resources and bombs.
    tag.setInteger(rf.STORED_FUEL_TAG, this.storedFuel);
    tag.setInteger(rf.STORED_FUEL_TOTAL_TAG, this.storedFuelTotal);
    tag.setInteger(rf.FUEL_ITEMSTACK_TAG, this.fuelItemStack);
    tag.setInteger(rf.FUEL_ITEMSTACK_SIZE_TAG, this.fuelItemStackSize);
    tag.setInteger(rf.STORED_REDSTONE_TAG, this.storedRedstone);
    tag.setInteger(rf.STORED_REDSTONE_TOTAL_TAG, this.storedRedstoneTotal);
    tag.setInteger(rf.BOMB_TYPE_1_TAG, this.storedBombType1);
    tag.setInteger(rf.BOMB_TYPE_2_TAG, this.storedBombType2);
    tag.setInteger(rf.BOMB_TYPE_3_TAG, this.storedBombType3);
    // Module state.
    tag.setInteger(rf.MODULE_ACTIVE_SLOT1_TAG, this.moduleActiveSlot1);
    tag.setBoolean(rf.LEARNED_MODULE_ALTITUDE_TAG, this.learnedModuleAltitude);
    tag.setInteger(rf.SELECTED_MODULE_ALTITUDE_TAG, this.selectedModuleAltitude);
    tag.setBoolean(rf.LEARNED_MODULE_SPEED_TAG, this.learnedModuleSpeed);
    tag.setInteger(rf.SELECTED_MODULE_SPEED_TAG, this.selectedModuleSpeed);
    tag.setBoolean(rf.LEARNED_MODULE_STORAGE_TAG, this.learnedModuleStorage);
    tag.setInteger(rf.SELECTED_MODULE_STORAGE_TAG, this.selectedModuleStorage);
    tag.setBoolean(rf.LEARNED_MODULE_FUEL_TAG, this.learnedModuleFuel);
    tag.setInteger(rf.SELECTED_MODULE_FUEL_TAG, this.selectedModuleFuel);
    tag.setBoolean(rf.LEARNED_MODULE_MUSIC_TAG, this.learnedModuleMusic);
    tag.setInteger(rf.SELECTED_MODULE_MUSIC_TAG, this.selectedModuleMusic);
    tag.setBoolean(rf.LEARNED_MODULE_CRUISE_TAG, this.learnedModuleCruise);
    tag.setInteger(rf.SELECTED_MODULE_CRUISE_TAG, this.selectedModuleCruise);
    tag.setBoolean(rf.LEARNED_MODULE_WATER_TAG, this.learnedModuleWater);
    tag.setInteger(rf.SELECTED_MODULE_WATER_TAG, this.selectedModuleWater);
    tag.setBoolean(rf.LEARNED_MODULE_FUELINFINITE_TAG, this.learnedModuleFuelInfinite);
    tag.setInteger(rf.SELECTED_MODULE_FUELINFINITE_TAG, this.selectedModuleFuelInfinite);
    tag.setBoolean(rf.LEARNED_MODULE_BOMB_TAG, this.learnedModuleBomb);
    tag.setInteger(rf.SELECTED_MODULE_BOMB_TAG, this.selectedModuleBomb);
    // Inventory and display name.
    tag.setTag(rf.AIRSHIP_SLOTS_TAG, this.inventory.serializeNBT());
    tag.setString(rf.CUSTOM_NAME_TAG, this.customName);
    return stack;
}
/**
 * Display name for Waila: the player-assigned custom name when one is set, otherwise the
 * default name prefixed with the chat color that matches the core tier.
 */
@Override
public String getName()
{
    if (this.hasCustomName())
    {
        return this.customName;
    }
    return this.getPrimaryLabelColor(this.getMainTierCore()) + "" + this.getCustomName();
}
/**
 * Maps a core tier to the chat color used for the airship's label.
 *
 * <p>Fixed: the original wrote {@code return stringColorLabel = ...} — a dead assignment to a
 * local that was never read — and duplicated the tier-0 case in {@code default}. Behavior is
 * unchanged: unknown tiers (including 0) fall back to gray.
 *
 * @param stack the core tier (0–5)
 * @return the {@link TextFormatting} color for that tier
 */
protected TextFormatting getPrimaryLabelColor(int stack)
{
    switch(stack)
    {
        case 1:
            return TextFormatting.WHITE;
        case 2:
            return TextFormatting.YELLOW;
        case 3:
            return TextFormatting.AQUA;
        case 4:
            return TextFormatting.LIGHT_PURPLE;
        case 5:
            return TextFormatting.RED;
        case 0:
        default:
            return TextFormatting.GRAY;
    }
}
//==================================//
// TODO On Update //
//==================================//
/**
 * Per-tick update: module upkeep, physics/steering when a passenger may steer, block and
 * entity collisions, and auto-mounting of small mobs when no player is driving.
 *
 * <p>Fixed: the entity-collision loop previously contained a stale, brace-less duplicate of
 * the mount condition (with {@code < 2}) directly above the real one ({@code < 1}); that made
 * the {@code else applyEntityCollision} branch unreachable whenever the outer copy failed, so
 * non-mountable entities were neither mounted nor pushed. The duplicate is removed and the
 * stricter {@code < 1} condition kept.
 */
@Override
public void onUpdate()
{
    // NOTE(review): this OR of inequalities is always true (the slot cannot equal all three
    // water-module ids at once), so waterDamage() runs every tick regardless of modules.
    // The intent was probably an && chain (skip water damage while a water module is active)
    // — confirm before changing, since fixing it would alter gameplay.
    if(this.getModuleActiveSlot1() != EnumsVC.ModuleType.WATER_LESSER.getMetadata()
            || this.getModuleActiveSlot1() != EnumsVC.ModuleType.WATER_NORMAL.getMetadata()
            || this.getModuleActiveSlot1() != EnumsVC.ModuleType.WATER_GREATER.getMetadata())
    {
        this.waterDamage();
    }
    // NOTE(review): same always-true pattern; bombDropCooldown() runs every tick.
    if(this.getModuleActiveSlot1() != EnumsVC.ModuleType.BOMB_LESSER.getMetadata()
            || this.getModuleActiveSlot1() != EnumsVC.ModuleType.BOMB_NORMAL.getMetadata()
            || this.getModuleActiveSlot1() != EnumsVC.ModuleType.BOMB_GREATER.getMetadata())
    {
        this.bombDropCooldown();
    }
    this.prevPosX = this.posX;
    this.prevPosY = this.posY;
    this.prevPosZ = this.posZ;
    super.onUpdate();
    this.tickLerp();
    this.airshipCoreAI();
    if (this.canPassengerSteer())
    {
        this.updateMotion();
        this.controlAirship();
        this.dropBomb();
        if (this.world.isRemote)
        {
            this.updateInputs();
            this.controlAirshipGui();
        }
        this.move(MoverType.SELF, this.motionX, this.motionY, this.motionZ);
    }
    else
    {
        // Nobody steering: hold the airship in place.
        this.motionX = 0.0D;
        this.motionY = 0.0D;
        this.motionZ = 0.0D;
    }
    this.doBlockCollisions();
    // Nearby entities: small living mobs may auto-mount when no player controls the airship;
    // everything else is pushed away.
    List<Entity> list = this.world.getEntitiesInAABBexcluding(this, this.getEntityBoundingBox().expand(0.20000000298023224D, -0.009999999776482582D, 0.20000000298023224D), EntitySelectors.<Entity>getTeamCollisionPredicate(this));
    if (!list.isEmpty())
    {
        boolean flag = !this.world.isRemote && !(this.getControllingPassenger() instanceof EntityPlayer);
        for (int j = 0; j < list.size(); ++j)
        {
            Entity entity = (Entity)list.get(j);
            if (!entity.isPassenger(this))
            {
                if (flag && this.getPassengers().size() < 1 && !entity.isRiding() && entity.width < this.width && entity instanceof EntityLivingBase && !(entity instanceof EntityWaterMob) && !(entity instanceof EntityPlayer))
                {
                    entity.startRiding(this);
                }
                else
                {
                    this.applyEntityCollision(entity);
                }
            }
        }
    }
}
/**
* Handles root method calls for core airship AI logic
*/
/**
 * Runs the per-tick core AI for the airship: fuel accounting, active-module
 * bookkeeping, optional engine sound, and a cruise-control reset when no
 * player is at the helm.
 */
private void airshipCoreAI()
{
    this.fuelFlight();
    this.getTotalFuelSlotBurnTime();
    this.currentModule();
    this.noInventoryModuleDropItems();
    // Play the looping engine noise only when enabled in the config AND
    // fuel is actually burning this tick.
    if (ViesCraftConfig.engineSounds && this.isFuelBurning())
    {
        float enginePitch = (this.rand.nextFloat() - this.rand.nextFloat()) * 0.2F + 0.7F;
        this.world.playSound(this.posX, this.posY, this.posZ,
                InitSoundEventsVC.ENGINEON, SoundCategory.AMBIENT, 0.4F, enginePitch, true);
    }
    // Cruise control must not stay engaged without a player controlling the ship.
    boolean playerDriving = this.getControllingPassenger() instanceof EntityPlayer;
    if (!playerDriving && this.metaCruiseControlSelectedSpeed != 0)
    {
        this.metaCruiseControlSelectedSpeed = 0;
        this.setCruiseControlSelectedSpeed(this.metaCruiseControlSelectedSpeed);
    }
}
@Override
public void controlAirship()
{
    // Translates the rider's key input into yaw rotation (deltaRotation)
    // and horizontal/vertical thrust (f / f1), scaled by the installed
    // module and frame tier. Only runs while the airship has a rider.
    if(this.isBeingRidden())
    {
        float f = 0.0F;   // accumulated forward(+)/backward(-) thrust this tick
        float f1 = 0.0F;  // accumulated vertical up(+)/down(-) thrust this tick
        //Turning Left
        if(this.leftInputDown)
        {
            if(isFuelBurning())
            {
                // Speed modules increase turn rate; multiplier grows with module tier.
                if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.SPEED_LESSER.getMetadata())
                {
                    this.deltaRotation -= (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier * 4));
                }
                else if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.SPEED_NORMAL.getMetadata())
                {
                    this.deltaRotation -= (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier * 8));
                }
                else if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.SPEED_GREATER.getMetadata())
                {
                    this.deltaRotation -= (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier * 12));
                }
                else
                {
                    this.deltaRotation -= (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() * 2));
                }
            }
            else
            {
                // Out of fuel: turn at a reduced (halved) rate.
                this.deltaRotation -= (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() * 4)) * 0.5F;
            }
        }
        //Turning Right (mirror image of the left-turn handling above)
        if (this.rightInputDown)
        {
            if(isFuelBurning())
            {
                if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.SPEED_LESSER.getMetadata())
                {
                    this.deltaRotation += (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier * 4));
                }
                else if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.SPEED_NORMAL.getMetadata())
                {
                    this.deltaRotation += (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier * 8));
                }
                else if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.SPEED_GREATER.getMetadata())
                {
                    this.deltaRotation += (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier * 12));
                }
                else
                {
                    this.deltaRotation += (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() * 2));
                }
            }
            else
            {
                this.deltaRotation += (finalAirshipSpeedTurn + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() * 4)) * 0.5F;
            }
        }
        // Turning in exactly one direction with no forward/back input adds a
        // small amount of forward drift.
        if (this.rightInputDown != this.leftInputDown && !this.forwardInputDown && !this.backInputDown)
        {
            f += 0.005F;
        }
        this.rotationYaw += this.deltaRotation;
        //Move Forward
        //Cruise Control
        // With a cruise module installed, "forward" steps the selected speed
        // up (clamped to 3) instead of applying thrust directly.
        if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_LESSER.getMetadata()
            || this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_NORMAL.getMetadata()
            || this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_GREATER.getMetadata())
        {
            if(this.forwardInputDown)
            {
                if(this.metaCruiseControlSelectedSpeed < 3)
                {
                    this.metaCruiseControlSelectedSpeed++;
                }
                else
                {
                    this.metaCruiseControlSelectedSpeed = 3;
                }
                this.setCruiseControlSelectedSpeed(this.metaCruiseControlSelectedSpeed);
                if(isFuelBurning())
                {
                    // Thrust while fueled is applied by the cruise-control
                    // switch block further below, not here.
                }
                else
                {
                    // Unfueled fallback: tiny manual forward thrust.
                    f += 0.003F;
                }
            }
        }
        else
        {
            // No cruise module: forward input applies thrust directly.
            if(this.forwardInputDown)
            {
                if(isFuelBurning())
                {
                    f += finalAirshipSpeedForward + EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier;
                }
                else
                {
                    f += 0.003F;
                }
            }
        }
        //Moving Backwards
        //Cruise Control
        // With a cruise module, "back" steps the selected speed down (clamped
        // to 0) and additionally applies reverse thrust at half strength.
        if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_LESSER.getMetadata()
            || this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_NORMAL.getMetadata()
            || this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_GREATER.getMetadata())
        {
            if(this.backInputDown)
            {
                if(this.metaCruiseControlSelectedSpeed > 0)
                {
                    this.metaCruiseControlSelectedSpeed--;
                }
                else
                {
                    this.metaCruiseControlSelectedSpeed = 0;
                }
                this.setCruiseControlSelectedSpeed(this.metaCruiseControlSelectedSpeed);
                if(isFuelBurning())
                {
                    f -= (finalAirshipSpeedForward + EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier) * 0.5;
                }
                else
                {
                    f -= 0.003F * 0.5;
                }
            }
        }
        else
        {
            if (this.backInputDown)
            {
                if(isFuelBurning())
                {
                    f -= (finalAirshipSpeedForward + EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier) * 0.5;
                }
                else
                {
                    f -= 0.003F * 0.5;
                }
            }
        }
        //Handles forward movement with the Cruise Control Module
        // Selected speed 0..3 maps to 0, 1/4, 1/2 and full forward thrust.
        if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_LESSER.getMetadata()
            || this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_NORMAL.getMetadata()
            || this.getModuleActiveSlot1() == EnumsVC.ModuleType.CRUISE_GREATER.getMetadata())
        {
            if(isFuelBurning()
                && this.getControllingPassenger() != null)
            {
                switch(this.metaCruiseControlSelectedSpeed)
                {
                    case 0:
                        f += 0F;
                        break;
                    case 1:
                        f += (finalAirshipSpeedForward + EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier) / 4;
                        break;
                    case 2:
                        f += (finalAirshipSpeedForward + EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier) / 2;
                        break;
                    case 3:
                        f += finalAirshipSpeedForward + EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() + this.speedModifier;
                        break;
                }
            }
            else
            {
                f += 0F;
            }
        }
        //Moving Up
        // Altitude modules raise climb rate; without one, climbing stops at
        // the configured height limit.
        if (this.upInputDown)
        {
            if(isFuelBurning())
            {
                if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.ALTITUDE_LESSER.getMetadata())
                {
                    f1 += finalAirshipSpeedUp + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() / 14);
                }
                else if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.ALTITUDE_NORMAL.getMetadata())
                {
                    f1 += finalAirshipSpeedUp + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() / 8);
                }
                else if(this.getModuleActiveSlot1() == EnumsVC.ModuleType.ALTITUDE_GREATER.getMetadata())
                {
                    f1 += finalAirshipSpeedUp + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() / 4);
                }
                else if(!this.airshipHeightLimit())
                {
                    f1 += finalAirshipSpeedUp + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() / 32);
                }
            }
        }
        //Moving down
        // Descending needs no fuel.
        if (this.downInputDown)
        {
            f1 -= finalAirshipSpeedDown + (EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier() / 4) + (this.speedModifier / 4);
        }
        // 0.017453292F is the degrees->radians constant for sin/cos below.
        this.motionX += (double)(MathHelper.sin(-this.rotationYaw * 0.017453292F) * f);
        this.motionZ += (double)(MathHelper.cos(this.rotationYaw * 0.017453292F) * f);
        // NOTE(review): 3.017453292F looks like a typo of the deg->rad
        // constant 0.017453292F (or of a plain multiplier) — confirm the
        // intended vertical-speed scale against the original mod source.
        this.motionY += (double)(3.017453292F * f1);
        // NOTE(review): this pitches the ship up 10 degrees EVERY tick while
        // ridden, unbounded — looks unintended; confirm before relying on it.
        this.rotationPitch += 10;
    }
}
//==================================//
// TODO Airship Modules //
//==================================//
/**
 * Syncs the cached active-module id from slot 1 and derives the speed
 * modifier that the installed module applies to the airship.
 * Negative module ids are ignored entirely.
 */
public void currentModule()
{
    final int module = this.getModuleActiveSlot1();
    if (module < 0)
    {
        return;
    }
    this.moduleActiveSlot1 = (module > 0) ? module : 0;
    if (module == EnumsVC.ModuleType.BASE.getMetadata())
    {
        this.speedModifier = 0;
    }
    // Speed modules: flat bonus growing with tier.
    if (module == EnumsVC.ModuleType.SPEED_LESSER.getMetadata())
    {
        this.speedModifier = 0.01F;
    }
    if (module == EnumsVC.ModuleType.SPEED_NORMAL.getMetadata())
    {
        this.speedModifier = 0.02F;
    }
    if (module == EnumsVC.ModuleType.SPEED_GREATER.getMetadata())
    {
        this.speedModifier = 0.03F;
    }
    // Storage modules: all tiers impose the same 30% penalty.
    if (module == EnumsVC.ModuleType.STORAGE_LESSER.getMetadata()
        || module == EnumsVC.ModuleType.STORAGE_NORMAL.getMetadata()
        || module == EnumsVC.ModuleType.STORAGE_GREATER.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.30F);
    }
    // Cruise modules: penalty shrinks with tier (66%, 33%, none).
    if (module == EnumsVC.ModuleType.CRUISE_LESSER.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.66F);
    }
    if (module == EnumsVC.ModuleType.CRUISE_NORMAL.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.33F);
    }
    if (module == EnumsVC.ModuleType.CRUISE_GREATER.getMetadata())
    {
        this.speedModifier = 0;
    }
    // Infinite-fuel modules: heavy penalty shrinking with tier (90/80/70%).
    if (module == EnumsVC.ModuleType.INFINITE_FUEL_LESSER.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.90F);
    }
    if (module == EnumsVC.ModuleType.INFINITE_FUEL_NORMAL.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.80F);
    }
    if (module == EnumsVC.ModuleType.INFINITE_FUEL_GREATER.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.70F);
    }
    // Bomb modules: penalty shrinking with tier (75/50/25%).
    if (module == EnumsVC.ModuleType.BOMB_LESSER.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.75F);
    }
    if (module == EnumsVC.ModuleType.BOMB_NORMAL.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.50F);
    }
    if (module == EnumsVC.ModuleType.BOMB_GREATER.getMetadata())
    {
        this.speedModifier = -(frameForwardSpeed() * 0.25F);
    }
}
/**
 * Base forward speed for the current frame tier: the configured airship
 * forward speed plus the frame-tier speed bonus.
 */
private float frameForwardSpeed()
{
    return finalAirshipSpeedForward + EnumsVC.MainTierFrame.byId(this.mainTierFrame).getSpeedModifier();
}
/**
 * Sends a drop-bomb request to the server for the currently selected bomb
 * type. Requires a bomb module installed, the payload armed, the drop key
 * held, at least one bomb of the active type in stock, and the drop
 * cooldown flag ({@code canDropBomb}) set.
 */
public void dropBomb()
{
    boolean bombModuleInstalled =
            this.moduleActiveSlot1 == EnumsVC.ModuleType.BOMB_LESSER.getMetadata()
            || this.moduleActiveSlot1 == EnumsVC.ModuleType.BOMB_NORMAL.getMetadata()
            || this.moduleActiveSlot1 == EnumsVC.ModuleType.BOMB_GREATER.getMetadata();
    if (!bombModuleInstalled || !this.bombArmedToggle || !this.dropBombInputDown)
    {
        return;
    }
    // Only one bomb type is active at a time; canDropBomb is cleared after
    // a drop so at most one message is sent per key press.
    if (this.canDropBomb && this.bombTypeActive == 1 && this.storedBombType1 != 0)
    {
        NetworkHandler.sendToServer(new MessageDropBombSmall());
        this.canDropBomb = false;
    }
    if (this.canDropBomb && this.bombTypeActive == 2 && this.storedBombType2 != 0)
    {
        NetworkHandler.sendToServer(new MessageDropBombBig());
        this.canDropBomb = false;
    }
    if (this.canDropBomb && this.bombTypeActive == 3 && this.storedBombType3 != 0)
    {
        NetworkHandler.sendToServer(new MessageDropBombScatter());
        this.canDropBomb = false;
    }
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.binary.matrix;
import java.util.HashMap;
import org.junit.Test;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.utils.TestUtils;
/**
*
*/
/**
 * Integration tests for the quantile, median and inter-quartile-mean (IQM)
 * builtins. Each test runs a DML script and the matching R reference script,
 * then compares the resulting "R" matrices within a small tolerance, across
 * the CP, MR and Spark backends with dense and sparse inputs.
 */
public class QuantileTest extends AutomatedTestBase
{
    private final static String TEST_NAME1 = "Quantile";
    private final static String TEST_NAME2 = "Median";
    private final static String TEST_NAME3 = "IQM";
    private final static String TEST_DIR = "functions/binary/matrix/";
    private final static String TEST_CLASS_DIR = TEST_DIR + QuantileTest.class.getSimpleName() + "/";

    // Numerical tolerance for the DML-vs-R comparison.
    private final static double eps = 1e-10;

    // Input data shape and value range.
    private final static int rows = 1973;
    private final static int maxVal = 7;
    private final static double sparsity1 = 0.9;
    private final static double sparsity2 = 0.3;

    @Override
    public void setUp()
    {
        TestUtils.clearAssertionInformation();
        // Every scenario writes a single output matrix named "R".
        addTestConfiguration(TEST_NAME1,
            new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "R" }) );
        addTestConfiguration(TEST_NAME2,
            new TestConfiguration(TEST_CLASS_DIR, TEST_NAME2, new String[] { "R" }) );
        addTestConfiguration(TEST_NAME3,
            new TestConfiguration(TEST_CLASS_DIR, TEST_NAME3, new String[] { "R" }) );
    }

    // --- quantile (p in {0.25, 0.50, 0.75}) --------------------------------

    @Test
    public void testQuantile1DenseCP() { runQuantileTest(TEST_NAME1, 0.25, false, ExecType.CP); }

    @Test
    public void testQuantile2DenseCP() { runQuantileTest(TEST_NAME1, 0.50, false, ExecType.CP); }

    @Test
    public void testQuantile3DenseCP() { runQuantileTest(TEST_NAME1, 0.75, false, ExecType.CP); }

    @Test
    public void testQuantile1SparseCP() { runQuantileTest(TEST_NAME1, 0.25, true, ExecType.CP); }

    @Test
    public void testQuantile2SparseCP() { runQuantileTest(TEST_NAME1, 0.50, true, ExecType.CP); }

    @Test
    public void testQuantile3SparseCP() { runQuantileTest(TEST_NAME1, 0.75, true, ExecType.CP); }

    @Test
    public void testQuantile1DenseMR() { runQuantileTest(TEST_NAME1, 0.25, false, ExecType.MR); }

    @Test
    public void testQuantile2DenseMR() { runQuantileTest(TEST_NAME1, 0.50, false, ExecType.MR); }

    @Test
    public void testQuantile3DenseMR() { runQuantileTest(TEST_NAME1, 0.75, false, ExecType.MR); }

    @Test
    public void testQuantile1SparseMR() { runQuantileTest(TEST_NAME1, 0.25, true, ExecType.MR); }

    @Test
    public void testQuantile2SparseMR() { runQuantileTest(TEST_NAME1, 0.50, true, ExecType.MR); }

    @Test
    public void testQuantile3SparseMR() { runQuantileTest(TEST_NAME1, 0.75, true, ExecType.MR); }

    @Test
    public void testQuantile1DenseSP() { runQuantileTest(TEST_NAME1, 0.25, false, ExecType.SPARK); }

    @Test
    public void testQuantile2DenseSP() { runQuantileTest(TEST_NAME1, 0.50, false, ExecType.SPARK); }

    @Test
    public void testQuantile3DenseSP() { runQuantileTest(TEST_NAME1, 0.75, false, ExecType.SPARK); }

    @Test
    public void testQuantile1SparseSP() { runQuantileTest(TEST_NAME1, 0.25, true, ExecType.SPARK); }

    @Test
    public void testQuantile2SparseSP() { runQuantileTest(TEST_NAME1, 0.50, true, ExecType.SPARK); }

    @Test
    public void testQuantile3SparseSP() { runQuantileTest(TEST_NAME1, 0.75, true, ExecType.SPARK); }

    // --- median (p is unused by the script, passed as -1) ------------------

    @Test
    public void testMedianDenseCP() { runQuantileTest(TEST_NAME2, -1, false, ExecType.CP); }

    @Test
    public void testMedianSparseCP() { runQuantileTest(TEST_NAME2, -1, true, ExecType.CP); }

    @Test
    public void testMedianDenseMR() { runQuantileTest(TEST_NAME2, -1, false, ExecType.MR); }

    @Test
    public void testMedianSparseMR() { runQuantileTest(TEST_NAME2, -1, true, ExecType.MR); }

    @Test
    public void testMedianDenseSP() { runQuantileTest(TEST_NAME2, -1, false, ExecType.SPARK); }

    @Test
    public void testMedianSparseSP() { runQuantileTest(TEST_NAME2, -1, true, ExecType.SPARK); }

    // --- inter-quartile mean ----------------------------------------------

    @Test
    public void testIQMDenseCP() { runQuantileTest(TEST_NAME3, -1, false, ExecType.CP); }

    @Test
    public void testIQMSparseCP() { runQuantileTest(TEST_NAME3, -1, true, ExecType.CP); }

    @Test
    public void testIQMDenseMR() { runQuantileTest(TEST_NAME3, -1, false, ExecType.MR); }

    @Test
    public void testIQMSparseMR() { runQuantileTest(TEST_NAME3, -1, true, ExecType.MR); }

    @Test
    public void testIQMDenseSP() { runQuantileTest(TEST_NAME3, -1, false, ExecType.SPARK); }

    @Test
    public void testIQMSparseSP() { runQuantileTest(TEST_NAME3, -1, true, ExecType.SPARK); }

    /**
     * Shared driver: configures the runtime platform for the requested
     * backend, generates a random column vector, runs the DML script and
     * the R reference, and compares the result matrices.
     *
     * @param TEST_NAME which script to run (Quantile, Median or IQM)
     * @param p         quantile to compute; ignored (-1) for Median/IQM
     * @param sparse    whether to use the low-sparsity input
     * @param et        execution backend (CP, MR or SPARK)
     */
    private void runQuantileTest( String TEST_NAME, double p, boolean sparse, ExecType et)
    {
        // Remember the global platform settings so they can be restored.
        RUNTIME_PLATFORM platformOld = rtplatform;
        if( et == ExecType.MR )
            rtplatform = RUNTIME_PLATFORM.HADOOP;
        else if( et == ExecType.SPARK )
            rtplatform = RUNTIME_PLATFORM.SPARK;
        else
            rtplatform = RUNTIME_PLATFORM.HYBRID;
        boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
        if( rtplatform == RUNTIME_PLATFORM.SPARK )
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;
        try
        {
            getAndLoadTestConfiguration(TEST_NAME);
            String HOME = SCRIPT_DIR + TEST_DIR;
            fullDMLScriptName = HOME + TEST_NAME + ".dml";
            programArgs = new String[]{"-args", input("A"), Double.toString(p), output("R")};
            fullRScriptName = HOME + TEST_NAME + ".R";
            rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + p + " "+ expectedDir();
            //generate actual dataset (always dense because values <=0 invalid)
            double sparsitya = sparse ? sparsity2 : sparsity1;
            double[][] A = getRandomMatrix(rows, 1, 1, maxVal, sparsitya, 1236);
            writeInputMatrixWithMTD("A", A, true);
            runTest(true, false, null, -1);
            runRScript(true);
            //compare matrices
            HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("R");
            HashMap<CellIndex, Double> rfile = readRMatrixFromFS("R");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
        }
        finally {
            // Always restore the global settings, even on test failure.
            rtplatform = platformOld;
            DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        }
    }
}
|
|
import javafx.application.Application;
import javafx.collections.ObservableList;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.layout.GridPane;
import javafx.stage.Stage;
/**
 * JavaFX entry point for the call-center application. Builds the main menu
 * and exposes static helpers that delegate to the single shared
 * {@link CallCenter} instance used by all GUI screens.
 */
public class Main extends Application
{
    /** The single call-center instance shared by every screen. */
    private static CallCenter center = new CallCenter();
    private static String exitTitle = "Confirm Exit";
    private static String exitMessage = "Are you sure you wish to exit?";

    Stage window;
    Scene mainScene;
    GridPane mainLayout;

    /**
     * Seeds the call center with demo data, then launches the JavaFX runtime
     * (which calls {@link #start(Stage)}).
     *
     * @param args command-line arguments, forwarded to {@code launch}
     */
    public static void main (String[] args)
    {
        SalesCall call01 = new SalesCall("#437436hfd", 23, "open", true, "Joe Wemyss", 5, "Sales", "#044234", 25.12);
        SalesCall call03 = new SalesCall("#437436hfd", 23, "open", true, "Joe Wemyss", 5, "Sales", "#044234", 25.12);
        SalesCall call04 = new SalesCall("#437436hfd", 23, "open", true, "Joe Wemyss", 5, "Sales", "#044234", 25.12);
        SalesCall call05 = new SalesCall("#437436hfd", 23, "open", true, "Joe Wemyss", 5, "Sales", "#044234", 25.12);
        SalesCall call06 = new SalesCall("#437436hfd", 23, "open", true, "Joe Wemyss", 5, "Sales", "#044234", 25.12);
        SupportCall call02 = new SupportCall("##09iiklkjd", 10, "pending", false, "Michelle Power", 3, "Support", "#99653", "0000009899");
        SupportCall call07 = new SupportCall("##09iiklkjd", 10, "pending", false, "Michelle Power", 3, "Support", "#99653", "0000009899");
        SupportCall call08 = new SupportCall("##09iiklkjd", 10, "pending", false, "Michelle Power", 3, "Support", "#99653", "0000009899");
        SupportCall call09 = new SupportCall("##09iiklkjd", 10, "pending", false, "Michelle Power", 3, "Support", "#99653", "0000009899");
        SupportCall call10 = new SupportCall("##09iiklkjd", 10, "pending", false, "Michelle Power", 3, "Support", "#99653", "0000009899");
        center.addCall(call01);
        center.addCall(call03);
        center.addCall(call04);
        center.addCall(call05);
        center.addCall(call06);
        center.addCall(call07);
        center.addCall(call08);
        center.addCall(call09);
        center.addCall(call10);
        center.addCall(call02);
        launch(args);
    }

    /**
     * Builds the main-menu scene: one label/button row per feature, plus a
     * close button, laid out on a {@link GridPane}.
     */
    @Override
    public void start (Stage primaryStage) throws Exception
    {
        window = primaryStage;
        window.setTitle("My Application");
        // Intercept the window-close request so the user can confirm exiting.
        window.setOnCloseRequest(event1 -> {
            event1.consume();
            boolean check = AlertBox.display(exitTitle, exitMessage);
            if(check) {
                closeProgram(0);
            }
        });
        Label mainLabel = new Label("Call Center System");
        Label addLabel = new Label("Add a call");
        Label removeLabel = new Label("Remove a call");
        Label updateStatusLabel = new Label("Update call status");
        Label checkFullLabel = new Label("Check if the call list is full");
        Label checkEmptyLabel = new Label("Check if the call list is empty");
        Label listAllLabel = new Label("List all calls");
        Label detailsLabel = new Label("Call Details");
        Label updateSuggestedLabel = new Label("Update suggested Length of calls");
        Button addButton = new Button("Go");
        Button removeButton = new Button("Go");
        Button updateStatusButton = new Button("Go");
        Button checkFullButton = new Button("Go");
        Button checkEmptyButton = new Button("Go");
        Button closeButton = new Button("Go");
        Button listAllButton = new Button("Go");
        Button detailsButton = new Button("Go");
        Button updateSuggestedButton = new Button("Go");
        // Each button opens the matching sub-screen (return values unused).
        addButton.setOnAction(event -> SelectType.display());
        removeButton.setOnAction(event1 -> RemoveCallGui.display());
        updateStatusButton.setOnAction(event1 -> UpdateStatusGui.display());
        listAllButton.setOnAction(event -> TableList.display("All Calls", getAllFromCenter()));
        checkFullButton.setOnAction(event1 -> {
            if(center.isFull()){
                PopUp.display("Full", "The call center is full.");
            } else {
                PopUp.display("Not Full", "The call center is not full");
            }
        });
        checkEmptyButton.setOnAction(event1 -> {
            if(center.isEmpty()){
                PopUp.display("Empty", "The call center is empty");
            } else {
                PopUp.display("Not Empty", "The call center is not empty");
            }
        });
        detailsButton.setOnAction(event1 -> DetailsMenu.display());
        updateSuggestedButton.setOnAction(event1 -> UpdateSuggestedGui.display());
        closeButton.setOnAction(event -> {
            boolean check = AlertBox.display(exitTitle, exitMessage);
            if(check) {
                closeProgram(0);
            }
        });
        // Grid layout: column 0 holds the labels, column 1 the buttons.
        GridPane.setConstraints(mainLabel, 0, 1);
        GridPane.setConstraints(addLabel, 0, 2);
        GridPane.setConstraints(addButton, 1, 2);
        GridPane.setConstraints(removeLabel, 0, 3);
        GridPane.setConstraints(removeButton, 1, 3);
        GridPane.setConstraints(listAllLabel, 0, 4);
        GridPane.setConstraints(listAllButton, 1, 4);
        GridPane.setConstraints(updateStatusLabel, 0, 5);
        GridPane.setConstraints(updateStatusButton, 1, 5);
        GridPane.setConstraints(checkFullLabel, 0, 6);
        GridPane.setConstraints(checkFullButton, 1, 6);
        GridPane.setConstraints(checkEmptyLabel, 0, 7);
        GridPane.setConstraints(checkEmptyButton, 1, 7);
        GridPane.setConstraints(detailsLabel, 0, 8);
        GridPane.setConstraints(detailsButton, 1, 8);
        GridPane.setConstraints(updateSuggestedLabel, 0, 9);
        GridPane.setConstraints(updateSuggestedButton, 1, 9);
        GridPane.setConstraints(closeButton, 1, 12);
        mainLayout = new GridPane();
        mainLayout.getChildren().addAll(mainLabel, addButton, listAllButton, closeButton, removeButton,
                addLabel, listAllLabel, removeLabel, updateStatusLabel, updateStatusButton, checkEmptyButton,
                checkEmptyLabel, checkFullButton, checkFullLabel, detailsButton, detailsLabel, updateSuggestedLabel, updateSuggestedButton);
        mainLayout.setVgap(10);
        mainLayout.setHgap(10);
        mainLayout.setAlignment(Pos.CENTER);
        mainScene = new Scene(mainLayout, 500, 700);
        window.setScene(mainScene);
        window.show();
    }

    /** Terminates the JVM with the given exit code. */
    public static void closeProgram(int code)
    {
        System.exit(code);
    }

    public static String getExitTitle()
    {
        return exitTitle;
    }

    public static String getExitMessage ()
    {
        return exitMessage;
    }

    /**
     * Adds a call to the shared center.
     *
     * @param callIn the call to add
     * @return true if the center accepted the call
     */
    public static boolean addToCenter(Call callIn)
    {
        return center.addCall(callIn);
    }

    /**
     * Removes the call with the given id from the shared center.
     *
     * @param idIn id of the call to remove
     * @return true if a call was removed
     */
    public static boolean removeFromCenter(String idIn)
    {
        return center.removeCall(idIn);
    }

    /**
     * Checks whether a call with the given id exists in the center.
     *
     * @param idIn id to look up
     * @return true if a matching call exists
     */
    public static boolean getFromCenter(String idIn)
    {
        return center.getItem(idIn) != null;
    }

    /** Returns the center's own formatted listing of all calls. */
    public static String listAll()
    {
        return center.listAll();
    }

    /**
     * Returns the string form of the call with the given id.
     * NOTE(review): throws NullPointerException when no call matches idIn;
     * callers appear to rely on checking {@link #getFromCenter} first — confirm.
     *
     * @param idIn id of the call to format
     * @return the call's toString() representation
     */
    public static String callToString(String idIn)
    {
        Call callRequested = center.getItem(idIn);
        return callRequested.toString();
    }

    /** Returns the longest call in the center. */
    public static Call getLongest()
    {
        return center.longestCall();
    }

    /** Returns the shortest call in the center. */
    public static Call getShortest()
    {
        return center.shortestCall();
    }

    /**
     * Lists calls that exceed the suggested length, or a fixed message when
     * there are none.
     */
    public static String aboveSuggested()
    {
        // Call the center once and reuse the result (was called twice before).
        String list = center.listCallsAboveSuggestedLength();
        if(list.trim().length() > 0){
            return list;
        }
        return "No calls above suggestedLength";
    }

    /** Returns the observable list of all calls, for table display. */
    private static ObservableList<Call> getAllFromCenter()
    {
        return center.getCalls();
    }

    /**
     * Updates the status of the call with the given id.
     *
     * @param idIn   id of the call to update
     * @param status new status value
     * @return true if the update succeeded
     */
    public static boolean statusUpdate(String idIn, String status)
    {
        return center.updateStatus(idIn, status);
    }

    /** Returns the shared call-center instance. */
    public static CallCenter getCenter()
    {
        return center;
    }
}
|
|
package io.joynr.test.interlanguage.jee;
/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.junit.runner.notification.RunListener;
import org.junit.runner.Description;
import org.junit.runner.notification.Failure;
import org.junit.runner.Result;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import joynr.testresults.TestResult;
import joynr.testresults.TestSuiteResult;
import joynr.testresults.TestCaseResult;
import joynr.testresults.TestCaseFailure;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class IltConsumerJUnitListener extends RunListener {
private static final Logger LOG = LoggerFactory.getLogger(IltConsumerJUnitListener.class);
private TestResult testResult = null;
ArrayList<TestCaseResult> testCaseResults = new ArrayList<TestCaseResult>();
static class TestSuiteResultsStore {
public ArrayList<TestCaseResult> testCaseResults = new ArrayList<TestCaseResult>();
public int errors = 0;
public int tests = 0;
public int skipped = 0;
public int failures = 0;
public long consumedTime = 0;
}
// Map testSuiteName -> testSuiteResultsStore
HashMap<String, TestSuiteResultsStore> testSuitesMap = new HashMap<String, TestSuiteResultsStore>();
long startTimeTestCase;
// for manual invocation after the test is finished
public TestResult getTestResult() {
LOG.info(">>> getTestResult called");
LOG.info("<<< getTestResult called");
return testResult;
}
// helper methods
public void printDescription(Description description, int level) {
String spaces = new String(new char[level * 2]).replace('\0', ' ');
if (description == null) {
LOG.info(spaces + "description is null");
} else {
LOG.info(spaces + "description is set");
LOG.info(spaces + "- description.getDisplayName() = " + description.toString());
LOG.info(spaces + "- description.isEmpty() = " + description.isEmpty());
LOG.info(spaces + "- description.isSuite() = " + description.isSuite());
LOG.info(spaces + "- description.isTest() = " + description.isTest());
LOG.info(spaces + "- description.testCount() = " + description.testCount());
ArrayList<Description> children = description.getChildren();
for (int i = 0; i < children.size(); i++) {
printDescription(children.get(i), level + 1);
}
LOG.info(spaces + "- description.toString() = " + description.toString());
}
}
public String getTestSuiteClassName(Description description) {
String fullTestClassName;
Pattern pattern = Pattern.compile("\\((.*)\\)");
Matcher matcher = pattern.matcher(description.getDisplayName());
if (matcher.find()) {
//fullTestClassName = this.getClass().getPackage() + matcher.group(1);
fullTestClassName = matcher.group(1);
} else {
fullTestClassName = description.getDisplayName();
}
return fullTestClassName;
}
public String getTestCaseName(Description description) {
String baseTestClassName;
Pattern pattern = Pattern.compile("(.*)\\(");
Matcher matcher = pattern.matcher(description.getDisplayName());
if (matcher.find()) {
baseTestClassName = matcher.group(1);
} else {
baseTestClassName = description.getDisplayName();
}
return baseTestClassName;
}
public String getFormattedDuration(long duration) {
float floatDuration = (float) duration / 1000.0f;
return String.format("%.4f", floatDuration);
}
public TestSuiteResultsStore getStore(Description description) {
TestSuiteResultsStore store;
// check if suite already exists, if not, create it
if (!testSuitesMap.containsKey(getTestSuiteClassName(description))) {
store = new TestSuiteResultsStore();
testSuitesMap.put(getTestSuiteClassName(description), store);
} else {
store = testSuitesMap.get(getTestSuiteClassName(description));
}
return store;
}
// the following methods are called by JUnit framework
// called when an atomic test flags that it assumes a condition
// that is false
public void testAssumptionFailure(Failure failure) {
LOG.info(">>> testAssumptionFailure called");
Description description = failure.getDescription();
printDescription(description, 1);
// should have been created already in previous call to testStarted
TestSuiteResultsStore store = getStore(description);
store.errors++;
LOG.info("<<< testAssumptionFailure called");
}
// called when an atomic test fails
public void testFailure(Failure failure) {
String fullTestClassName;
String baseTestClassName;
LOG.info(">>> testFailure called");
Description description = failure.getDescription();
printDescription(description, 1);
LOG.info("- failure.getException() = " + failure.getException());
LOG.info("- failure.getMessage() = " + failure.getMessage());
LOG.info("- failure.getTestHeader() = " + failure.getTestHeader());
LOG.info("- failure.getTrace() = " + failure.getTrace());
if (description == null || description.getDisplayName() == null) {
LOG.info("<<< testFinished called");
return;
}
// should have been created already in previous call to testStarted
TestSuiteResultsStore store = getStore(description);
TestCaseFailure testCaseFailure = new TestCaseFailure(failure.getMessage(), // message
failure.getException().toString(), // type
failure.getTrace() // text
);
TestCaseResult testCaseResult = new TestCaseResult(getTestCaseName(description),
getTestSuiteClassName(description),
null, // test not finished yet, will be updated later
"failed", // status
testCaseFailure, // failure
null // no systemOut
);
store.testCaseResults.add(testCaseResult);
store.failures++;
// everything else will be done in testFinished, which is also
// called for failed tests as well.
LOG.info("<<< testFailure called");
}
// called when an atomic test is to be started
public void testStarted(Description description) {
// A Description describes a test which is to be run or has been run. Descriptions can be atomic (a single test) or compound (containing children tests).
LOG.info(">>> testStarted called");
// assume that one tests runs at a time.
// it is then sufficient to store a global start time instead of a
// test case specific start time at this point
startTimeTestCase = System.currentTimeMillis();
printDescription(description, 1);
LOG.info("<<< testStarted called");
}
// called when a test has finished, whether the test succeeds or fails
public void testFinished(Description description) {
LOG.info(">>> testFinished called");
printDescription(description, 1);
if (description == null || description.getDisplayName() == null) {
LOG.info("<<< testFinished called");
return;
}
TestSuiteResultsStore store = getStore(description);
long endTimeTestCase = System.currentTimeMillis();
// this case may have failed and then we already have an entry.
// check for description in list
boolean found = false;
for (int i = 0; i < store.testCaseResults.size(); i++) {
if (store.testCaseResults.get(i).getName().equals(getTestCaseName(description))) {
// update time for failed entry
//testCaseResults.get(i).setTime(new Long(endTimeTestCase - startTimeTestCase).toString());
store.testCaseResults.get(i).setTime(getFormattedDuration(endTimeTestCase - startTimeTestCase));
found = true;
break;
}
}
if (found == false) {
// create new entry
TestCaseResult testCaseResult = new TestCaseResult(getTestCaseName(description),
getTestSuiteClassName(description),
//new Long(endTimeTestCase - startTimeTestCase).toString(),
getFormattedDuration(endTimeTestCase - startTimeTestCase),
"ok", // status
null, // no failure
null // no systemOut
);
store.testCaseResults.add(testCaseResult);
}
store.tests++;
store.consumedTime += (endTimeTestCase - startTimeTestCase);
LOG.info("<<< testFinished called");
}
// Invoked by JUnit when a test will not run due to the @Ignore annotation.
// Records a zero-duration "ignored" entry and bumps the suite's skip count.
public void testIgnored(Description description) {
    LOG.info(">>> testIgnored called");
    printDescription(description, 1);
    if (description == null || description.getDisplayName() == null) {
        // Bug fix: this early-exit trace previously said "testFinished"
        // (copy-paste from testFinished), which made logs misleading.
        LOG.info("<<< testIgnored called");
        return;
    }
    TestSuiteResultsStore store = getStore(description);
    // An ignored test never ran, so its duration is reported as zero.
    TestCaseResult testCaseResult = new TestCaseResult(getTestCaseName(description),
            getTestSuiteClassName(description),
            "0.000",   // no elapsed time
            "ignored", // status
            null,      // no failure
            null       // no systemOut
    );
    store.testCaseResults.add(testCaseResult);
    store.skipped++;
    LOG.info("<<< testIgnored called");
}
// Invoked by JUnit once after all tests have run. Folds every per-suite
// store into a TestSuiteResult and publishes the aggregate TestResult.
public void testRunFinished(Result result) {
    LOG.info(">>> testRunFinished called");
    LOG.info("testRunFinished testSuitesMap.size() = " + testSuitesMap.size());
    ArrayList<TestSuiteResult> suiteResults = new ArrayList<TestSuiteResult>();
    for (Map.Entry<String, TestSuiteResultsStore> entry : testSuitesMap.entrySet()) {
        LOG.info("testRunFinished testSuiteName = " + entry.getKey());
        TestSuiteResultsStore store = entry.getValue();
        suiteResults.add(new TestSuiteResult(entry.getKey(),
                getFormattedDuration(store.consumedTime),
                store.tests,
                store.errors,
                store.skipped,
                store.failures,
                store.testCaseResults.toArray(new TestCaseResult[store.testCaseResults.size()])));
    }
    LOG.info("testRunFinished after for loop");
    testResult = new TestResult(suiteResults.toArray(new TestSuiteResult[suiteResults.size()]));
    LOG.info("<<< testRunFinished called");
}
// Invoked by JUnit before any tests have been run; only traces the
// top-level description of the upcoming run.
public void testRunStarted(Description description) {
    LOG.info(">>> testRunStarted called");
    printDescription(description, 1);
    LOG.info("<<< testRunStarted called");
}
}
|
|
package cz.muni.fi.civ.newohybat.game;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.jboss.arquillian.junit.Arquillian;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.api.event.rule.AfterMatchFiredEvent;
import org.kie.api.event.rule.AgendaEventListener;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.rule.FactHandle;
import org.mockito.ArgumentCaptor;
import cz.muni.fi.civ.newohybat.drools.events.AdvanceEvent;
import cz.muni.fi.civ.newohybat.drools.events.TurnEvent;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.AdvanceDTO;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.CityDTO;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.GovernmentDTO;
import cz.muni.fi.civ.newohybat.persistence.facade.dto.PlayerDTO;
@RunWith(Arquillian.class)
public class AdvanceRulesJUnitTest extends BaseJUnitTest {
    // Fixture objects shared by the test methods; populated by
    // insertResearchScenario() so both tests exercise identical data.
    private PlayerDTO player;
    private CityDTO city;
    private AdvanceDTO basicOne;
    private AdvanceDTO consecutiveOne;

    /**
     * Builds the common research scenario and inserts it into the session:
     * a player ("honza", 205 research points) who has already reached
     * "basicOne" and may research "consecutiveOne" (cost 100, so the player
     * has enough for it to complete after a single turn), plus one city
     * ("marefy") owned by the player. Researching "consecutiveOne" unlocks
     * the "consecutiveTwo" advance, the "bank" improvement, the "warClerk"
     * unit type and the "mercantilism" government.
     *
     * Extracted from the test methods, which previously duplicated this
     * setup verbatim. Call it AFTER attaching any agenda event listener so
     * rules fired during fact insertion are still captured.
     */
    private void insertResearchScenario() {
        GovernmentDTO mercantilism = getGovernment("mercantilism");
        // New player with research points 205, "basicOne" advance reached,
        // "consecutiveOne" to research.
        player = getPlayer(1L, "honza");
        player.setResearch(205);
        // Define advances.
        basicOne = getAdvance("basicOne", 100);
        basicOne.getEnabledAdvances().add("consecutiveOne");
        // Advance to be researched next; its cost is 100 units, the player has
        // enough, so research should complete after one turn.
        consecutiveOne = getAdvance("consecutiveOne", 100);
        consecutiveOne.getEnabledAdvances().add("consecutiveTwo");
        consecutiveOne.getEnabledCityImprovements().add("bank");
        consecutiveOne.getEnabledGovernments().add("mercantilism");
        consecutiveOne.getEnabledUnitTypes().add("warClerk");
        // Init the advance tree by setting reached and enabled advances manually.
        player.getAdvances().add("basicOne");
        player.getEnabledAdvances().add("consecutiveOne");
        // Create a city of the player.
        city = getCity(1L, "marefy");
        city.setImprovements(new HashSet<String>());
        city.setOwner(player.getId());
        // Insert the test data as facts.
        game.insert(basicOne);
        game.insert(consecutiveOne);
        game.insert(getAdvance("consecutiveTwo", 10));
        game.insert(mercantilism);
        game.insert(player);
        game.insert(city);
    }

    /*
     * This test case shows research of an advance by a player.
     * The process is dependent on TurnEvent occurrences.
     * Scenario: a player with one enabled advance starts the research with
     * sufficient production, so it can complete after a single new TurnEvent.
     * Check:
     *   research is active before the turn and waits for it
     *   research is completed after one turn
     *   player can research the next advance
     *   player can build the improvement / create the unit / change to the
     *   government invented by the researched advance
     */
    @Test
    public void testWaitForNewTurnToComplete() {
        // Add a mock event listener so we can check which rules fired.
        AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        // Prepare and insert the shared test data.
        insertResearchScenario();
        // Begin the research; this should start the research process.
        game.playerBeginAdvance(player.getId(), consecutiveOne.getIdent());
        // Catch the afterMatchFired events, which contain the fired rules.
        ArgumentCaptor<AfterMatchFiredEvent> aafe = ArgumentCaptor.forClass(AfterMatchFiredEvent.class);
        verify(ael, atLeastOnce()).afterMatchFired(aafe.capture());
        List<String> firedRules = getFiredRules(aafe.getAllValues());
        Assert.assertTrue("Discover Advance rule fired.", firedRules.contains("Discover Advance"));
        // A new TurnEvent occurred: the pending research should now complete.
        ksession.getEntryPoint("GameControlStream").insert(new TurnEvent());
        ksession.fireAllRules();
        player = game.getPlayer(player.getId());
        Assert.assertTrue("Player has reached advance.", player.getAdvances().contains(consecutiveOne.getIdent()));
        Assert.assertTrue("Player can discover advance.", player.getEnabledAdvances().containsAll(consecutiveOne.getEnabledAdvances()));
        Assert.assertTrue("Player can build bank.", city.getEnabledImprovements().contains("bank"));
        Assert.assertTrue("Player can make warClerk.", city.getEnabledUnitTypes().contains("warClerk"));
        Assert.assertTrue("Player can convert to mercantilism.", player.getEnabledGovernments().contains("mercantilism"));
    }

    /*
     * Tests the ability to cancel the research before it finishes: after an
     * AdvanceEvent on the ActionCanceledStream, a subsequent TurnEvent must
     * not complete the canceled research.
     */
    @Test
    public void testCancel() {
        // Add a mock event listener so we can check which rules fired.
        AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        // Prepare and insert the shared test data.
        insertResearchScenario();
        // Begin the research; this should start the research process.
        game.playerBeginAdvance(player.getId(), consecutiveOne.getIdent());
        // Catch the afterMatchFired events, which contain the fired rules.
        ArgumentCaptor<AfterMatchFiredEvent> aafe = ArgumentCaptor.forClass(AfterMatchFiredEvent.class);
        verify(ael, atLeastOnce()).afterMatchFired(aafe.capture());
        List<String> firedRules = getFiredRules(aafe.getAllValues());
        Assert.assertTrue("Discover Advance rule fired.", firedRules.contains("Discover Advance"));
        // Cancel the running research.
        ksession.getEntryPoint("ActionCanceledStream").insert(new AdvanceEvent(player.getId()));
        ksession.fireAllRules();
        // A new TurnEvent occurs; it should have no effect on the canceled research.
        ksession.getEntryPoint("GameControlStream").insert(new TurnEvent());
        ksession.fireAllRules();
        player = game.getPlayer(player.getId());
        Assert.assertTrue("Player has basic advance.", player.getAdvances().contains(basicOne.getIdent()));
        Assert.assertFalse("Player can't discover advance.", player.getEnabledAdvances().containsAll(consecutiveOne.getEnabledAdvances()));
        Assert.assertFalse("Player cannot build bank.", city.getEnabledImprovements().contains("bank"));
        Assert.assertFalse("Player cannot make warClerk.", city.getEnabledUnitTypes().contains("warClerk"));
        Assert.assertFalse("Player cannot convert to mercantilism.", player.getEnabledGovernments().contains("mercantilism"));
    }

    /** Creates a fully initialized size-1 city DTO with the given id and name. */
    private static CityDTO getCity(Long id, String name) {
        CityDTO city = new CityDTO();
        city.setId(id);
        city.setName(name);
        city.setResourcesConsumption(0);
        city.setResourcesProduction(0);
        city.setUnitsSupport(0);
        city.setFoodConsumption(0);
        city.setFoodProduction(0);
        city.setFoodStock(0);
        city.setTradeProduction(0);
        city.setPeopleEntertainers(0);
        city.setPeopleScientists(0);
        city.setPeopleTaxmen(0);
        city.setWeLoveDay(false);
        city.setDisorder(false);
        // Note: the original called setSize(0) and then setSize(1); only the
        // final value matters, so the redundant first call was dropped.
        city.setSize(1);
        city.setPeopleHappy(0);
        city.setPeopleContent(0);
        city.setPeopleUnhappy(0);
        city.setImprovements(new HashSet<String>());
        city.setHomeUnits(new HashSet<Long>());
        city.setEnabledUnitTypes(new HashSet<String>());
        city.setEnabledImprovements(new HashSet<String>());
        return city;
    }

    /** Creates a player DTO with zeroed ratios and empty advance/government sets. */
    private static PlayerDTO getPlayer(Long id, String name) {
        PlayerDTO player = new PlayerDTO();
        player.setId(id);
        player.setName(name);
        player.setLuxuriesRatio(0);
        player.setTaxesRatio(0);
        player.setResearchRatio(0);
        player.setResearch(0);
        player.setResearchSpent(0);
        player.setAdvances(new HashSet<String>());
        player.setEnabledAdvances(new HashSet<String>());
        player.setEnabledGovernments(new HashSet<String>());
        return player;
    }

    /** Creates an advance DTO with the given ident and cost and empty unlock sets. */
    private static AdvanceDTO getAdvance(String ident, Integer cost) {
        AdvanceDTO advance = new AdvanceDTO();
        advance.setIdent(ident);
        advance.setEnabledAdvances(new HashSet<String>());
        advance.setEnabledCityImprovements(new HashSet<String>());
        advance.setEnabledGovernments(new HashSet<String>());
        advance.setEnabledUnitTypes(new HashSet<String>());
        advance.setCost(cost);
        return advance;
    }

    /** Creates a government DTO with the given ident. */
    private static GovernmentDTO getGovernment(String ident) {
        GovernmentDTO gov = new GovernmentDTO();
        gov.setIdent(ident);
        return gov;
    }
}
|
|
/*
* Copyright (C) 2012 United States Government as represented by the Administrator of the
* National Aeronautics and Space Administration.
* All Rights Reserved.
*/
package gov.nasa.worldwind.cache;
import gov.nasa.worldwind.*;
import gov.nasa.worldwind.avlist.*;
import gov.nasa.worldwind.util.*;
import java.util.logging.Level;
/**
* @author tag
* @version $Id: AbstractFileStore.java 1171 2013-02-11 21:45:02Z dcollins $
*/
public abstract class AbstractFileStore extends WWObjectImpl implements FileStore
{
/**
 * A single file-store location (a directory on disk) with its metadata held
 * as attribute-value pairs: whether it is an "install" location, and whether
 * files found in it should be marked (touched) when used.
 */
protected static class StoreLocation extends AVListImpl
{
    // When true, findFile touches the found file itself rather than its parent.
    protected boolean markWhenUsed = false;

    public StoreLocation(java.io.File file, boolean isInstall)
    {
        this.setValue(AVKey.FILE_STORE_LOCATION, file);
        this.setValue(AVKey.INSTALLED, isInstall);
    }

    public StoreLocation(java.io.File file)
    {
        this(file, false);
    }

    public java.io.File getFile()
    {
        Object o = this.getValue(AVKey.FILE_STORE_LOCATION);
        // instanceof is false for null, so a separate null check is redundant.
        return (o instanceof java.io.File) ? (java.io.File) o : null;
    }

    public void setFile(java.io.File file)
    {
        this.setValue(AVKey.FILE_STORE_LOCATION, file);
    }

    public boolean isInstall()
    {
        Object o = this.getValue(AVKey.INSTALLED);
        // instanceof is false for null, so a separate null check is redundant.
        return (o instanceof Boolean) ? (Boolean) o : false;
    }

    public void setInstall(boolean isInstall)
    {
        this.setValue(AVKey.INSTALLED, isInstall);
    }

    public boolean isMarkWhenUsed()
    {
        return markWhenUsed;
    }

    public void setMarkWhenUsed(boolean markWhenUsed)
    {
        this.markWhenUsed = markWhenUsed;
    }
}
// Retrieval could be occurring on several threads when the app adds a read location, so protect the list of read
// locations from concurrent modification.
protected final java.util.List<StoreLocation> readLocations =
new java.util.concurrent.CopyOnWriteArrayList<StoreLocation>();
// The single location new files are written to; buildWritePaths also places it first in readLocations.
protected StoreLocation writeLocation = null;
// Guards compound check-then-act filesystem operations (mkdirs in newFile, delete in removeFile).
private final Object fileLock = new Object();
//**************************************************************//
//******************** File Store Configuration **************//
//**************************************************************//
/**
 * Parses the XML file-store configuration from the given stream and builds this store's write location and
 * read locations.
 *
 * @throws IllegalStateException if the configuration cannot be parsed or read, or if no read location results.
 */
protected void initialize(java.io.InputStream xmlConfigStream)
{
javax.xml.parsers.DocumentBuilderFactory docBuilderFactory =
javax.xml.parsers.DocumentBuilderFactory.newInstance();
try
{
javax.xml.parsers.DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
org.w3c.dom.Document doc = docBuilder.parse(xmlConfigStream);
// The order of the following two calls is important, because building the writable location may entail
// creating a location that's included in the specified read locations.
this.buildWritePaths(doc);
this.buildReadPaths(doc);
if (this.writeLocation == null)
{
// A missing write location is tolerated (read-only store); only warn.
Logging.logger().warning("FileStore.NoWriteLocation");
}
if (this.readLocations.size() == 0)
{
// This should not happen because the writable location is added to the read list, but check nonetheless
String message = Logging.getMessage("FileStore.NoReadLocations");
Logging.logger().severe(message);
throw new IllegalStateException(message);
}
}
// The three parse failure modes are handled identically: log and rethrow as IllegalStateException,
// preserving the original exception as the cause.
catch (javax.xml.parsers.ParserConfigurationException e)
{
String message = Logging.getMessage("FileStore.ExceptionReadingConfigurationFile");
Logging.logger().severe(message);
throw new IllegalStateException(message, e);
}
catch (org.xml.sax.SAXException e)
{
String message = Logging.getMessage("FileStore.ExceptionReadingConfigurationFile");
Logging.logger().severe(message);
throw new IllegalStateException(message, e);
}
catch (java.io.IOException e)
{
String message = Logging.getMessage("FileStore.ExceptionReadingConfigurationFile");
Logging.logger().severe(message);
throw new IllegalStateException(message, e);
}
}
/**
 * Populates the read-location list from the /dataFileStore/readLocations/location elements of the
 * configuration document. Duplicate paths are skipped; paths that do not currently exist are still added
 * (e.g. removable media may appear later).
 */
protected void buildReadPaths(org.w3c.dom.Node dataFileStoreNode)
{
javax.xml.xpath.XPathFactory pathFactory = javax.xml.xpath.XPathFactory.newInstance();
javax.xml.xpath.XPath pathFinder = pathFactory.newXPath();
try
{
org.w3c.dom.NodeList locationNodes = (org.w3c.dom.NodeList) pathFinder.evaluate(
"/dataFileStore/readLocations/location",
dataFileStoreNode.getFirstChild(),
javax.xml.xpath.XPathConstants.NODESET);
for (int i = 0; i < locationNodes.getLength(); i++)
{
org.w3c.dom.Node location = locationNodes.item(i);
// Each <location> is described by a base system property plus optional path parts and flags.
String prop = pathFinder.evaluate("@property", location);
String wwDir = pathFinder.evaluate("@wwDir", location);
String append = pathFinder.evaluate("@append", location);
String isInstall = pathFinder.evaluate("@isInstall", location);
String isMarkWhenUsed = pathFinder.evaluate("@isMarkWhenUsed", location);
String path = buildLocationPath(prop, append, wwDir);
if (path == null)
{
Logging.logger().log(Level.WARNING, "FileStore.LocationInvalid",
prop != null ? prop : Logging.getMessage("generic.Unknown"));
continue;
}
StoreLocation oldStore = this.storeLocationFor(path);
if (oldStore != null) // filter out duplicates
continue;
// Even paths that don't exist or are otherwise problematic are added to the list because they may
// become readable during the session. E.g., removable media. So add them to the search list.
java.io.File pathFile = new java.io.File(path);
if (pathFile.exists() && !pathFile.isDirectory())
{
Logging.logger().log(Level.WARNING, "FileStore.LocationIsFile", pathFile.getPath());
}
// The flag is considered true if its text contains a 't' in either case (e.g. "true", "T").
boolean pathIsInstall = isInstall != null && (isInstall.contains("t") || isInstall.contains("T"));
StoreLocation newStore = new StoreLocation(pathFile, pathIsInstall);
// If the input parameter "markWhenUsed" is null or empty, then the StoreLocation should keep its
// default value. Otherwise the store location value is set to true when the input parameter contains
// "t", and is set to false otherwise.
if (isMarkWhenUsed != null && isMarkWhenUsed.length() > 0)
newStore.setMarkWhenUsed(isMarkWhenUsed.toLowerCase().contains("t"));
this.readLocations.add(newStore);
}
}
catch (javax.xml.xpath.XPathExpressionException e)
{
String message = Logging.getMessage("FileStore.ExceptionReadingConfigurationFile");
Logging.logger().severe(message);
throw new IllegalStateException(message, e);
}
}
/**
 * Chooses this store's write location from the /dataFileStore/writeLocations/location elements of the
 * configuration document: the first candidate that is (or can be created as) a readable and writable
 * directory wins, and is also placed first in the read-location search path.
 */
@SuppressWarnings({"ResultOfMethodCallIgnored"})
protected void buildWritePaths(org.w3c.dom.Node dataFileCacheNode)
{
javax.xml.xpath.XPathFactory pathFactory = javax.xml.xpath.XPathFactory.newInstance();
javax.xml.xpath.XPath pathFinder = pathFactory.newXPath();
try
{
org.w3c.dom.NodeList locationNodes = (org.w3c.dom.NodeList) pathFinder.evaluate(
"/dataFileStore/writeLocations/location",
dataFileCacheNode.getFirstChild(),
javax.xml.xpath.XPathConstants.NODESET);
for (int i = 0; i < locationNodes.getLength(); i++)
{
org.w3c.dom.Node location = locationNodes.item(i);
String prop = pathFinder.evaluate("@property", location);
String wwDir = pathFinder.evaluate("@wwDir", location);
String append = pathFinder.evaluate("@append", location);
String create = pathFinder.evaluate("@create", location);
String path = buildLocationPath(prop, append, wwDir);
if (path == null)
{
Logging.logger().log(Level.WARNING, "FileStore.LocationInvalid",
prop != null ? prop : Logging.getMessage("generic.Unknown"));
continue;
}
Logging.logger().log(Level.FINER, "FileStore.AttemptingWriteDir", path);
java.io.File pathFile = new java.io.File(path);
// Create the directory only if the "create" flag contains 't'/'T'. The mkdirs result is
// intentionally ignored; the isDirectory/canWrite/canRead check below is authoritative.
if (!pathFile.exists() && create != null && (create.contains("t") || create.contains("T")))
{
Logging.logger().log(Level.FINER, "FileStore.MakingDirsFor", path);
pathFile.mkdirs();
}
if (pathFile.isDirectory() && pathFile.canWrite() && pathFile.canRead())
{
Logging.logger().log(Level.FINER, "FileStore.WriteLocationSuccessful", path);
this.writeLocation = new StoreLocation(pathFile);
// Remove the writable location from search path if it already exists.
StoreLocation oldLocation = this.storeLocationFor(path);
if (oldLocation != null)
this.readLocations.remove(oldLocation);
// Writable location is always first in search path.
this.readLocations.add(0, this.writeLocation);
break; // only need one
}
}
}
catch (javax.xml.xpath.XPathExpressionException e)
{
String message = Logging.getMessage("FileStore.ExceptionReadingConfigurationFile");
Logging.logger().severe(message);
throw new IllegalStateException(message, e);
}
}
/**
 * Resolves the base path named by the given system property (see propertyToPath) and appends the
 * optional "append" and "wwDir" parts, in that order. Either part is skipped when null or empty.
 * Returns null when the property itself resolves to no path and no parts apply.
 */
protected static String buildLocationPath(String property, String append, String wwDir)
{
    String result = propertyToPath(property);

    if (append != null && append.length() != 0)
    {
        result = WWIO.appendPathPart(result, append.trim());
    }

    if (wwDir != null && wwDir.length() != 0)
    {
        result = WWIO.appendPathPart(result, wwDir.trim());
    }

    return result;
}
/**
 * Maps a system-property name to a filesystem path. A property that is actually set wins outright;
 * otherwise two well-known WorldWind pseudo-properties resolve to platform-specific store locations.
 * Returns null for a null/empty name or an unrecognized, unset property.
 */
protected static String propertyToPath(String propName)
{
    if (propName == null || propName.length() == 0)
        return null;

    String value = System.getProperty(propName);
    if (value != null)
        return value;

    if (propName.equalsIgnoreCase("gov.nasa.worldwind.platform.alluser.store"))
        return determineAllUserLocation();

    if (propName.equalsIgnoreCase("gov.nasa.worldwind.platform.user.store"))
        return determineSingleUserLocation();

    return null;
}
/**
 * Returns the platform-specific cache directory shared by all users, or null when the platform is
 * unknown or the Windows all-users profile cannot be determined.
 */
protected static String determineAllUserLocation()
{
if (gov.nasa.worldwind.Configuration.isMacOS())
{
return "/Library/Caches";
}
else if (gov.nasa.worldwind.Configuration.isWindowsOS())
{
String path = System.getenv("ALLUSERSPROFILE");
if (path == null)
{
Logging.logger().severe("generic.AllUsersWindowsProfileNotKnown");
return null;
}
// On Windows 7+ ALLUSERSPROFILE already points at the data directory; older versions need the suffix.
return path + (Configuration.isWindows7OS() ? "" : "\\Application Data");
}
else if (gov.nasa.worldwind.Configuration.isLinuxOS() || gov.nasa.worldwind.Configuration.isUnixOS()
|| gov.nasa.worldwind.Configuration.isSolarisOS())
{
return "/var/cache/";
}
else
{
Logging.logger().warning("generic.UnknownOperatingSystem");
return null;
}
}
/**
 * Returns the platform-specific per-user cache directory (rooted at the user's home directory), or
 * null when the home directory or platform cannot be determined.
 */
protected static String determineSingleUserLocation()
{
String home = getUserHomeDir();
if (home == null)
{
Logging.logger().warning("generic.UsersHomeDirectoryNotKnown");
return null;
}
String path = null;
if (gov.nasa.worldwind.Configuration.isMacOS())
{
path = "/Library/Caches";
}
else if (gov.nasa.worldwind.Configuration.isWindowsOS())
{
// This produces an incorrect path with duplicate parts,
// like "C:\Users\PatC:\Users\Pat\Application Data".
//path = System.getenv("USERPROFILE");
//if (path == null)
//{
// Logging.logger().fine("generic.UsersWindowsProfileNotKnown");
// return null;
//}
//path += "\\Application Data";
path = "\\Application Data";
}
else if (gov.nasa.worldwind.Configuration.isLinuxOS() || gov.nasa.worldwind.Configuration.isUnixOS()
|| gov.nasa.worldwind.Configuration.isSolarisOS())
{
path = "/var/cache/";
}
else
{
Logging.logger().fine("generic.UnknownOperatingSystem");
}
if (path == null)
return null;
// The platform suffix is appended to the user's home directory.
return home + path;
}
/** Returns the current user's home directory as reported by the JVM, or null if not known. */
protected static String getUserHomeDir()
{
return System.getProperty("user.home");
}
//**************************************************************//
//******************** File Store Locations ******************//
//**************************************************************//
/** Returns the directory of every read location, write location (if any) first. */
public java.util.List<? extends java.io.File> getLocations()
{
    java.util.ArrayList<java.io.File> files = new java.util.ArrayList<java.io.File>();

    for (StoreLocation location : this.readLocations)
        files.add(location.getFile());

    return files;
}
/** Returns the write location's directory, or null if no write location is configured. */
public java.io.File getWriteLocation()
{
return (this.writeLocation != null) ? this.writeLocation.getFile() : null;
}
/** Appends a read location at the end of the search path. See addLocation(int, String, boolean). */
public void addLocation(String newPath, boolean isInstall)
{
this.addLocation(this.readLocations.size(), newPath, isInstall);
}
/**
 * Inserts a read location at the given position in the search path, replacing any existing location
 * with the same path. The index is clamped to the list size after the duplicate is removed.
 *
 * @throws IllegalArgumentException if newPath is null/empty or index is negative.
 */
public void addLocation(int index, String newPath, boolean isInstall)
{
if (newPath == null || newPath.length() == 0)
{
String message = Logging.getMessage("nullValue.FileStorePathIsNull");
Logging.logger().severe(message);
throw new IllegalArgumentException(message);
}
if (index < 0)
{
String message = Logging.getMessage("generic.InvalidIndex", index);
Logging.logger().fine(message);
throw new IllegalArgumentException(message);
}
// Remove any duplicate BEFORE clamping: the removal may shrink the list.
StoreLocation oldLocation = this.storeLocationFor(newPath);
if (oldLocation != null)
this.readLocations.remove(oldLocation);
if (index > 0 && index > this.readLocations.size())
index = this.readLocations.size();
java.io.File newFile = new java.io.File(newPath);
StoreLocation newLocation = new StoreLocation(newFile, isInstall);
this.readLocations.add(index, newLocation);
}
/**
 * Removes the read location matching the given path. A null/empty path or an unknown path is a no-op
 * (only logged); attempting to remove the write location is an error.
 *
 * @throws IllegalArgumentException if the path identifies the current write location.
 */
public void removeLocation(String path)
{
if (path == null || path.length() == 0)
{
String message = Logging.getMessage("nullValue.FileStorePathIsNull");
Logging.logger().severe(message);
// Just warn and return.
return;
}
StoreLocation location = this.storeLocationFor(path);
if (location == null) // Path is not part of this FileStore.
return;
if (location.equals(this.writeLocation))
{
String message = Logging.getMessage("FileStore.CannotRemoveWriteLocation", path);
Logging.logger().severe(message);
throw new IllegalArgumentException(message);
}
this.readLocations.remove(location);
}
/**
 * Indicates whether the given path names a read location flagged as an "install" location.
 *
 * @throws IllegalArgumentException if the path is null or empty.
 */
public boolean isInstallLocation(String path)
{
    if (path == null || path.length() == 0)
    {
        String message = Logging.getMessage("nullValue.FileStorePathIsNull");
        Logging.logger().severe(message);
        throw new IllegalArgumentException(message);
    }

    StoreLocation match = this.storeLocationFor(path);

    return match != null && match.isInstall();
}
/** Returns the read location whose directory equals the given path, or null if none matches. */
protected StoreLocation storeLocationFor(String path)
{
    java.io.File target = new java.io.File(path);

    for (StoreLocation candidate : this.readLocations)
    {
        if (target.equals(candidate.getFile()))
            return candidate;
    }

    return null;
}
//**************************************************************//
//******************** File Store Contents *******************//
//**************************************************************//
/** Indicates whether the named file exists in any read location; null names yield false. */
public boolean containsFile(String fileName)
{
    if (fileName == null)
        return false;

    for (StoreLocation location : this.readLocations)
    {
        java.io.File dir = location.getFile();
        // A name already absolute under this location is used as-is; otherwise
        // it is resolved relative to the location's directory.
        java.io.File candidate = fileName.startsWith(dir.getAbsolutePath())
            ? new java.io.File(fileName)
            : makeAbsoluteFile(dir, fileName);
        if (candidate.exists())
            return true;
    }

    return false;
}
/**
 * Searches the class path (optionally) and then each read location for the named file, marking the
 * found file (or its parent directory) as recently used.
 *
 * @param fileName the name of the file to find
 * @param checkClassPath if <code>true</code>, the class path is first searched for the file, otherwise the class
 * path is not searched unless it's one of the explicit paths in the cache search directories
 *
 * @return a handle to the requested file if it exists in the cache, otherwise null
 *
 * @throws IllegalArgumentException if <code>fileName</code> is null
 */
public java.net.URL findFile(String fileName, boolean checkClassPath)
{
if (fileName == null)
{
String message = Logging.getMessage("nullValue.FilePathIsNull");
Logging.logger().severe(message);
throw new IllegalArgumentException(message);
}
if (checkClassPath)
{
java.net.URL url = this.getClass().getClassLoader().getResource(fileName);
if (url != null)
return url;
}
for (StoreLocation location : this.readLocations)
{
java.io.File dir = location.getFile();
if (!dir.exists())
continue;
java.io.File file = new java.io.File(makeAbsolutePath(dir, fileName));
if (file.exists())
{
try
{
// Touch either the file itself or its parent directory, per the location's markWhenUsed flag.
if (location.isMarkWhenUsed())
markFileUsed(file);
else
markFileUsed(file.getParentFile());
return file.toURI().toURL();
}
catch (java.net.MalformedURLException e)
{
// Log and keep searching the remaining locations.
Logging.logger().log(Level.SEVERE,
Logging.getMessage("FileStore.ExceptionCreatingURLForFile", file.getPath()), e);
}
}
}
return null;
}
/**
 * Marks a file as recently used by setting its last-modified time to now; for a plain file the
 * parent directory is touched as well. The setLastModified results are deliberately ignored
 * (best-effort), hence the suppression.
 */
@SuppressWarnings({"ResultOfMethodCallIgnored"})
protected static void markFileUsed(java.io.File file)
{
if (file == null)
return;
long currentTime = System.currentTimeMillis();
if (file.canWrite())
file.setLastModified(currentTime);
// Directories get only their own timestamp updated; files also touch their parent.
if (file.isDirectory())
return;
java.io.File parent = file.getParentFile();
if (parent != null && parent.canWrite())
parent.setLastModified(currentTime);
}
/**
 * Creates a handle (and any missing parent directories) for a new file under the write location.
 * The file itself is not created on disk.
 *
 * @param fileName the name to give the newly created file
 *
 * @return a handle to the newly created file if it could be created and added to the file store, otherwise null
 *
 * @throws IllegalArgumentException if <code>fileName</code> is null
 */
public java.io.File newFile(String fileName)
{
if (fileName == null)
{
String message = Logging.getMessage("nullValue.FilePathIsNull");
Logging.logger().severe(message);
throw new IllegalArgumentException(message);
}
if (this.writeLocation != null)
{
String fullPath = makeAbsolutePath(this.writeLocation.getFile(), fileName);
java.io.File file = new java.io.File(fullPath);
boolean canCreateFile = false;
// This block of code must be synchronized for proper operation. A thread may check that
// file.getParentFile() does not exist, and become immediately suspended. A second thread may then create
// the parent and ancestor directories. When the first thread wakes up, file.getParentFile().mkdirs()
// fails, resulting in an erroneous log message: The log reports that the file cannot be created.
synchronized (this.fileLock)
{
if (file.getParentFile().exists())
canCreateFile = true;
else if (file.getParentFile().mkdirs())
canCreateFile = true;
}
if (canCreateFile)
return file;
else
{
String msg = Logging.getMessage("generic.CannotCreateFile", fullPath);
Logging.logger().severe(msg);
}
}
// No write location configured, or the parent directories could not be created.
return null;
}
/**
 * Deletes the file identified by the given "file:" URL, if it exists. URI syntax errors are logged,
 * not thrown; the delete result is deliberately ignored (best-effort), hence the suppression.
 *
 * @param url the "file:" URL of the file to remove from the file store
 *
 * @throws IllegalArgumentException if <code>url</code> is null
 */
@SuppressWarnings({"ResultOfMethodCallIgnored"})
public void removeFile(java.net.URL url)
{
if (url == null)
{
String msg = Logging.getMessage("nullValue.URLIsNull");
Logging.logger().severe(msg);
throw new IllegalArgumentException(msg);
}
try
{
java.io.File file = new java.io.File(url.toURI());
// This block of code must be synchronized for proper operation. A thread may check that the file exists,
// and become immediately suspended. A second thread may then delete that file. When the first thread
// wakes up, file.delete() fails.
synchronized (this.fileLock)
{
if (file.exists())
file.delete();
}
}
catch (java.net.URISyntaxException e)
{
Logging.logger().log(Level.SEVERE, Logging.getMessage("FileStore.ExceptionRemovingFile", url.toString()),
e);
}
}
/**
 * Resolves fileName against the given directory and returns the combined File. Delegates to
 * makeAbsolutePath so both helpers are guaranteed to build the identical path string.
 */
protected static java.io.File makeAbsoluteFile(java.io.File file, String fileName)
{
    return new java.io.File(makeAbsolutePath(file, fileName));
}
/** Returns the absolute path of fileName resolved against the given directory, joined with "/". */
protected static String makeAbsolutePath(java.io.File dir, String fileName)
{
return dir.getAbsolutePath() + "/" + fileName;
}
/**
 * Normalizes a file-store name: converts all backslashes to forward slashes and strips any leading
 * and trailing file separators.
 */
protected static String normalizeFileStoreName(String fileName)
{
    // String.replace(char, char) performs the literal substitution directly; the
    // previous replaceAll("\\\\", "/") compiled a regex for the same result.
    String normalizedName = fileName.replace('\\', '/');
    normalizedName = WWIO.stripLeadingSeparator(normalizedName);
    normalizedName = WWIO.stripTrailingSeparator(normalizedName);

    return normalizedName;
}
/**
 * Returns the given file's path relative to the specified store location, or the file's full path
 * when the location is null or the file does not lie under it.
 */
protected static String storePathForFile(StoreLocation location, java.io.File file)
{
    String path = file.getPath();

    if (location != null)
    {
        String locationPath = location.getFile().getPath();
        if (path.startsWith(locationPath))
            path = path.substring(locationPath.length()); // one-arg substring; end index was redundant
    }

    return path;
}
//**************************************************************//
//******************** File Store Content Discovery **********//
//**************************************************************//
/**
 * Lists file names directly under the given store path (no recursion) that the filter accepts.
 *
 * @throws IllegalArgumentException if the filter is null.
 */
public String[] listFileNames(String pathName, FileStoreFilter filter)
{
if (filter == null)
{
String msg = Logging.getMessage("nullValue.FilterIsNull");
Logging.logger().severe(msg);
throw new IllegalArgumentException(msg);
}
// Do not recurse.
return this.doListFileNames(pathName, filter, false, false);
}
/**
 * Lists all file names under the given store path, searching every branch exhaustively, that the
 * filter accepts.
 *
 * @throws IllegalArgumentException if the filter is null.
 */
public String[] listAllFileNames(String pathName, FileStoreFilter filter)
{
if (filter == null)
{
String msg = Logging.getMessage("nullValue.FilterIsNull");
Logging.logger().severe(msg);
throw new IllegalArgumentException(msg);
}
// Recurse, and continue to search each branch after a match is found.
return this.doListFileNames(pathName, filter, true, false);
}
/**
 * Lists the file names closest to the given store path: recursion stops down a branch once a match
 * is found there.
 *
 * @throws IllegalArgumentException if the filter is null.
 */
public String[] listTopFileNames(String pathName, FileStoreFilter filter)
{
if (filter == null)
{
String msg = Logging.getMessage("nullValue.FilterIsNull");
Logging.logger().severe(msg);
throw new IllegalArgumentException(msg);
}
// Recurse, but stop searching a branch after a match is found.
return this.doListFileNames(pathName, filter, true, true);
}
/**
 * Common implementation behind the three public list methods: searches every read location for
 * matching file names. Returns null (not an empty array) when no location contains the path.
 */
protected String[] doListFileNames(String pathName, FileStoreFilter filter, boolean recurse,
boolean exitBranchOnFirstMatch)
{
java.util.ArrayList<String> nameList = null;
for (StoreLocation location : this.readLocations)
{
// If the path name is null, then just search from the root of each location. Otherwise search from the
// named cache path.
java.io.File dir = location.getFile();
if (pathName != null)
dir = new java.io.File(makeAbsolutePath(dir, pathName));
// Either the location does not exist, or the specified path does not exist under that location. In either
// case we skip searching this location.
if (!dir.exists())
continue;
// Lazily initialize the list of file names. If no location contains the specified path, then the list is
// not created, and this method will return null.
if (nameList == null)
nameList = new java.util.ArrayList<String>();
this.doListFileNames(location, dir, filter, recurse, exitBranchOnFirstMatch, nameList);
}
if (nameList == null)
return null;
String[] names = new String[nameList.size()];
nameList.toArray(names);
return names;
}
protected void doListFileNames(StoreLocation location, java.io.File dir, FileStoreFilter filter,
boolean recurse, boolean exitBranchOnFirstMatch, java.util.Collection<String> names)
{
java.util.ArrayList<java.io.File> subDirs = new java.util.ArrayList<java.io.File>();
// Search the children of the specified directory. If the child is a directory, append it to the list of sub
// directories to search later. Otherwise, try to list the file as a match. If the file is a match and
// exitBranchOnFirstMatch is true, then exit this branch without considering any other files. This has the
// effect of choosing files closest to the search root.
for (java.io.File childFile : dir.listFiles())
{
if (childFile == null)
continue;
if (childFile.isDirectory())
{
subDirs.add(childFile);
}
else
{
if (this.listFile(location, childFile, filter, names) && exitBranchOnFirstMatch)
return;
}
}
if (!recurse)
return;
// Recursively search each sub-directory. If exitBranchOnFirstMatch is true, then we did not find a match under
// this directory.
for (java.io.File childDir : subDirs)
{
this.doListFileNames(location, childDir, filter, recurse, exitBranchOnFirstMatch, names);
}
}
protected boolean listFile(StoreLocation location, java.io.File file, FileStoreFilter filter,
java.util.Collection<String> names)
{
String fileName = storePathForFile(location, file);
if (fileName == null)
return false;
String normalizedName = normalizeFileStoreName(fileName);
return this.listFileName(location, normalizedName, filter, names);
}
@SuppressWarnings({"UnusedDeclaration"})
protected boolean listFileName(StoreLocation location, String fileName, FileStoreFilter filter,
java.util.Collection<String> names)
{
if (!filter.accept(this, fileName))
return false;
names.add(fileName);
return true;
}
}
|
|
package ethanjones.cubes.block;
import ethanjones.cubes.core.id.IDManager;
import ethanjones.cubes.core.json.JsonException;
import ethanjones.cubes.core.system.CubesException;
import ethanjones.cubes.core.util.BlockFace;
import ethanjones.cubes.graphics.world.block.BlockTextureHandler;
import ethanjones.cubes.item.Item;
import ethanjones.cubes.item.ItemJson;
import ethanjones.cubes.item.ItemStack;
import ethanjones.cubes.item.ItemTool;
import ethanjones.cubes.world.World;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.JsonObject.Member;
import com.eclipsesource.json.JsonValue;
import java.util.Arrays;
/**
 * Loads block definitions from JSON and registers the resulting blocks with the
 * {@link IDManager}. Each JSON object describes one block: its id, the number of meta
 * (variant) values, and per-meta textures, light levels, transparency and drops.
 */
public class BlockJson {
  /**
   * Parses every element of the array as one block definition object and registers it.
   *
   * @param json array of block definition objects
   * @throws JsonException if any definition is invalid
   */
  public static void json(JsonArray json) {
    for (JsonValue value : json) {
      addBlock(value.asObject());
    }
  }
  /**
   * Builds a {@link JBlock} from a single JSON definition and registers it.
   *
   * @throws JsonException if the id is missing or an unknown member is present
   */
  public static void addBlock(JsonObject json) {
    String id = json.getString("id", null);
    if (id == null) throw new JsonException("No block id");
    // Number of meta variants; defaults to a single variant.
    int meta = json.getInt("meta", 1);
    JBlock block = new JBlock(id, meta);
    // Per-meta properties: each may be a single value (applied to every meta) or an object
    // keyed by meta index, with an optional "default" entry. Absent members stay null.
    block.textures = parseMetaElement(json, "texture", new String[meta][], textureParser);
    block.lightLevel = parseMetaElement(json, "lightLevel", new Integer[meta], integerParser);
    block.transparent = parseMetaElement(json, "transparent", new Boolean[meta], booleanParser);
    block.drops = parseMetaElement(json, "drops", new ItemStackPlaceholder[meta][], itemStackArrayParser);
    JsonValue prop;
    prop = json.get("displayMeta");
    if (prop != null) {
      // Explicit list of meta values reported by displayMetaValues().
      JsonArray array = prop.asArray();
      int[] ints = new int[array.size()];
      for (int i = 0; i < array.size(); i++) {
        ints[i] = array.get(i).asInt();
      }
      block.displayMeta = ints;
    } else {
      // Default: every meta value 0..meta-1 is displayed.
      int[] ints = new int[meta];
      for (int i = 0; i < ints.length; i++) {
        ints[i] = i;
      }
      block.displayMeta = ints;
    }
    prop = json.get("mining");
    if (prop != null) {
      // Optional mining properties: speed, required tool type/level, and "other".
      JsonObject object = prop.asObject();
      for (JsonObject.Member member : object) {
        switch (member.getName()) {
          case "speed":
            block.setMiningTime(member.getValue().asFloat());
            break;
          case "tool":
            block.setMiningTool(ItemJson.toolType(member.getValue().asString()));
            break;
          case "toolLevel":
            block.setMiningToolLevel(member.getValue().asInt());
            break;
          case "other":
            block.setMiningOther(member.getValue().asBoolean());
            break;
          default:
            throw new JsonException("Unexpected item tool member \"" + member.getName() + "\"");
        }
      }
    }
    // Reject definitions containing members this parser does not understand.
    for (JsonObject.Member member : json) {
      switch (member.getName()) {
        case "id":
        case "meta":
        case "texture":
        case "lightLevel":
        case "transparent":
        case "displayMeta":
        case "mining":
        case "drops":
          break;
        default:
          throw new JsonException("Unexpected block member \"" + member.getName() + "\"");
      }
    }
    block.jsonFinish();
    IDManager.register(block);
  }
  /**
   * Fills the array {@code t} with one parsed value per meta index.
   * A non-object value (or meta == 1) is parsed once and applied to every index; otherwise
   * the object's members are meta indices (plus an optional "default" fallback).
   *
   * @return the filled array, or null if the member is absent
   * @throws JsonException on an unknown member or a meta index with no value and no default
   */
  private static <T> T[] parseMetaElement(JsonObject json, String name, T[] t, MetaElementParser<T> parser) {
    JsonValue j = json.get(name);
    if (j == null) return null;
    if (!j.isObject() || t.length == 1) {
      Arrays.fill(t, parser.parse(j));
    } else {
      JsonObject jsonObject = j.asObject();
      T defaultT = null;
      for (Member member : jsonObject) {
        String s = member.getName();
        if (s.equals("default")) {
          defaultT = parser.parse(member.getValue());
          continue;
        }
        int m = -1;
        try {
          m = Integer.parseInt(s);
          t[m] = null; // catch out of range exceptions
        } catch (Exception e) {
          throw new JsonException("Unexpected " + name + " member \"" + member.getName() + "\"");
        }
        t[m] = parser.parse(member.getValue());
      }
      // Back-fill indices the object did not mention with the default, if one was given.
      for (int i = 0; i < t.length; i++) {
        if (t[i] == null) {
          if (defaultT == null) {
            throw new JsonException(name + " for meta " + i + " not defined");
          } else {
            t[i] = defaultT;
          }
        }
      }
    }
    return t;
  }
  /** Returns true if the block was created from a JSON definition by this class. */
  public static boolean isJsonBlock(Block b) {
    return b instanceof JBlock;
  }
  /** Block implementation backed entirely by parsed JSON data. */
  private static class JBlock extends Block {
    protected Integer[] lightLevel;     // per-meta light level, or null for 0
    protected Boolean[] transparent;    // per-meta transparency, or null for opaque
    protected String[][] textures;      // per-meta, per-face texture names; freed after loadGraphics()
    protected ItemStackPlaceholder[][] drops; // per-meta drop lists, or null for default drops
    protected int[] displayMeta;
    private final int meta;
    private boolean canBeTransparent;   // true if any meta is transparent
    private boolean alwaysTransparent;  // true if every meta is transparent
    public JBlock(String id, int meta) {
      super(id);
      this.meta = meta;
    }
    // Derives the transparency summary flags once all JSON members have been applied.
    private void jsonFinish() {
      if (transparent != null) {
        alwaysTransparent = true;
        for (Boolean b : transparent) {
          if (b) {
            canBeTransparent = true;
          } else {
            alwaysTransparent = false;
          }
        }
      }
    }
    @Override
    public void loadGraphics() {
      if (textures == null) {
        super.loadGraphics();
        return;
      }
      // Build one texture handler per meta; face 0 seeds the handler, faces 1..5 are sides.
      textureHandlers = new BlockTextureHandler[meta];
      for (int m = 0; m < textureHandlers.length; m++) {
        textureHandlers[m] = new BlockTextureHandler(textures[m][0]);
        if (textures[m].length == 6) {
          for (int s = 1; s < textures[m].length; s++) {
            textureHandlers[m].setSide(s, textures[m][s]);
          }
        } else {
          throw new CubesException("Invalid JBlock.textures length for id \"" + id + "\"");
        }
      }
      // Texture names are no longer needed once the handlers exist.
      textures = null;
    }
    @Override
    public int getLightLevel(int meta) {
      return lightLevel == null ? 0 : lightLevel[meta];
    }
    @Override
    public boolean canBeTransparent() {
      return canBeTransparent;
    }
    @Override
    public boolean alwaysTransparent() {
      return alwaysTransparent;
    }
    @Override
    public boolean isTransparent(int meta) {
      return transparent == null ? false : transparent[meta];
    }
    @Override
    public int[] displayMetaValues() {
      return displayMeta;
    }
    @Override
    public ItemStack[] drops(World world, int x, int y, int z, int meta) {
      if (drops == null) return super.drops(world, x, y, z, meta);
      // Placeholders are resolved to real items lazily, at drop time.
      ItemStack[] itemStacks = new ItemStack[drops[meta].length]; //TODO percentage change drops
      for (int i = 0; i < itemStacks.length; i++) {
        itemStacks[i] = drops[meta][i].convertToItemStack();
      }
      return itemStacks;
    }
    protected void setMiningTime(float miningTime) {
      this.miningTime = miningTime;
    }
    protected void setMiningTool(ItemTool.ToolType miningTool) {
      this.miningTool = miningTool;
    }
    protected void setMiningToolLevel(int miningToolLevel) {
      this.miningToolLevel = miningToolLevel;
    }
    protected void setMiningOther(boolean miningOther) {
      this.miningOther = miningOther;
    }
  }
  /** Converts one JSON value into the property value for a single meta index. */
  public interface MetaElementParser<E> {
    E parse(JsonValue prop);
  }
  private static final MetaElementParser<Integer> integerParser = new MetaElementParser<Integer>() {
    @Override
    public Integer parse(JsonValue prop) {
      return prop.asInt();
    }
  };
  private static final MetaElementParser<Boolean> booleanParser = new MetaElementParser<Boolean>() {
    @Override
    public Boolean parse(JsonValue prop) {
      return prop.asBoolean();
    }
  };
  // Parses either a single texture name (used for all six faces) or an object keyed by face
  // name; "side" covers the four lateral faces and "other" fills any face still unset.
  private static final MetaElementParser<String[]> textureParser = new MetaElementParser<String[]>() {
    @Override
    public String[] parse(JsonValue prop) {
      String[] textures = new String[6];
      if (prop.isString()) {
        Arrays.fill(textures, prop.asString());
      } else {
        JsonObject texture = prop.asObject();
        for (JsonObject.Member member : texture) {
          String value = member.getValue().asString();
          switch (member.getName()) {
            case "posX":
              textures[BlockFace.posX.index] = value;
              break;
            case "negX":
              textures[BlockFace.negX.index] = value;
              break;
            case "posY":
            case "top":
              textures[BlockFace.posY.index] = value;
              break;
            case "negY":
            case "bottom":
              textures[BlockFace.negY.index] = value;
              break;
            case "posZ":
              textures[BlockFace.posZ.index] = value;
              break;
            case "negZ":
              textures[BlockFace.negZ.index] = value;
              break;
            case "side":
              textures[BlockFace.posX.index] = value;
              textures[BlockFace.negX.index] = value;
              textures[BlockFace.posZ.index] = value;
              textures[BlockFace.negZ.index] = value;
              break;
            case "other":
              for (int i = 0; i < textures.length; i++) {
                if (textures[i] == null) textures[i] = value;
              }
              break;
            default:
              throw new JsonException("Unexpected block texture member \"" + member.getName() + "\"");
          }
        }
      }
      return textures;
    }
  };
  // Holds a drop definition until item ids are resolvable; converted at drop time.
  private static class ItemStackPlaceholder {
    final String id;
    final int count;
    final int meta;
    private ItemStackPlaceholder(String id, int count, int meta) {
      this.id = id;
      this.count = count;
      this.meta = meta;
    }
    private ItemStack convertToItemStack() {
      Item item = IDManager.toItem(id);
      if (item == null) throw new JsonException("Invalid id: '" + id + "'");
      return new ItemStack(item, count, meta);
    }
  }
  // Parses a single itemstack object or an array of them.
  private static final MetaElementParser<ItemStackPlaceholder[]> itemStackArrayParser = new MetaElementParser<ItemStackPlaceholder[]>() {
    @Override
    public ItemStackPlaceholder[] parse(JsonValue prop) {
      if (prop.isArray()) {
        JsonArray jsonArray = prop.asArray();
        ItemStackPlaceholder[] itemStacks = new ItemStackPlaceholder[jsonArray.size()];
        for (int i = 0; i < itemStacks.length; i++) {
          itemStacks[i] = parseSingle(jsonArray.get(i).asObject());
        }
        return itemStacks;
      } else {
        return new ItemStackPlaceholder[]{parseSingle(prop.asObject())};
      }
    }
    private ItemStackPlaceholder parseSingle(JsonObject p) {
      String id = p.getString("id", null);
      if (id == null) throw new JsonException("No itemstack id");
      int count = p.getInt("count", 1);
      int meta = p.getInt("meta", 0);
      return new ItemStackPlaceholder(id, count, meta);
    }
  };
}
|
|
package com.github.jsonldjava.core;
import static com.github.jsonldjava.core.JsonLdConsts.RDF_LANGSTRING;
import static com.github.jsonldjava.core.JsonLdConsts.XSD_STRING;
import static com.github.jsonldjava.core.Regex.HEX;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Utilities for converting an {@link RDFDataset} to and from the N-Quads serialization:
 * quad formatting ({@code toNQuad}/{@code toNQuads}), string escaping/unescaping per the
 * N-Quads rules, and a regex-based N-Quads parser ({@code parseNQuads}).
 */
public class RDFDatasetUtils {
    /** Serializes the dataset to an N-Quads string (quads sorted lexicographically). */
    public static String toNQuads(RDFDataset dataset) {
        final StringBuilder output = new StringBuilder(256);
        toNQuads(dataset, output);
        return output.toString();
    }
    /** Serializes the dataset as N-Quads into the given builder, one sorted quad per line. */
    public static void toNQuads(RDFDataset dataset, StringBuilder output) {
        final List<String> quads = new ArrayList<String>();
        for (String graphName : dataset.graphNames()) {
            final List<RDFDataset.Quad> triples = dataset.getQuads(graphName);
            // The default graph is serialized without a graph label.
            if ("@default".equals(graphName)) {
                graphName = null;
            }
            for (final RDFDataset.Quad triple : triples) {
                quads.add(toNQuad(triple, graphName));
            }
        }
        // Sorting yields a canonical, deterministic ordering of the output lines.
        Collections.sort(quads);
        for (final String quad : quads) {
            output.append(quad);
        }
    }
    /**
     * Formats one quad as an N-Quads line. When {@code bnode} is non-null the method runs in
     * normalization mode: the named blank node is emitted as "_:a" and all others as "_:z".
     */
    static String toNQuad(RDFDataset.Quad triple, String graphName, String bnode) {
        final StringBuilder output = new StringBuilder(256);
        toNQuad(triple, graphName, bnode, output);
        return output.toString();
    }
    static void toNQuad(RDFDataset.Quad triple, String graphName, String bnode,
            StringBuilder output) {
        final RDFDataset.Node s = triple.getSubject();
        final RDFDataset.Node p = triple.getPredicate();
        final RDFDataset.Node o = triple.getObject();
        // subject is an IRI or bnode
        if (s.isIRI()) {
            output.append("<");
            escape(s.getValue(), output);
            output.append(">");
        }
        // normalization mode
        else if (bnode != null) {
            output.append(bnode.equals(s.getValue()) ? "_:a" : "_:z");
        }
        // normal mode
        else {
            output.append(s.getValue());
        }
        if (p.isIRI()) {
            output.append(" <");
            escape(p.getValue(), output);
            output.append("> ");
        }
        // otherwise it must be a bnode (TODO: can we only allow this if the
        // flag is set in options?)
        else {
            output.append(" ");
            escape(p.getValue(), output);
            output.append(" ");
        }
        // object is IRI, bnode or literal
        if (o.isIRI()) {
            output.append("<");
            escape(o.getValue(), output);
            output.append(">");
        } else if (o.isBlankNode()) {
            // normalization mode
            if (bnode != null) {
                output.append(bnode.equals(o.getValue()) ? "_:a" : "_:z");
            }
            // normal mode
            else {
                output.append(o.getValue());
            }
        } else {
            // Literal: quoted value plus a language tag (rdf:langString) or a datatype IRI
            // (omitted for xsd:string, the implicit default).
            output.append("\"");
            escape(o.getValue(), output);
            output.append("\"");
            if (RDF_LANGSTRING.equals(o.getDatatype())) {
                output.append("@").append(o.getLanguage());
            } else if (!XSD_STRING.equals(o.getDatatype())) {
                output.append("^^<");
                escape(o.getDatatype(), output);
                output.append(">");
            }
        }
        // graph
        if (graphName != null) {
            if (graphName.indexOf("_:") != 0) {
                output.append(" <");
                escape(graphName, output);
                output.append(">");
            } else if (bnode != null) {
                // Normalization mode collapses every blank graph label to "_:g".
                output.append(" _:g");
            } else {
                output.append(" ").append(graphName);
            }
        }
        output.append(" .\n");
    }
    /** Formats one quad in normal (non-normalization) mode. */
    static String toNQuad(RDFDataset.Quad triple, String graphName) {
        return toNQuad(triple, graphName, null);
    }
    // Matches a backslash escape: a single-character escape, \uXXXX, or \UXXXXXXXX.
    final private static Pattern UCHAR_MATCHED = Pattern
            .compile("\\u005C(?:([tbnrf\\\"'])|(?:u(" + HEX + "{4}))|(?:U(" + HEX + "{8})))");
    /**
     * Decodes N-Quads backslash escapes in the given string: \uXXXX and \UXXXXXXXX code
     * points (including surrogate-pair expansion for values above U+FFFF) and the
     * single-character escapes \b \n \t \f \r \' \" \\.
     *
     * @param str the escaped string, or null
     * @return the unescaped string (null input yields null)
     */
    public static String unescape(String str) {
        String rval = str;
        if (str != null) {
            final Matcher m = UCHAR_MATCHED.matcher(str);
            while (m.find()) {
                String uni = m.group(0);
                if (m.group(1) == null) {
                    // Numeric escape: group 2 is \uXXXX, group 3 is \UXXXXXXXX.
                    final String hex = m.group(2) != null ? m.group(2) : m.group(3);
                    final int v = Integer.parseInt(hex, 16);// hex =
                    // hex.replaceAll("^(?:00)+",
                    // "");
                    if (v > 0xFFFF) {
                        // deal with UTF-32
                        // Integer v = Integer.parseInt(hex, 16);
                        final int vt = v - 0x10000;
                        final int vh = vt >> 10;
                        final int v1 = vt & 0x3FF;
                        final int w1 = 0xD800 + vh;
                        final int w2 = 0xDC00 + v1;
                        final StringBuilder b = new StringBuilder();
                        b.appendCodePoint(w1);
                        b.appendCodePoint(w2);
                        uni = b.toString();
                    } else {
                        uni = Character.toString((char) v);
                    }
                } else {
                    final char c = m.group(1).charAt(0);
                    switch (c) {
                    case 'b':
                        uni = "\b";
                        break;
                    case 'n':
                        uni = "\n";
                        break;
                    case 't':
                        uni = "\t";
                        break;
                    case 'f':
                        uni = "\f";
                        break;
                    case 'r':
                        uni = "\r";
                        break;
                    case '\'':
                        uni = "'";
                        break;
                    case '\"':
                        uni = "\"";
                        break;
                    case '\\':
                        uni = "\\";
                        break;
                    default:
                        // do nothing
                        continue;
                    }
                }
                final String pat = Pattern.quote(m.group(0));
                // final String x = Integer.toHexString(uni.charAt(0));
                // NOTE(review): `uni` is used as a regex replacement string without
                // Matcher.quoteReplacement(); a decoded '$' or '\' would be treated as a
                // replacement metacharacter and corrupt or abort the substitution — confirm.
                rval = rval.replaceAll(pat, uni);
            }
        }
        return rval;
    }
    /**
     * Escapes the given string according to the N-Quads escape rules
     *
     * @param str
     *            The string to escape
     * @param rval
     *            The {@link StringBuilder} to append to.
     */
    public static void escape(String str, StringBuilder rval) {
        for (int i = 0; i < str.length(); i++) {
            final char hi = str.charAt(i);
            if (hi <= 0x8 || hi == 0xB || hi == 0xC || (hi >= 0xE && hi <= 0x1F)
                    || (hi >= 0x7F && hi <= 0xA0) || // 0xA0 is end of
                    // non-printable latin-1
                    // supplement
                    // characters
                    ((hi >= 0x24F // 0x24F is the end of latin extensions
                            && !Character.isHighSurrogate(hi))
                    // TODO: there's probably a lot of other characters that
                    // shouldn't be escaped that
                    // fall outside these ranges, this is one example from the
                    // json-ld tests
                    )) {
                rval.append(String.format("\\u%04x", (int) hi));
            } else if (Character.isHighSurrogate(hi)) {
                // Recombine the surrogate pair into a single code point and emit \UXXXXXXXX.
                final char lo = str.charAt(++i);
                final int c = (hi << 10) + lo + (0x10000 - (0xD800 << 10) - 0xDC00);
                rval.append(String.format("\\U%08x", c));
            } else {
                switch (hi) {
                case '\b':
                    rval.append("\\b");
                    break;
                case '\n':
                    rval.append("\\n");
                    break;
                case '\t':
                    rval.append("\\t");
                    break;
                case '\f':
                    rval.append("\\f");
                    break;
                case '\r':
                    rval.append("\\r");
                    break;
                // case '\'':
                // rval += "\\'";
                // break;
                case '\"':
                    rval.append("\\\"");
                    // rval += "\\u0022";
                    break;
                case '\\':
                    rval.append("\\\\");
                    break;
                default:
                    // just put the char as is
                    rval.append(hi);
                    break;
                }
            }
        }
        // return rval;
    }
    /** Regular expressions composing the N-Quads line grammar used by parseNQuads. */
    private static class Regex {
        // define partial regexes
        // final public static Pattern IRI =
        // Pattern.compile("(?:<([^:]+:[^>]*)>)");
        final public static Pattern IRI = Pattern.compile("(?:<([^>]*)>)");
        final public static Pattern BNODE = Pattern.compile("(_:(?:[A-Za-z][A-Za-z0-9]*))");
        final public static Pattern PLAIN = Pattern.compile("\"([^\"\\\\]*(?:\\\\.[^\"\\\\]*)*)\"");
        final public static Pattern DATATYPE = Pattern.compile("(?:\\^\\^" + IRI + ")");
        final public static Pattern LANGUAGE = Pattern.compile("(?:@([a-z]+(?:-[a-zA-Z0-9]+)*))");
        final public static Pattern LITERAL = Pattern
                .compile("(?:" + PLAIN + "(?:" + DATATYPE + "|" + LANGUAGE + ")?)");
        final public static Pattern WS = Pattern.compile("[ \\t]+");
        final public static Pattern WSO = Pattern.compile("[ \\t]*");
        final public static Pattern EOLN = Pattern.compile("(?:\r\n)|(?:\n)|(?:\r)");
        final public static Pattern EMPTY = Pattern.compile("^" + WSO + "$");
        // define quad part regexes
        final public static Pattern SUBJECT = Pattern.compile("(?:" + IRI + "|" + BNODE + ")" + WS);
        final public static Pattern PROPERTY = Pattern.compile(IRI.pattern() + WS.pattern());
        final public static Pattern OBJECT = Pattern
                .compile("(?:" + IRI + "|" + BNODE + "|" + LITERAL + ")" + WSO);
        final public static Pattern GRAPH = Pattern
                .compile("(?:\\.|(?:(?:" + IRI + "|" + BNODE + ")" + WSO + "\\.))");
        // full quad regex
        final public static Pattern QUAD = Pattern
                .compile("^" + WSO + SUBJECT + PROPERTY + OBJECT + GRAPH + WSO + "$");
    }
    /**
     * Parses RDF in the form of N-Quads.
     *
     * @param input
     *            the N-Quads input to parse.
     *
     * @return an RDF dataset.
     * @throws JsonLdError
     *             If there was an error parsing the N-Quads document.
     */
    public static RDFDataset parseNQuads(String input) throws JsonLdError {
        // build RDF dataset
        final RDFDataset dataset = new RDFDataset();
        // split N-Quad input into lines
        final String[] lines = Regex.EOLN.split(input);
        int lineNumber = 0;
        for (final String line : lines) {
            lineNumber++;
            // skip empty lines
            if (Regex.EMPTY.matcher(line).matches()) {
                continue;
            }
            // parse quad
            final Matcher match = Regex.QUAD.matcher(line);
            if (!match.matches()) {
                throw new JsonLdError(JsonLdError.Error.SYNTAX_ERROR,
                        "Error while parsing N-Quads; invalid quad. line:" + lineNumber);
            }
            // get subject (group 1: IRI, group 2: blank node)
            RDFDataset.Node subject;
            if (match.group(1) != null) {
                subject = new RDFDataset.IRI(unescape(match.group(1)));
            } else {
                subject = new RDFDataset.BlankNode(unescape(match.group(2)));
            }
            // get predicate
            final RDFDataset.Node predicate = new RDFDataset.IRI(unescape(match.group(3)));
            // get object (group 4: IRI, group 5: blank node, groups 6-8: literal parts)
            RDFDataset.Node object;
            if (match.group(4) != null) {
                object = new RDFDataset.IRI(unescape(match.group(4)));
            } else if (match.group(5) != null) {
                object = new RDFDataset.BlankNode(unescape(match.group(5)));
            } else {
                // Literal: group 6 is the lexical value, group 7 an explicit datatype IRI,
                // group 8 a language tag (implying rdf:langString); otherwise xsd:string.
                final String language = unescape(match.group(8));
                final String datatype = match.group(7) != null ? unescape(match.group(7))
                        : match.group(8) != null ? RDF_LANGSTRING : XSD_STRING;
                final String unescaped = unescape(match.group(6));
                object = new RDFDataset.Literal(unescaped, datatype, language);
            }
            // get graph name ('@default' is used for the default graph)
            String name = "@default";
            if (match.group(9) != null) {
                name = unescape(match.group(9));
            } else if (match.group(10) != null) {
                name = unescape(match.group(10));
            }
            final RDFDataset.Quad triple = new RDFDataset.Quad(subject, predicate, object, name);
            // initialise graph in dataset
            if (!dataset.containsKey(name)) {
                final List<RDFDataset.Quad> tmp = new ArrayList<RDFDataset.Quad>();
                tmp.add(triple);
                dataset.put(name, tmp);
            }
            // add triple if unique to its graph
            else {
                final List<RDFDataset.Quad> triples = (List<RDFDataset.Quad>) dataset.get(name);
                if (!triples.contains(triple)) {
                    triples.add(triple);
                }
            }
        }
        return dataset;
    }
}
|
|
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.sql.filter;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.regex.Pattern;
import com.orientechnologies.common.collection.OMultiValue;
import com.orientechnologies.orient.core.config.OStorageConfiguration;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.exception.OQueryParsingException;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.query.OQueryRuntimeValueMulti;
import com.orientechnologies.orient.core.record.ORecordSchemaAware;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.sql.OSQLHelper;
import com.orientechnologies.orient.core.sql.functions.OSQLFunctionRuntime;
import com.orientechnologies.orient.core.sql.operator.OQueryOperator;
/**
* Run-time query condition evaluator.
*
* @author Luca Garulli
*
*/
public class OSQLFilterCondition {
  // Sentinel string compared against stringified operands to recognize SQL NULL.
  private static final String NULL_VALUE = "null";
  protected Object left;
  protected OQueryOperator operator;
  protected Object right;
  /** Creates a unary condition (operator applied to the left operand only). */
  public OSQLFilterCondition(final Object iLeft, final OQueryOperator iOperator) {
    this.left = iLeft;
    this.operator = iOperator;
  }
  /** Creates a binary condition: left operator right. */
  public OSQLFilterCondition(final Object iLeft, final OQueryOperator iOperator, final Object iRight) {
    this.left = iLeft;
    this.operator = iOperator;
    this.right = iRight;
  }
  /**
   * Evaluates this condition against the given record: resolves both operands, coerces them
   * to compatible types where needed, and applies the operator.
   *
   * @return the operator result, or the resolved left value for a unary condition
   */
  public Object evaluate(final ORecordSchemaAware<?> iRecord) {
    Object l = evaluate(iRecord, left);
    Object r = evaluate(iRecord, right);
    final Object[] convertedValues = checkForConversion(iRecord, l, r);
    if (convertedValues != null) {
      l = convertedValues[0];
      r = convertedValues[1];
    }
    if (operator == null)
      // UNITARY OPERATOR: JUST RETURN LEFT RESULT
      return l;
    return operator.evaluateRecord(iRecord, this, l, r);
  }
  /**
   * Attempts to coerce the two operands to compatible types (integers, floats, dates, RIDs)
   * when their classes are unrelated. Returns a two-element {left, right} array with the
   * converted values, or null when no conversion applies.
   */
  private Object[] checkForConversion(final ORecordSchemaAware<?> iRecord, final Object l, final Object r) {
    Object[] result = null;
    // DEFINED OPERATOR
    if ((r instanceof String && r.equals(OSQLHelper.DEFINED)) || (l instanceof String && l.equals(OSQLHelper.DEFINED))) {
      result = new Object[] { ((OSQLFilterItemAbstract) this.left).getRoot(), r };
    }
    // Only coerce when both sides are non-null and their classes are unrelated.
    else if (l != null && r != null && !l.getClass().isAssignableFrom(r.getClass()) && !r.getClass().isAssignableFrom(l.getClass()))
      // INTEGERS
      if (r instanceof Integer && !(l instanceof Number)) {
        if (l instanceof String && ((String) l).indexOf('.') > -1)
          result = new Object[] { new Float((String) l).intValue(), r };
        else if (l instanceof Date)
          result = new Object[] { ((Date) l).getTime(), r };
        else if (!(l instanceof OQueryRuntimeValueMulti) && !(l instanceof Collection<?>) && !l.getClass().isArray())
          result = new Object[] { getInteger(l), r };
      } else if (l instanceof Integer && !(r instanceof Number)) {
        if (r instanceof String && ((String) r).indexOf('.') > -1)
          result = new Object[] { l, new Float((String) r).intValue() };
        else if (r instanceof Date)
          result = new Object[] { l, ((Date) r).getTime() };
        else if (!(r instanceof OQueryRuntimeValueMulti) && !(r instanceof Collection<?>) && !r.getClass().isArray())
          result = new Object[] { l, getInteger(r) };
      }
      // FLOATS
      else if (r instanceof Float && !(l instanceof Float))
        result = new Object[] { getFloat(l), r };
      else if (l instanceof Float && !(r instanceof Float))
        result = new Object[] { l, getFloat(r) };
      // DATES
      else if (r instanceof Date && !(l.getClass().isArray() || l instanceof Date)) {
        result = new Object[] { getDate(iRecord, l), r };
      } else if (l instanceof Date && !(r.getClass().isArray() || r instanceof Date)) {
        result = new Object[] { l, getDate(iRecord, r) };
      }
      // RIDS
      else if (r instanceof ORID && l instanceof String && !l.equals(OSQLHelper.NOT_NULL)) {
        result = new Object[] { new ORecordId((String) l), r };
      } else if (l instanceof ORID && r instanceof String && !r.equals(OSQLHelper.NOT_NULL)) {
        result = new Object[] { l, new ORecordId((String) r) };
      }
    return result;
  }
  /**
   * Converts a value to an Integer. Returns null for null, the "null" sentinel, or the
   * DEFINED keyword; decimal strings are truncated toward zero.
   */
  protected Integer getInteger(Object iValue) {
    if (iValue == null)
      return null;
    final String stringValue = iValue.toString();
    if (NULL_VALUE.equals(stringValue))
      return null;
    if (OSQLHelper.DEFINED.equals(stringValue))
      return null;
    if (OStringSerializerHelper.contains(stringValue, '.') || OStringSerializerHelper.contains(stringValue, ','))
      return (int) Float.parseFloat(stringValue);
    else
      return stringValue.length() > 0 ? new Integer(stringValue) : new Integer(0);
  }
  /** Converts a value to a Float; null or the "null" sentinel yields null, "" yields 0. */
  protected Float getFloat(final Object iValue) {
    if (iValue == null)
      return null;
    final String stringValue = iValue.toString();
    if (NULL_VALUE.equals(stringValue))
      return null;
    return stringValue.length() > 0 ? new Float(stringValue) : new Float(0);
  }
  /**
   * Converts a value to a Date: Longs and all-digit strings are treated as epoch millis;
   * other strings are parsed with the storage's date or date-time format, chosen by length.
   *
   * @throws OQueryParsingException if the string cannot be parsed with the chosen format
   */
  protected Date getDate(final ORecordSchemaAware<?> iRecord, final Object iValue) {
    if (iValue == null)
      return null;
    if (iValue instanceof Long)
      return new Date(((Long) iValue).longValue());
    String stringValue = iValue.toString();
    if (NULL_VALUE.equals(stringValue))
      return null;
    if (stringValue.length() <= 0)
      return null;
    if (Pattern.matches("^\\d+$", stringValue)) {
      return new Date(Long.valueOf(stringValue).longValue());
    }
    final OStorageConfiguration config = iRecord.getDatabase().getStorage().getConfiguration();
    DateFormat formatter = config.getDateFormatInstance();
    if (stringValue.length() > config.dateFormat.length()) {
      // ASSUMES YOU'RE USING THE DATE-TIME FORMATTER
      formatter = config.getDateTimeFormatInstance();
    }
    try {
      return formatter.parse(stringValue);
    } catch (ParseException pe) {
      // NOTE(review): the ParseException is not chained as the cause of the thrown
      // exception, so the original stack trace is lost.
      throw new OQueryParsingException("Error on conversion of date '" + stringValue + "' using the format: "
          + formatter.toString());
    }
  }
  /**
   * Resolves one operand against the record: loads the record if needed, evaluates filter
   * items, nested conditions and runtime functions, and copies multi-values containing
   * filter items. Plain values are returned unchanged.
   */
  protected Object evaluate(ORecordSchemaAware<?> iRecord, final Object iValue) {
    if (iRecord.getInternalStatus() == ORecordElement.STATUS.NOT_LOADED) {
      try {
        iRecord = (ORecordSchemaAware<?>) iRecord.load();
      } catch (ORecordNotFoundException e) {
        return null;
      }
    }
    if (iValue instanceof OSQLFilterItem)
      return ((OSQLFilterItem) iValue).getValue(iRecord);
    else if (iValue instanceof OSQLFilterCondition)
      // NESTED CONDITION: EVALUATE IT RECURSIVELY
      return ((OSQLFilterCondition) iValue).evaluate(iRecord);
    else if (iValue instanceof OSQLFunctionRuntime) {
      // STATELESS FUNCTION: EXECUTE IT
      final OSQLFunctionRuntime f = (OSQLFunctionRuntime) iValue;
      return f.execute(iRecord);
    }
    final Object firstValue = OMultiValue.getFirstValue(iValue);
    if (firstValue != null && firstValue instanceof OSQLFilterItem) {
      // MULTI VALUE: RETURN A COPY
      final ArrayList<Object> result = new ArrayList<Object>(OMultiValue.getSize(iValue));
      for (Object value : OMultiValue.getMultiValueIterable(iValue)) {
        if (value instanceof OSQLFilterItem)
          result.add(((OSQLFilterItem) value).getValue(iRecord));
        else
          result.add(value);
      }
      return result;
    }
    // SIMPLE VALUE: JUST RETURN IT
    return iValue;
  }
  @Override
  public String toString() {
    // NOTE(review): when operator is null the opening '(' is never closed, producing
    // unbalanced output for unary conditions.
    StringBuilder buffer = new StringBuilder();
    buffer.append('(');
    buffer.append(left);
    if (operator != null) {
      buffer.append(' ');
      buffer.append(operator);
      buffer.append(' ');
      buffer.append(right);
      buffer.append(')');
    }
    return buffer.toString();
  }
  public Object getLeft() {
    return left;
  }
  public Object getRight() {
    return right;
  }
  public OQueryOperator getOperator() {
    return operator;
  }
  public void setLeft(final Object iValue) {
    left = iValue;
  }
  public void setRight(final Object iValue) {
    right = iValue;
  }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.replication.ReplicationOperation;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo;
import org.elasticsearch.action.support.replication.TransportWriteAction;
import org.elasticsearch.action.update.UpdateHelper;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.seqno.SequenceNumbersService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.index.translog.Translog.Location;
import org.elasticsearch.action.bulk.BulkItemResultHolder;
import org.elasticsearch.action.bulk.BulkItemResponse;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import java.util.function.LongSupplier;
/**
 * Performs shard-level bulk (index, delete or update) operations.
 * <p>
 * On the primary, each item of the {@link BulkShardRequest} is executed
 * sequentially; per-item failures are recorded in the per-item responses
 * instead of failing the whole shard request. Update requests are translated
 * into index/delete operations (with retry on version conflict) and the
 * translated request is what gets replicated. On replicas, items are replayed
 * using the version/seqNo the primary assigned.
 */
public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequest, BulkShardRequest, BulkShardResponse> {
    public static final String ACTION_NAME = BulkAction.NAME + "[s]";
    private static final Logger logger = ESLoggerFactory.getLogger(TransportShardBulkAction.class);
    // Translates update requests into concrete index or delete operations.
    private final UpdateHelper updateHelper;
    // Publishes dynamic mapping updates to the master before documents that need them are indexed.
    private final MappingUpdatedAction mappingUpdatedAction;

    @Inject
    public TransportShardBulkAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                    IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
                                    MappingUpdatedAction mappingUpdatedAction, UpdateHelper updateHelper, ActionFilters actionFilters,
                                    IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, ACTION_NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, actionFilters,
            indexNameExpressionResolver, BulkShardRequest::new, BulkShardRequest::new, ThreadPool.Names.BULK);
        this.updateHelper = updateHelper;
        this.mappingUpdatedAction = mappingUpdatedAction;
    }

    /** Uses the bulk action's transport options (e.g. compression settings) rather than defaults. */
    @Override
    protected TransportRequestOptions transportOptions() {
        return BulkAction.INSTANCE.transportOptions(settings);
    }

    @Override
    protected BulkShardResponse newResponseInstance() {
        return new BulkShardResponse();
    }

    /** The concrete index was already resolved by the coordinating node, so no re-resolution here. */
    @Override
    protected boolean resolveIndex() {
        return false;
    }

    @Override
    public WritePrimaryResult<BulkShardRequest, BulkShardResponse> shardOperationOnPrimary(
            BulkShardRequest request, IndexShard primary) throws Exception {
        final IndexMetaData metaData = primary.indexSettings().getIndexMetaData();
        Translog.Location location = null;
        final MappingUpdatePerformer mappingUpdater = new ConcreteMappingUpdatePerformer();
        // Execute items in order; each call advances the translog location so we only
        // need to fsync the highest location when the write result is built.
        for (int requestIndex = 0; requestIndex < request.items().length; requestIndex++) {
            location = executeBulkItemRequest(metaData, primary, request, location, requestIndex,
                updateHelper, threadPool::absoluteTimeInMillis, mappingUpdater);
        }
        // Collect the per-item responses that executeBulkItemRequest stored on the items.
        BulkItemResponse[] responses = new BulkItemResponse[request.items().length];
        BulkItemRequest[] items = request.items();
        for (int i = 0; i < items.length; i++) {
            responses[i] = items[i].getPrimaryResponse();
        }
        BulkShardResponse response = new BulkShardResponse(request.shardId(), responses);
        return new WritePrimaryResult<>(request, response, location, null, primary, logger);
    }

    /**
     * Executes a single index request on the primary and wraps the engine result
     * (plus the response to send back, when successful) in a {@link BulkItemResultHolder}.
     */
    private static BulkItemResultHolder executeIndexRequest(final IndexRequest indexRequest,
                                                            final BulkItemRequest bulkItemRequest,
                                                            final IndexShard primary,
                                                            final MappingUpdatePerformer mappingUpdater) throws Exception {
        Engine.IndexResult indexResult = executeIndexRequestOnPrimary(indexRequest, primary, mappingUpdater);
        if (indexResult.hasFailure()) {
            // failure is carried in the engine result; no DocWriteResponse is built
            return new BulkItemResultHolder(null, indexResult, bulkItemRequest);
        } else {
            IndexResponse response = new IndexResponse(primary.shardId(), indexRequest.type(), indexRequest.id(),
                indexResult.getSeqNo(), indexResult.getVersion(), indexResult.isCreated());
            return new BulkItemResultHolder(response, indexResult, bulkItemRequest);
        }
    }

    /**
     * Executes a single delete request on the primary and wraps the engine result
     * (plus the response to send back, when successful) in a {@link BulkItemResultHolder}.
     */
    private static BulkItemResultHolder executeDeleteRequest(final DeleteRequest deleteRequest,
                                                             final BulkItemRequest bulkItemRequest,
                                                             final IndexShard primary) throws IOException {
        Engine.DeleteResult deleteResult = executeDeleteRequestOnPrimary(deleteRequest, primary);
        if (deleteResult.hasFailure()) {
            return new BulkItemResultHolder(null, deleteResult, bulkItemRequest);
        } else {
            DeleteResponse response = new DeleteResponse(primary.shardId(), deleteRequest.type(), deleteRequest.id(),
                deleteResult.getSeqNo(), deleteResult.getVersion(), deleteResult.isFound());
            return new BulkItemResultHolder(response, deleteResult, bulkItemRequest);
        }
    }

    /**
     * Advances the running translog location when the item's operation succeeded;
     * failed or noop operations (null result) leave the location unchanged.
     */
    static Translog.Location calculateTranslogLocation(final Translog.Location originalLocation,
                                                       final BulkItemResultHolder bulkItemResult) {
        final Engine.Result operationResult = bulkItemResult.operationResult;
        if (operationResult != null && operationResult.hasFailure() == false) {
            return locationToSync(originalLocation, operationResult.getTranslogLocation());
        } else {
            return originalLocation;
        }
    }

    // Visible for unit testing
    /**
     * Creates a BulkItemResponse for the primary operation and returns it. If no bulk response is
     * needed (because one already exists and the operation failed), then return null.
     */
    static BulkItemResponse createPrimaryResponse(BulkItemResultHolder bulkItemResult,
                                                  final DocWriteRequest.OpType opType,
                                                  BulkShardRequest request) {
        final Engine.Result operationResult = bulkItemResult.operationResult;
        final DocWriteResponse response = bulkItemResult.response;
        final BulkItemRequest replicaRequest = bulkItemResult.replicaRequest;
        if (operationResult == null) { // in case of noop update operation
            assert response.getResult() == DocWriteResponse.Result.NOOP : "only noop updates can have a null operation";
            return new BulkItemResponse(replicaRequest.id(), opType, response);
        } else if (operationResult.hasFailure() == false) {
            BulkItemResponse primaryResponse = new BulkItemResponse(replicaRequest.id(), opType, response);
            // set a blank ShardInfo so we can safely send it to the replicas. We won't use it in the real response though.
            primaryResponse.getResponse().setShardInfo(new ShardInfo());
            return primaryResponse;
        } else {
            DocWriteRequest docWriteRequest = replicaRequest.request();
            Exception failure = operationResult.getFailure();
            // Version conflicts are expected under concurrency, so log them at trace; anything
            // else is logged at debug for diagnosis.
            if (isConflictException(failure)) {
                logger.trace((Supplier<?>) () -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}",
                    request.shardId(), docWriteRequest.opType().getLowercase(), request), failure);
            } else {
                logger.debug((Supplier<?>) () -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}",
                    request.shardId(), docWriteRequest.opType().getLowercase(), request), failure);
            }
            // if it's a conflict failure, and we already executed the request on a primary (and we execute it
            // again, due to primary relocation and only processing up to N bulk items when the shard gets closed)
            // then just use the response we got from the failed execution
            if (replicaRequest.getPrimaryResponse() == null || isConflictException(failure) == false) {
                return new BulkItemResponse(replicaRequest.id(), docWriteRequest.opType(),
                    // Make sure to use request.index() here, if you
                    // use docWriteRequest.index() it will use the
                    // concrete index instead of an alias if used!
                    new BulkItemResponse.Failure(request.index(), docWriteRequest.type(), docWriteRequest.id(), failure));
            } else {
                assert replicaRequest.getPrimaryResponse() != null : "replica request must have a primary response";
                return null;
            }
        }
    }

    /** Executes bulk item requests and handles request execution exceptions */
    static Translog.Location executeBulkItemRequest(IndexMetaData metaData, IndexShard primary,
                                                    BulkShardRequest request, Translog.Location location,
                                                    int requestIndex, UpdateHelper updateHelper,
                                                    LongSupplier nowInMillisSupplier,
                                                    final MappingUpdatePerformer mappingUpdater) throws Exception {
        final DocWriteRequest itemRequest = request.items()[requestIndex].request();
        final DocWriteRequest.OpType opType = itemRequest.opType();
        final BulkItemResultHolder responseHolder;
        switch (itemRequest.opType()) {
            case CREATE:
            case INDEX:
                responseHolder = executeIndexRequest((IndexRequest) itemRequest,
                    request.items()[requestIndex], primary, mappingUpdater);
                break;
            case UPDATE:
                responseHolder = executeUpdateRequest((UpdateRequest) itemRequest, primary, metaData, request,
                    requestIndex, updateHelper, nowInMillisSupplier, mappingUpdater);
                break;
            case DELETE:
                responseHolder = executeDeleteRequest((DeleteRequest) itemRequest, request.items()[requestIndex], primary);
                break;
            default: throw new IllegalStateException("unexpected opType [" + itemRequest.opType() + "] found");
        }
        final BulkItemRequest replicaRequest = responseHolder.replicaRequest;
        // update the bulk item request because update request execution can mutate the bulk item request
        request.items()[requestIndex] = replicaRequest;
        // Retrieve the primary response, and update the replica request with the primary's response
        BulkItemResponse primaryResponse = createPrimaryResponse(responseHolder, opType, request);
        if (primaryResponse != null) {
            replicaRequest.setPrimaryResponse(primaryResponse);
        }
        // Update the translog with the new location, if needed
        return calculateTranslogLocation(location, responseHolder);
    }

    /** True when the root cause of {@code e} is a version conflict from the engine. */
    private static boolean isConflictException(final Exception e) {
        return ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException;
    }

    /**
     * Executes update request, delegating to a index or delete operation after translation,
     * handles retries on version conflict and constructs update response
     * NOTE: reassigns bulk item request at <code>requestIndex</code> for replicas to
     * execute translated update request (NOOP update is an exception). NOOP updates are
     * indicated by returning a <code>null</code> operation in {@link BulkItemResultHolder}
     * */
    private static BulkItemResultHolder executeUpdateRequest(UpdateRequest updateRequest, IndexShard primary,
                                                             IndexMetaData metaData, BulkShardRequest request,
                                                             int requestIndex, UpdateHelper updateHelper,
                                                             LongSupplier nowInMillis,
                                                             final MappingUpdatePerformer mappingUpdater) throws Exception {
        Engine.Result updateOperationResult = null;
        UpdateResponse updateResponse = null;
        BulkItemRequest replicaRequest = request.items()[requestIndex];
        int maxAttempts = updateRequest.retryOnConflict();
        // Retry loop: re-translate and re-execute on version conflict, up to retryOnConflict times.
        for (int attemptCount = 0; attemptCount <= maxAttempts; attemptCount++) {
            final UpdateHelper.Result translate;
            // translate update request
            try {
                translate = updateHelper.prepare(updateRequest, primary, nowInMillis);
            } catch (Exception failure) {
                // we may fail translating a update to index or delete operation
                // we use index result to communicate failure while translating update request
                updateOperationResult = new Engine.IndexResult(failure, updateRequest.version(), SequenceNumbersService.UNASSIGNED_SEQ_NO);
                break; // out of retry loop
            }
            // execute translated update request
            switch (translate.getResponseResult()) {
                case CREATED:
                case UPDATED:
                    IndexRequest indexRequest = translate.action();
                    MappingMetaData mappingMd = metaData.mappingOrDefault(indexRequest.type());
                    indexRequest.process(mappingMd, request.index());
                    updateOperationResult = executeIndexRequestOnPrimary(indexRequest, primary, mappingUpdater);
                    break;
                case DELETED:
                    DeleteRequest deleteRequest = translate.action();
                    updateOperationResult = executeDeleteRequestOnPrimary(deleteRequest, primary);
                    break;
                case NOOP:
                    primary.noopUpdate(updateRequest.type());
                    break;
                default: throw new IllegalStateException("Illegal update operation " + translate.getResponseResult());
            }
            if (updateOperationResult == null) {
                // this is a noop operation
                updateResponse = translate.action();
                break; // out of retry loop
            } else if (updateOperationResult.hasFailure() == false) {
                // enrich update response and
                // set translated update (index/delete) request for replica execution in bulk items
                switch (updateOperationResult.getOperationType()) {
                    case INDEX:
                        IndexRequest updateIndexRequest = translate.action();
                        final IndexResponse indexResponse = new IndexResponse(primary.shardId(),
                            updateIndexRequest.type(), updateIndexRequest.id(), updateOperationResult.getSeqNo(),
                            updateOperationResult.getVersion(), ((Engine.IndexResult) updateOperationResult).isCreated());
                        BytesReference indexSourceAsBytes = updateIndexRequest.source();
                        updateResponse = new UpdateResponse(indexResponse.getShardInfo(),
                            indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getSeqNo(),
                            indexResponse.getVersion(), indexResponse.getResult());
                        // Only parse the source back out when the caller asked for it (fetchSource/fields).
                        if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) ||
                            (updateRequest.fields() != null && updateRequest.fields().length > 0)) {
                            Tuple<XContentType, Map<String, Object>> sourceAndContent =
                                XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType());
                            updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(),
                                indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
                        }
                        // set translated request as replica request
                        replicaRequest = new BulkItemRequest(request.items()[requestIndex].id(), updateIndexRequest);
                        break;
                    case DELETE:
                        DeleteRequest updateDeleteRequest = translate.action();
                        DeleteResponse deleteResponse = new DeleteResponse(primary.shardId(),
                            updateDeleteRequest.type(), updateDeleteRequest.id(), updateOperationResult.getSeqNo(),
                            updateOperationResult.getVersion(), ((Engine.DeleteResult) updateOperationResult).isFound());
                        updateResponse = new UpdateResponse(deleteResponse.getShardInfo(),
                            deleteResponse.getShardId(), deleteResponse.getType(), deleteResponse.getId(), deleteResponse.getSeqNo(),
                            deleteResponse.getVersion(), deleteResponse.getResult());
                        updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest,
                            request.index(), deleteResponse.getVersion(), translate.updatedSourceAsMap(),
                            translate.updateSourceContentType(), null));
                        // set translated request as replica request
                        replicaRequest = new BulkItemRequest(request.items()[requestIndex].id(), updateDeleteRequest);
                        break;
                }
                assert updateOperationResult.getSeqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO;
                // successful operation
                break; // out of retry loop
            } else if (updateOperationResult.getFailure() instanceof VersionConflictEngineException == false) {
                // not a version conflict exception
                break; // out of retry loop
            }
        }
        return new BulkItemResultHolder(updateResponse, updateOperationResult, replicaRequest);
    }

    /**
     * A replica only replays items whose primary execution succeeded and was not a noop;
     * failed or noop items produced nothing in the primary's translog.
     */
    static boolean shouldExecuteReplicaItem(final BulkItemRequest request, final int index) {
        final BulkItemResponse primaryResponse = request.getPrimaryResponse();
        assert primaryResponse != null : "expected primary response to be set for item [" + index + "] request ["+ request.request() +"]";
        return primaryResponse.isFailed() == false &&
                primaryResponse.getResponse().getResult() != DocWriteResponse.Result.NOOP;
    }

    @Override
    public WriteReplicaResult<BulkShardRequest> shardOperationOnReplica(BulkShardRequest request, IndexShard replica) throws Exception {
        Translog.Location location = null;
        for (int i = 0; i < request.items().length; i++) {
            BulkItemRequest item = request.items()[i];
            if (shouldExecuteReplicaItem(item, i)) {
                DocWriteRequest docWriteRequest = item.request();
                DocWriteResponse primaryResponse = item.getPrimaryResponse().getResponse();
                final Engine.Result operationResult;
                try {
                    // Updates never reach a replica: the primary replicated the translated
                    // index/delete request instead (see executeUpdateRequest).
                    switch (docWriteRequest.opType()) {
                        case CREATE:
                        case INDEX:
                            operationResult = executeIndexRequestOnReplica(primaryResponse, (IndexRequest) docWriteRequest, replica);
                            break;
                        case DELETE:
                            operationResult = executeDeleteRequestOnReplica(primaryResponse, (DeleteRequest) docWriteRequest, replica);
                            break;
                        default:
                            throw new IllegalStateException("Unexpected request operation type on replica: "
                                + docWriteRequest.opType().getLowercase());
                    }
                    if (operationResult.hasFailure()) {
                        // check if any transient write operation failures should be bubbled up
                        Exception failure = operationResult.getFailure();
                        assert failure instanceof VersionConflictEngineException
                            || failure instanceof MapperParsingException
                            : "expected any one of [version conflict, mapper parsing, engine closed, index shard closed]" +
                            " failures. got " + failure;
                        if (!TransportActions.isShardNotAvailableException(failure)) {
                            throw failure;
                        }
                    } else {
                        location = locationToSync(location, operationResult.getTranslogLocation());
                    }
                } catch (Exception e) {
                    // if its not an ignore replica failure, we need to make sure to bubble up the failure
                    // so we will fail the shard
                    if (!TransportActions.isShardNotAvailableException(e)) {
                        throw e;
                    }
                }
            }
        }
        return new WriteReplicaResult<>(request, location, null, replica, logger);
    }

    private static Translog.Location locationToSync(Translog.Location current,
                                                    Translog.Location next) {
        /* here we are moving forward in the translog with each operation. Under the hood this might
         * cross translog files which is ok since from the user perspective the translog is like a
         * tape where only the highest location needs to be fsynced in order to sync all previous
         * locations even though they are not in the same file. When the translog rolls over files
         * the previous file is fsynced on after closing if needed.*/
        assert next != null : "next operation can't be null";
        assert current == null || current.compareTo(next) < 0 :
                "translog locations are not increasing";
        return next;
    }

    /**
     * Execute the given {@link IndexRequest} on a replica shard, throwing a
     * {@link RetryOnReplicaException} if the operation needs to be re-tried.
     */
    public static Engine.IndexResult executeIndexRequestOnReplica(
            DocWriteResponse primaryResponse,
            IndexRequest request,
            IndexShard replica) throws IOException {
        final Engine.Index operation;
        try {
            operation = prepareIndexOperationOnReplica(primaryResponse, request, replica);
        } catch (MapperParsingException e) {
            // parsing failure is reported as a result (with the primary's version/seqNo), not thrown
            return new Engine.IndexResult(e, primaryResponse.getVersion(),
                primaryResponse.getSeqNo());
        }
        Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
        if (update != null) {
            // replicas never apply mapping updates themselves; retry once the master-published
            // mapping arrives on this node
            final ShardId shardId = replica.shardId();
            throw new RetryOnReplicaException(shardId,
                "Mappings are not available on the replica yet, triggered update: " + update);
        }
        return replica.index(operation);
    }

    /** Utility method to prepare an index operation on replica shards */
    static Engine.Index prepareIndexOperationOnReplica(
            DocWriteResponse primaryResponse,
            IndexRequest request,
            IndexShard replica) {
        final ShardId shardId = replica.shardId();
        // version and seqNo come from the primary's response so the replica stays consistent
        final long version = primaryResponse.getVersion();
        final long seqNo = primaryResponse.getSeqNo();
        final SourceToParse sourceToParse =
            SourceToParse.source(SourceToParse.Origin.REPLICA, shardId.getIndexName(),
                request.type(), request.id(), request.source(), request.getContentType())
                .routing(request.routing()).parent(request.parent());
        final VersionType versionType = request.versionType().versionTypeForReplicationAndRecovery();
        assert versionType.validateVersionForWrites(version);
        return replica.prepareIndexOnReplica(sourceToParse, seqNo, version, versionType,
            request.getAutoGeneratedTimestamp(), request.isRetry());
    }

    /** Utility method to prepare an index operation on primary shards */
    static Engine.Index prepareIndexOperationOnPrimary(IndexRequest request, IndexShard primary) {
        final SourceToParse sourceToParse =
            SourceToParse.source(SourceToParse.Origin.PRIMARY, request.index(), request.type(),
                request.id(), request.source(), request.getContentType())
                .routing(request.routing()).parent(request.parent());
        return primary.prepareIndexOnPrimary(sourceToParse, request.version(), request.versionType(),
            request.getAutoGeneratedTimestamp(), request.isRetry());
    }

    /** Executes index operation on primary shard after updates mapping if dynamic mappings are found */
    public static Engine.IndexResult executeIndexRequestOnPrimary(IndexRequest request, IndexShard primary,
                                                                  MappingUpdatePerformer mappingUpdater) throws Exception {
        // Update the mappings if parsing the documents includes new dynamic updates
        final Engine.Index preUpdateOperation;
        final Mapping mappingUpdate;
        final boolean mappingUpdateNeeded;
        try {
            preUpdateOperation = prepareIndexOperationOnPrimary(request, primary);
            mappingUpdate = preUpdateOperation.parsedDoc().dynamicMappingsUpdate();
            mappingUpdateNeeded = mappingUpdate != null;
            if (mappingUpdateNeeded) {
                mappingUpdater.updateMappings(mappingUpdate, primary.shardId(), request.type());
            }
        } catch (MapperParsingException | IllegalArgumentException failure) {
            return new Engine.IndexResult(failure, request.version());
        }
        // Verify that there are no more mappings that need to be applied. If there are failures, a
        // ReplicationOperation.RetryOnPrimaryException is thrown.
        final Engine.Index operation;
        if (mappingUpdateNeeded) {
            // re-parse against the (hopefully) updated mappings; verifyMappings retries on
            // the primary if more dynamic updates are still pending
            try {
                operation = prepareIndexOperationOnPrimary(request, primary);
                mappingUpdater.verifyMappings(operation, primary.shardId());
            } catch (MapperParsingException | IllegalStateException e) {
                // there was an error in parsing the document that was not because
                // of pending mapping updates, so return a failure for the result
                return new Engine.IndexResult(e, request.version());
            }
        } else {
            // There was no mapping update, the operation is the same as the pre-update version.
            operation = preUpdateOperation;
        }
        return primary.index(operation);
    }

    private static Engine.DeleteResult executeDeleteRequestOnPrimary(DeleteRequest request, IndexShard primary) throws IOException {
        final Engine.Delete delete = primary.prepareDeleteOnPrimary(request.type(), request.id(), request.version(), request.versionType());
        return primary.delete(delete);
    }

    private static Engine.DeleteResult executeDeleteRequestOnReplica(DocWriteResponse primaryResponse, DeleteRequest request, IndexShard replica) throws IOException {
        final VersionType versionType = request.versionType().versionTypeForReplicationAndRecovery();
        final long version = primaryResponse.getVersion();
        assert versionType.validateVersionForWrites(version);
        final Engine.Delete delete = replica.prepareDeleteOnReplica(request.type(), request.id(),
            primaryResponse.getSeqNo(), request.primaryTerm(), version, versionType);
        return replica.delete(delete);
    }

    /** Default {@link MappingUpdatePerformer}: pushes dynamic mapping updates to the master. */
    class ConcreteMappingUpdatePerformer implements MappingUpdatePerformer {

        public void updateMappings(final Mapping update, final ShardId shardId,
                                   final String type) throws Exception {
            if (update != null) {
                // can throw timeout exception when updating mappings or ISE for attempting to
                // update default mappings which are bubbled up
                mappingUpdatedAction.updateMappingOnMaster(shardId.getIndex(), type, update);
            }
        }

        public void verifyMappings(final Engine.Index operation,
                                   final ShardId shardId) throws Exception {
            // If dynamic updates are still pending after the master round-trip, retry the
            // whole operation on the primary rather than indexing with stale mappings.
            if (operation.parsedDoc().dynamicMappingsUpdate() != null) {
                throw new ReplicationOperation.RetryOnPrimaryException(shardId,
                    "Dynamic mappings are not available on the node that holds the primary yet");
            }
        }
    }
}
|
|
/* The contents of this file are subject to the license and copyright terms
* detailed in the license directory at the root of the source tree (also
* available online at http://fedora-commons.org/license/).
*/
package org.fcrepo.server.journal;
import org.fcrepo.server.Context;
import org.fcrepo.server.errors.InvalidStateException;
import org.fcrepo.server.errors.ModuleInitializationException;
import org.fcrepo.server.errors.ModuleShutdownException;
import org.fcrepo.server.errors.ServerException;
import org.fcrepo.server.journal.recoverylog.JournalRecoveryLog;
import org.fcrepo.server.management.ManagementDelegate;
import org.fcrepo.server.messaging.PName;
import org.fcrepo.server.storage.types.Datastream;
import org.fcrepo.server.storage.types.RelationshipTuple;
import org.fcrepo.server.storage.types.Validation;
import java.io.InputStream;
import java.util.Date;
import java.util.Map;
/**
* The JournalWorker class to use in recovery mode or in* following mode.
* <p>
* Create a <code>JournalConsumerThread</code> to process the journal. If any
* calls to Management methods come in from outside, reject them.
* </p>
*
* @author Jim Blake
*/
public class JournalConsumer
implements JournalWorker {
private final String role;
private final JournalConsumerThread consumerThread;
private final JournalReader reader;
private final JournalRecoveryLog recoveryLog;
private ManagementDelegate delegate;
/**
* Get the appropriate JournalReader and JournalRecoveryLog, based on the
* server parameters, and create a JournalConsumerThread that will process
* the journal entries, using that reader and that logger.
*/
public JournalConsumer(Map<String, String> parameters,
String role,
ServerInterface server)
throws ModuleInitializationException {
this.role = role;
recoveryLog = JournalRecoveryLog.getInstance(parameters, role, server);
reader =
JournalReader
.getInstance(parameters, role, recoveryLog, server);
consumerThread =
new JournalConsumerThread(parameters,
role,
server,
reader,
recoveryLog);
}
/**
* Get the ManagementDelegate module and pass it to the
* JournalConsumerThread, so it can start working.
*/
public void setManagementDelegate(ManagementDelegate delegate) {
this.delegate = delegate;
consumerThread.setManagementDelegate(delegate);
}
/**
* Tell the thread, the reader and the log to shut down.
*/
public void shutdown() throws ModuleShutdownException {
try {
consumerThread.shutdown();
reader.shutdown();
recoveryLog.shutdown("Server is shutting down.");
} catch (JournalException e) {
throw new ModuleShutdownException("Error closing journal reader.",
role,
e);
}
}
//
// -------------------------------------------------------------------------
//
// Reject outside calls to Management API methods that modify the
// repository.
//
// -------------------------------------------------------------------------
//
/**
* Reject API calls from outside while we are in recovery mode.
*/
public String ingest(Context context,
InputStream serialization,
String logMessage,
String format,
String encoding,
String pid) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date modifyObject(Context context,
String pid,
String state,
String label,
String ownerId,
String logMessage,
Date lastModifiedDate) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date purgeObject(Context context,
String pid,
String logMessage) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public String addDatastream(Context context,
String pid,
String dsID,
String[] altIDs,
String dsLabel,
boolean versionable,
String MIMEType,
String formatURI,
String location,
String controlGroup,
String dsState,
String checksumType,
String checksum,
String logMessage) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date modifyDatastreamByReference(Context context,
String pid,
String datastreamID,
String[] altIDs,
String dsLabel,
String mimeType,
String formatURI,
String dsLocation,
String checksumType,
String checksum,
String logMessage,
Date lastModifiedDate)
throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date modifyDatastreamByValue(Context context,
String pid,
String datastreamID,
String[] altIDs,
String dsLabel,
String mimeType,
String formatURI,
InputStream dsContent,
String checksumType,
String checksum,
String logMessage,
Date lastModifiedDate) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date[] purgeDatastream(Context context,
String pid,
String datastreamID,
Date startDT,
Date endDT,
String logMessage) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public String putTempStream(Context context, InputStream in)
throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date setDatastreamState(Context context,
String pid,
String dsID,
String dsState,
String logMessage) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date setDatastreamVersionable(Context context,
String pid,
String dsID,
boolean versionable,
String logMessage)
throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Date setDisseminatorState(Context context,
String pid,
String dsID,
String dsState,
String logMessage) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public String[] getNextPID(Context context, int numPIDs, String namespace)
throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public boolean addRelationship(Context context,
String pid,
String relationship,
String objURI,
boolean isLiteral,
String datatype) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public boolean purgeRelationship(Context context,
String pid,
String relationship,
String objURI,
boolean isLiteral,
String datatype) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
/**
* Reject API calls from outside while we are in recovery mode.
*/
public Validation validate(@PName("context") Context context, @PName("pid") String pid,
@PName("asOfDateTime") Date asOfDateTime) throws ServerException {
throw rejectCallsFromOutsideWhileInRecoveryMode();
}
//
// -------------------------------------------------------------------------
//
// Permit outside calls to Management API methods that do not modify the
// repository.
//
// -------------------------------------------------------------------------
//
/**
* Read-only method: pass the call to the {@link ManagementDelegate}.
*/
public String compareDatastreamChecksum(Context context,
String pid,
String dsID,
Date versionDate)
throws ServerException {
return delegate.compareDatastreamChecksum(context,
pid,
dsID,
versionDate);
}
/**
* Read-only method: pass the call to the {@link ManagementDelegate}.
*/
public InputStream export(Context context,
String pid,
String format,
String exportContext,
String encoding) throws ServerException {
return delegate.export(context, pid, format, exportContext, encoding);
}
/**
* Read-only method: pass the call to the {@link ManagementDelegate}.
*/
public InputStream getObjectXML(Context context, String pid, String encoding)
throws ServerException {
return delegate.getObjectXML(context, pid, encoding);
}
/**
* Read-only method: pass the call to the {@link ManagementDelegate}.
*/
public Datastream getDatastream(Context context,
String pid,
String datastreamID,
Date asOfDateTime) throws ServerException {
return delegate.getDatastream(context, pid, datastreamID, asOfDateTime);
}
/**
* Read-only method: pass the call to the {@link ManagementDelegate}.
*/
public Datastream[] getDatastreams(Context context,
String pid,
Date asOfDateTime,
String dsState) throws ServerException {
return delegate.getDatastreams(context, pid, asOfDateTime, dsState);
}
/**
* Read-only method: pass the call to the {@link ManagementDelegate}.
*/
public Datastream[] getDatastreamHistory(Context context,
String pid,
String datastreamID)
throws ServerException {
return delegate.getDatastreamHistory(context, pid, datastreamID);
}
/**
* Read-only method: pass the call to the {@link ManagementDelegate}.
*/
public RelationshipTuple[] getRelationships(Context context,
String pid,
String relationship)
throws ServerException {
return delegate.getRelationships(context, pid, relationship);
}
/**
 * Delegate to the ManagementDelegate. Note: Unlike other methods of the
 * Management interface, this method is not exposed at the service level.
 * Therefore, it is safe to forward the call to the delegate. It is also
 * necessary because, in the course of fulfilling API-M requests that
 * involve uploaded content, this method is invoked by internal server code.
 *
 * @param id identifier of a previously uploaded temporary stream
 * @return the temporary stream, as supplied by the delegate
 * @throws ServerException if the delegate cannot supply the stream
 */
public InputStream getTempStream(String id) throws ServerException {
    return delegate.getTempStream(id);
}
/**
 * While the server is reading a Journal to recover its state, block any
 * attempt to use the Management API.
 *
 * <p>NOTE(review): despite the name, this method does not throw — it
 * <em>returns</em> the exception so each caller can throw it itself.
 *
 * @return an {@link InvalidStateException} describing the recovery-mode rejection
 */
private ServerException rejectCallsFromOutsideWhileInRecoveryMode() {
    return new InvalidStateException("Server is in Journal Recovery mode.");
}
}
|
|
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.transform;
import com.google.common.collect.ImmutableList;
import org.gradle.api.Action;
import org.gradle.api.internal.artifacts.ivyservice.ArtifactCacheMetadata;
import org.gradle.api.internal.changedetection.state.InMemoryCacheDecoratorFactory;
import org.gradle.cache.CacheBuilder;
import org.gradle.cache.CacheRepository;
import org.gradle.cache.CleanupAction;
import org.gradle.cache.FileLockManager;
import org.gradle.cache.PersistentCache;
import org.gradle.cache.PersistentIndexedCache;
import org.gradle.cache.PersistentIndexedCacheParameters;
import org.gradle.cache.internal.CompositeCleanupAction;
import org.gradle.cache.internal.LeastRecentlyUsedCacheCleanup;
import org.gradle.cache.internal.ProducerGuard;
import org.gradle.cache.internal.SingleDepthFilesFinder;
import org.gradle.initialization.RootBuildLifecycleListener;
import org.gradle.internal.Factory;
import org.gradle.internal.UncheckedException;
import org.gradle.internal.concurrent.Stoppable;
import org.gradle.internal.hash.HashCode;
import org.gradle.internal.hash.Hasher;
import org.gradle.internal.hash.Hashing;
import org.gradle.internal.resource.local.DefaultPathKeyFileStore;
import org.gradle.internal.resource.local.FileAccessTimeJournal;
import org.gradle.internal.resource.local.FileAccessTracker;
import org.gradle.internal.resource.local.FileStore;
import org.gradle.internal.resource.local.FileStoreAddActionException;
import org.gradle.internal.resource.local.SingleDepthFileAccessTracker;
import org.gradle.internal.serialize.BaseSerializerFactory;
import org.gradle.internal.serialize.HashCodeSerializer;
import org.gradle.internal.serialize.ListSerializer;
import org.gradle.internal.snapshot.FileSystemLocationSnapshot;
import org.gradle.internal.snapshot.FileSystemSnapshotter;
import org.gradle.internal.util.BiFunction;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static org.gradle.api.internal.artifacts.ivyservice.CacheLayout.TRANSFORMS_META_DATA;
import static org.gradle.api.internal.artifacts.ivyservice.CacheLayout.TRANSFORMS_STORE;
import static org.gradle.cache.internal.LeastRecentlyUsedCacheCleanup.DEFAULT_MAX_AGE_IN_DAYS_FOR_RECREATABLE_CACHE_ENTRIES;
import static org.gradle.cache.internal.filelock.LockOptionsBuilder.mode;
/**
 * Caches artifact-transform results, both in memory for the duration of one build
 * and on disk across builds via a Gradle {@link PersistentCache}. A result is keyed
 * by the transform's secondary inputs hash plus the input file's path and content hash.
 */
public class DefaultCachingTransformerExecutor implements CachingTransformerExecutor, Stoppable, RootBuildLifecycleListener {
    // Result files live two directory levels below the files output root; the same
    // depth is used for access tracking and for cleanup so the two stay in sync.
    private static final int FILE_TREE_DEPTH_TO_TRACK_AND_CLEANUP = 2;
    private static final String CACHE_PREFIX = TRANSFORMS_META_DATA.getKey() + "/";
    private final PersistentCache cache;
    // Persistent index: combined key hash -> the list of files the transform produced.
    private final PersistentIndexedCache<HashCode, List<File>> indexedCache;
    private final FileStore<String> fileStore;
    // Serializes result production per key, so only one thread transforms a given input.
    private final ProducerGuard<CacheKey> producing = ProducerGuard.adaptive();
    // In-memory result cache; discarded at the end of every build (see beforeComplete()).
    private final Map<CacheKey, List<File>> resultHashToResult = new ConcurrentHashMap<CacheKey, List<File>>();
    private final FileSystemSnapshotter fileSystemSnapshotter;
    // Records file access times so LRU cleanup can evict stale results.
    private final FileAccessTracker fileAccessTracker;

    public DefaultCachingTransformerExecutor(ArtifactCacheMetadata artifactCacheMetadata, CacheRepository cacheRepository, InMemoryCacheDecoratorFactory cacheDecoratorFactory,
                                             FileSystemSnapshotter fileSystemSnapshotter, FileAccessTimeJournal fileAccessTimeJournal) {
        this.fileSystemSnapshotter = fileSystemSnapshotter;
        File transformsStoreDirectory = artifactCacheMetadata.getTransformsStoreDirectory();
        File filesOutputDirectory = new File(transformsStoreDirectory, TRANSFORMS_STORE.getKey());
        fileStore = new DefaultPathKeyFileStore(filesOutputDirectory);
        cache = cacheRepository
            .cache(transformsStoreDirectory)
            .withCleanup(createCleanupAction(filesOutputDirectory, fileAccessTimeJournal))
            .withCrossVersionCache(CacheBuilder.LockTarget.DefaultTarget)
            .withDisplayName("Artifact transforms cache")
            .withLockOptions(mode(FileLockManager.LockMode.None)) // Lock on demand
            .open();
        indexedCache = cache.createCache(PersistentIndexedCacheParameters.of(CACHE_PREFIX + "results", new HashCodeSerializer(), new ListSerializer<File>(BaseSerializerFactory.FILE_SERIALIZER))
            .cacheDecorator(cacheDecoratorFactory.decorator(1000, true)));
        fileAccessTracker = new SingleDepthFileAccessTracker(fileAccessTimeJournal, filesOutputDirectory, FILE_TREE_DEPTH_TO_TRACK_AND_CLEANUP);
    }

    // Evicts least-recently-used result directories older than the default max age.
    private CleanupAction createCleanupAction(File filesOutputDirectory, FileAccessTimeJournal fileAccessTimeJournal) {
        return CompositeCleanupAction.builder()
            .add(filesOutputDirectory, new LeastRecentlyUsedCacheCleanup(new SingleDepthFilesFinder(FILE_TREE_DEPTH_TO_TRACK_AND_CLEANUP), fileAccessTimeJournal, DEFAULT_MAX_AGE_IN_DAYS_FOR_RECREATABLE_CACHE_ENTRIES))
            .build();
    }

    @Override
    public void stop() {
        cache.close();
    }

    @Override
    public void afterStart() {
        // Nothing to do at the start of a build.
    }

    @Override
    public void beforeComplete() {
        // Discard cached results between builds
        resultHashToResult.clear();
    }

    @Override
    public boolean contains(File absoluteFile, HashCode inputsHash) {
        return resultHashToResult.containsKey(getCacheKey(absoluteFile, inputsHash));
    }

    @Override
    public List<File> getResult(File primaryInput, Transformer transformer) {
        File absolutePrimaryInput = primaryInput.getAbsoluteFile();
        CacheKey cacheKey = getCacheKey(absolutePrimaryInput, transformer);
        // Fast path: result already cached in memory for this build.
        List<File> transformedFiles = resultHashToResult.get(cacheKey);
        if (transformedFiles != null) {
            return transformedFiles;
        }
        return loadIntoCache(absolutePrimaryInput, cacheKey, transformer);
    }

    /**
     * Loads the transformed files from the file system cache into memory. Creates them if they are not present yet.
     * This makes sure that only one thread tries to load a result for a given key.
     */
    private List<File> loadIntoCache(final File inputFile, final CacheKey cacheKey, final BiFunction<List<File>, File, File> transformer) {
        return producing.guardByKey(cacheKey, new Factory<List<File>>() {
            @Override
            public List<File> create() {
                // Re-check the in-memory cache: another thread may have produced the
                // result while this one was waiting on the guard.
                List<File> files = resultHashToResult.get(cacheKey);
                if (files != null) {
                    return files;
                }
                files = cache.withFileLock(new Factory<List<File>>() {
                    @Override
                    public List<File> create() {
                        HashCode persistentCacheKey = cacheKey.getPersistentCacheKey();
                        List<File> files = indexedCache.get(persistentCacheKey);
                        // Reuse the persisted entry only if every output file still exists
                        // (cleanup may have deleted some of them).
                        if (files != null) {
                            boolean allExist = true;
                            for (File file : files) {
                                if (!file.exists()) {
                                    allExist = false;
                                    break;
                                }
                            }
                            if (allExist) {
                                return files;
                            }
                        }
                        // Cache miss or stale entry: run the transform into the file store.
                        String key = inputFile.getName() + "/" + persistentCacheKey;
                        TransformAction action = new TransformAction(transformer, inputFile);
                        try {
                            fileStore.add(key, action);
                        } catch (FileStoreAddActionException e) {
                            // Unwrap and rethrow the failure raised by the transform itself.
                            throw UncheckedException.throwAsUncheckedException(e.getCause());
                        }
                        indexedCache.put(persistentCacheKey, action.result);
                        return action.result;
                    }
                });
                fileAccessTracker.markAccessed(files);
                resultHashToResult.put(cacheKey, files);
                return files;
            }
        });
    }

    private CacheKey getCacheKey(File primaryInput, Transformer transformer) {
        return getCacheKey(primaryInput, transformer.getSecondaryInputHash());
    }

    // Snapshots the input file so the key reflects both its absolute path and its content hash.
    private CacheKey getCacheKey(File inputFile, HashCode inputsHash) {
        FileSystemLocationSnapshot snapshot = fileSystemSnapshotter.snapshot(inputFile);
        return new CacheKey(inputsHash, snapshot.getAbsolutePath(), snapshot.getHash());
    }

    /**
     * A lightweight key for in-memory caching of transformation results.
     * Computing the hash key for the persistent cache is a rather expensive
     * operation, so we only calculate it when we have a cache miss in memory.
     */
    private static class CacheKey {
        private final String absolutePath;
        private final HashCode fileContentHash;
        private final HashCode inputHash;

        public CacheKey(HashCode inputHash, String absolutePath, HashCode fileContentHash) {
            this.absolutePath = absolutePath;
            this.fileContentHash = fileContentHash;
            this.inputHash = inputHash;
        }

        // Folds all three components into the single hash used by the persistent indexed cache.
        public HashCode getPersistentCacheKey() {
            Hasher hasher = Hashing.newHasher();
            hasher.putHash(inputHash);
            hasher.putString(absolutePath);
            hasher.putHash(fileContentHash);
            return hasher.hash();
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            CacheKey cacheKey = (CacheKey) o;
            if (!fileContentHash.equals(cacheKey.fileContentHash)) {
                return false;
            }
            if (!inputHash.equals(cacheKey.inputHash)) {
                return false;
            }
            return absolutePath.equals(cacheKey.absolutePath);
        }

        @Override
        public int hashCode() {
            int result = fileContentHash.hashCode();
            result = 31 * result + absolutePath.hashCode();
            result = 31 * result + inputHash.hashCode();
            return result;
        }
    }

    // Runs the transform into a directory allocated by the file store and captures the outputs.
    private static class TransformAction implements Action<File> {
        private final BiFunction<List<File>, File, File> transformer;
        private final File inputFile;
        // Outputs of the transform; populated by execute().
        private ImmutableList<File> result;

        TransformAction(BiFunction<List<File>, File, File> transformer, File inputFile) {
            this.transformer = transformer;
            this.inputFile = inputFile;
        }

        @Override
        public void execute(File outputDir) {
            outputDir.mkdirs();
            result = ImmutableList.copyOf(transformer.apply(inputFile, outputDir));
        }
    }
}
|
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.bookie;
/**
 * Signals that a Bookie exception of some sort has occurred. This class
 * is the general class of exceptions produced by failed or interrupted bookie operations.
 */
@SuppressWarnings("serial")
public abstract class BookieException extends Exception {

    /** Failure reason; one of the {@link Code} constants. */
    private int code;

    public BookieException(int code) {
        this.code = code;
    }

    public BookieException(int code, Throwable t) {
        super(t);
        // BUGFIX: previously the code was dropped here, so getCode() returned 0
        // for any exception constructed with a cause.
        this.code = code;
    }

    public BookieException(int code, String reason) {
        super(reason);
        // BUGFIX: previously the code was dropped here (getCode() returned 0).
        this.code = code;
    }

    public BookieException(int code, String reason, Throwable t) {
        super(reason, t);
        // BUGFIX: previously the code was dropped here (getCode() returned 0).
        this.code = code;
    }

    /**
     * Creates the concrete exception subclass matching the given {@link Code}
     * constant. Unrecognized codes fall back to {@link BookieIllegalOpException}.
     *
     * @param code one of the {@link Code} constants
     * @return the exception instance for that code
     */
    public static BookieException create(int code) {
        switch(code) {
        case Code.UnauthorizedAccessException:
            return new BookieUnauthorizedAccessException();
        case Code.LedgerFencedException:
            return new LedgerFencedException();
        case Code.InvalidCookieException:
            return new InvalidCookieException();
        case Code.UpgradeException:
            return new UpgradeException();
        case Code.DiskPartitionDuplicationException:
            return new DiskPartitionDuplicationException();
        case Code.CookieNotFoundException:
            return new CookieNotFoundException();
        case Code.MetadataStoreException:
            return new MetadataStoreException();
        case Code.UnknownBookieIdException:
            return new UnknownBookieIdException();
        case Code.OperationRejectedException:
            // BUGFIX: this code previously fell through to the default branch and
            // produced a BookieIllegalOpException instead of the matching type.
            return new OperationRejectedException();
        default:
            return new BookieIllegalOpException();
        }
    }

    /**
     * An exception code indicates the failure reason.
     */
    public interface Code {
        int OK = 0;
        int UnauthorizedAccessException = -1;
        int IllegalOpException = -100;
        int LedgerFencedException = -101;
        int InvalidCookieException = -102;
        int UpgradeException = -103;
        int DiskPartitionDuplicationException = -104;
        int CookieNotFoundException = -105;
        int MetadataStoreException = -106;
        int UnknownBookieIdException = -107;
        int OperationRejectedException = -108;
    }

    /** Overrides the failure code carried by this exception. */
    public void setCode(int code) {
        this.code = code;
    }

    /** Returns the failure code carried by this exception. */
    public int getCode() {
        return this.code;
    }

    /**
     * Returns a human-readable message for the given code, with this exception's
     * own message (or its cause's message) appended in brackets when present.
     *
     * @param code one of the {@link Code} constants
     * @return the formatted message
     */
    public String getMessage(int code) {
        String err;
        switch(code) {
        case Code.OK:
            err = "No problem";
            break;
        case Code.UnauthorizedAccessException:
            err = "Error while reading ledger";
            break;
        case Code.LedgerFencedException:
            err = "Ledger has been fenced; No more entries can be added";
            break;
        case Code.InvalidCookieException:
            err = "Invalid environment cookie found";
            break;
        case Code.UpgradeException:
            err = "Error performing an upgrade operation ";
            break;
        case Code.DiskPartitionDuplicationException:
            err = "Disk Partition Duplication is not allowed";
            break;
        case Code.CookieNotFoundException:
            err = "Cookie not found";
            break;
        case Code.MetadataStoreException:
            err = "Error performing metadata operations";
            break;
        case Code.UnknownBookieIdException:
            err = "Unknown bookie id";
            break;
        case Code.OperationRejectedException:
            err = "Operation rejected";
            break;
        default:
            err = "Invalid operation";
            break;
        }
        // Prefer this exception's own message; fall back to the cause's message.
        String reason = super.getMessage();
        if (reason == null) {
            if (super.getCause() != null) {
                reason = super.getCause().getMessage();
            }
        }
        if (reason == null) {
            return err;
        } else {
            return String.format("%s [%s]", err, reason);
        }
    }

    /**
     * Signals that an unauthorized operation attempts to access the data in a bookie.
     */
    public static class BookieUnauthorizedAccessException extends BookieException {
        public BookieUnauthorizedAccessException() {
            super(Code.UnauthorizedAccessException);
        }
    }

    /**
     * Signals that an illegal operation attempts to access the data in a bookie.
     */
    public static class BookieIllegalOpException extends BookieException {
        public BookieIllegalOpException() {
            super(Code.IllegalOpException);
        }
        public BookieIllegalOpException(String reason) {
            super(Code.IllegalOpException, reason);
        }
        public BookieIllegalOpException(Throwable cause) {
            super(Code.IllegalOpException, cause);
        }
    }

    /**
     * Signals that a ledger has been fenced in a bookie. No more entries can be appended to that ledger.
     */
    public static class LedgerFencedException extends BookieException {
        public LedgerFencedException() {
            super(Code.LedgerFencedException);
        }
    }

    /**
     * Signals that the bookie rejected the operation. Used purely as a signal at
     * well-known points, so its stack trace is suppressed.
     */
    public static class OperationRejectedException extends BookieException {
        public OperationRejectedException() {
            super(Code.OperationRejectedException);
        }
        @Override
        public Throwable fillInStackTrace() {
            // Since this exception is a way to signal a specific condition and it's triggered and very specific points,
            // we can disable stack traces.
            return null;
        }
    }

    /**
     * Signal that an invalid cookie is found when starting a bookie.
     *
     * <p>This exception is mainly used for detecting if there is any malformed configuration in a bookie.
     */
    public static class InvalidCookieException extends BookieException {
        public InvalidCookieException() {
            this("");
        }
        public InvalidCookieException(String reason) {
            super(Code.InvalidCookieException, reason);
        }
        public InvalidCookieException(Throwable cause) {
            super(Code.InvalidCookieException, cause);
        }
    }

    /**
     * Signal that no cookie is found when starting a bookie.
     */
    public static class CookieNotFoundException extends BookieException {
        public CookieNotFoundException() {
            this("");
        }
        public CookieNotFoundException(String reason) {
            super(Code.CookieNotFoundException, reason);
        }
        public CookieNotFoundException(Throwable cause) {
            super(Code.CookieNotFoundException, cause);
        }
    }

    /**
     * Signals that an exception occurs on upgrading a bookie.
     */
    public static class UpgradeException extends BookieException {
        public UpgradeException() {
            super(Code.UpgradeException);
        }
        public UpgradeException(Throwable cause) {
            super(Code.UpgradeException, cause);
        }
        public UpgradeException(String reason) {
            super(Code.UpgradeException, reason);
        }
    }

    /**
     * Signals when multiple ledger/journal directories are mounted in same disk partition.
     */
    public static class DiskPartitionDuplicationException extends BookieException {
        public DiskPartitionDuplicationException() {
            super(Code.DiskPartitionDuplicationException);
        }
        public DiskPartitionDuplicationException(Throwable cause) {
            super(Code.DiskPartitionDuplicationException, cause);
        }
        public DiskPartitionDuplicationException(String reason) {
            super(Code.DiskPartitionDuplicationException, reason);
        }
    }

    /**
     * Signal when bookie has problems on accessing metadata store.
     */
    public static class MetadataStoreException extends BookieException {
        public MetadataStoreException() {
            this("");
        }
        public MetadataStoreException(String reason) {
            super(Code.MetadataStoreException, reason);
        }
        public MetadataStoreException(Throwable cause) {
            super(Code.MetadataStoreException, cause);
        }
        public MetadataStoreException(String reason, Throwable cause) {
            super(Code.MetadataStoreException, reason, cause);
        }
    }

    /**
     * Signals that an unknown bookie id was encountered.
     */
    public static class UnknownBookieIdException extends BookieException {
        public UnknownBookieIdException() {
            super(Code.UnknownBookieIdException);
        }
        public UnknownBookieIdException(Throwable cause) {
            super(Code.UnknownBookieIdException, cause);
        }
    }
}
|
|
/**
* Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.content.index;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.indy.model.core.StoreType;
import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import static org.commonjava.indy.pkg.maven.model.MavenPackageTypeDescriptor.MAVEN_PKG_KEY;
/**
 * Searchable, externalizable index entry that maps a content path to the store
 * (and optionally the origin store) it was found in.
 *
 * <p>Created by jdcasey on 3/15/16.
 */
public class IndexedStorePath
        implements Externalizable
{
    private final Logger logger = LoggerFactory.getLogger( this.getClass() );

    @Field( index = Index.YES, analyze = Analyze.NO )
    private StoreType storeType;

    @Field( index = Index.YES, analyze = Analyze.NO )
    private String storeName;

    @Field( index = Index.YES, analyze = Analyze.NO )
    private StoreType originStoreType;

    @Field( index = Index.YES, analyze = Analyze.NO )
    private String originStoreName;

    @Field( index = Index.YES, analyze = Analyze.NO )
    private String path;

    @Field( index = Index.YES, analyze = Analyze.NO )
    private String packageType;

    // Cached composite keys; transient, rebuilt from the indexed fields on demand.
    private transient StoreKey storeKey;

    private transient StoreKey originKey;

    // this needs to be public for Infinispan to not throw InvalidClassException with the first httprox request
    public IndexedStorePath(){}

    public IndexedStorePath( StoreKey storeKey, String path )
    {
        this.storeKey = storeKey;
        this.packageType = storeKey.getPackageType();
        // Keys without a package type default to the Maven package key.
        if ( this.packageType == null )
        {
            this.packageType = MAVEN_PKG_KEY;
        }
        this.storeType = storeKey.getType();
        this.storeName = storeKey.getName();
        this.path = path;
    }

    public IndexedStorePath( StoreKey storeKey, StoreKey origin, String path )
    {
        this( storeKey, path );
        this.originKey = origin;
        this.originStoreType = origin.getType();
        this.originStoreName = origin.getName();
    }

    /** Returns the store key, rebuilding it from the indexed fields after deserialization. */
    @JsonIgnore
    public StoreKey getStoreKey()
    {
        return storeKey != null ? storeKey : new StoreKey( packageType, storeType, storeName );
    }

    /** Returns the origin store key, or {@code null} if no origin was recorded. */
    @JsonIgnore
    public StoreKey getOriginStoreKey()
    {
        if ( originKey != null )
        {
            return originKey;
        }
        else if ( originStoreName != null )
        {
            return new StoreKey( packageType, originStoreType, originStoreName );
        }
        return null;
    }

    public StoreType getStoreType()
    {
        return storeType;
    }

    public String getStoreName()
    {
        return storeName;
    }

    public String getPath()
    {
        return path;
    }

    public StoreType getOriginStoreType()
    {
        return originStoreType;
    }

    public String getOriginStoreName()
    {
        return originStoreName;
    }

    public String getPackageType()
    {
        return packageType;
    }

    @Override
    public String toString()
    {
        /* @formatter:off */
        return "IndexedStorePath{" +
                "\n object ref: " + super.hashCode() +
                "\n hashcode: " + hashCode() +
                "\n packageType=" + packageType +
                "\n storeType=" + storeType +
                "\n storeName=" + storeName +
                "\n originStoreType=" + originStoreType +
                "\n originStoreName=" + originStoreName +
                "\n path='" + path + '\'' +
                "\n}";
        /* @formatter:on */
    }

    // NOTE(review): origin store type/name are excluded from equals() and hashCode(),
    // so entries compare equal regardless of origin — presumably intentional; confirm.
    @Override
    public boolean equals( Object o )
    {
        if ( this == o )
        {
            return true;
        }
        if ( !( o instanceof IndexedStorePath ) )
        {
            return false;
        }
        IndexedStorePath that = (IndexedStorePath) o;
        if ( !getPackageType().equals( that.getPackageType() ) )
        {
            return false;
        }
        if ( getStoreType() != that.getStoreType() )
        {
            return false;
        }
        if ( !getStoreName().equals( that.getStoreName() ) )
        {
            return false;
        }
        return getPath().equals( that.getPath() );
    }

    @Override
    public int hashCode()
    {
        int result = getPackageType().hashCode();
        result = 31 * result + getStoreType().hashCode();
        result = 31 * result + getStoreName().hashCode();
        result = 31 * result + getPath().hashCode();
        return result;
    }

    // Origin fields are written as empty strings when absent so readExternal can
    // distinguish "no origin" without writing nulls.
    @Override
    public void writeExternal( ObjectOutput out )
            throws IOException
    {
        out.writeObject( storeType.name() );
        out.writeObject( storeName );
        if ( originStoreType != null )
        {
            out.writeObject( originStoreType.name() );
        }
        else
        {
            out.writeObject( "" );
        }
        if ( originStoreName != null )
        {
            out.writeObject( originStoreName );
        }
        else
        {
            out.writeObject( "" );
        }
        out.writeObject( path );
        out.writeObject( packageType );
    }

    @Override
    public void readExternal( ObjectInput in )
            throws IOException, ClassNotFoundException
    {
        storeType = StoreType.get( (String) in.readObject() );
        storeName = (String) in.readObject();
        final String osTypeString = (String) in.readObject();
        originStoreType = "".equals( osTypeString ) ? null : StoreType.get( osTypeString );
        final String osNameString = (String) in.readObject();
        originStoreName = "".equals( osNameString ) ? null : osNameString;
        path = (String) in.readObject();
        try
        {
            packageType = (String) in.readObject();
        }
        catch ( IOException e )
        {
            // Entries written before packageType was serialized run out of stream data
            // here; fall back to the Maven default instead of failing the read.
            // BUGFIX: the exception is now passed as the throwable argument (the old
            // trailing "{}" placeholder consumed it, losing the stack trace).
            logger.warn( "Read packageType failed (probably reading an old data entry) and set to default 'maven'", e );
            packageType = MAVEN_PKG_KEY;
        }
    }
}
|
|
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.lib.parser.xml;
import com.google.common.base.Strings;
import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.ProtoConfigurableEntity;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.ext.io.OverrunException;
import com.streamsets.pipeline.api.ext.io.OverrunReader;
import com.streamsets.pipeline.lib.parser.AbstractDataParser;
import com.streamsets.pipeline.lib.parser.DataParserException;
import com.streamsets.pipeline.lib.xml.OverrunStreamingXmlParser;
import com.streamsets.pipeline.lib.xml.StreamingXmlParser;
import javax.xml.stream.XMLStreamException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Parses records out of an XML character stream via {@link OverrunStreamingXmlParser},
 * optionally attaching each field's XPath as a field attribute and the parser's
 * namespace mappings as record header attributes.
 */
public class XmlCharDataParser extends AbstractDataParser {
    // Matches a field path that ends in an indexed element: group 1 = the indexed path,
    // group 2 = its parent path, group 3 = the "[n]" index itself.
    private static final Pattern INDEX_PATTERN = Pattern.compile("((.*)/\\S+)(\\[\\d+\\]).*");
    // Matches the "/value" leaf the XML parser produces for an element's text content.
    private static final Pattern VALUE_PATTERN = Pattern.compile("'?(\\S+\\[\\d+\\])?'?/value$");
    // Matches the "'attr|name'" leaf the XML parser produces for an XML attribute.
    private static final Pattern ATTR_PATTERN = Pattern.compile("'?(\\S+\\[\\d+\\])?'?/'attr\\|(\\S+)'");
    public static final String REMOVE_FIELD_PATH_SINGLE_QUOTE_PATTERN = "'?([^']*)'?(\\[\\d+\\])?";
    public static final String RECORD_ATTRIBUTE_NAMESPACE_PREFIX = "xmlns:";
    private final ProtoConfigurableEntity.Context context;
    // Used as the prefix of generated record ids and in overrun error messages.
    private final String readerId;
    private final int maxObjectLen;
    private final OverrunStreamingXmlParser parser;
    private final boolean includeXpath;
    // Initial reader offset; reset to -1 after the first successful read.
    private long readerOffset;

    public XmlCharDataParser(
        ProtoConfigurableEntity.Context context,
        String readerId,
        OverrunReader reader,
        long readerOffset,
        String recordElement,
        boolean includeXpath,
        Map<String, String> namespaces,
        int maxObjectLen,
        boolean useFieldAttributesInsteadOfFields,
        boolean preserveRootElement
    ) throws IOException {
        this.context = context;
        this.readerId = readerId;
        this.readerOffset = readerOffset;
        this.maxObjectLen = maxObjectLen;
        this.includeXpath = includeXpath;
        try {
            parser = new OverrunStreamingXmlParser(
                reader,
                recordElement,
                namespaces,
                readerOffset,
                maxObjectLen,
                useFieldAttributesInsteadOfFields,
                preserveRootElement
            );
        } catch (XMLStreamException ex) {
            // Surface XML setup failures through the IOException contract of this constructor.
            throw new IOException(ex);
        }
    }

    /**
     * Reads the next record element from the stream.
     *
     * @return the next record, or {@code null} when the stream is exhausted
     * @throws DataParserException if the record exceeds {@code maxObjectLen} or the XML is invalid
     */
    @Override
    public Record parse() throws IOException, DataParserException {
        Record record = null;
        long offset = -1;
        try {
            offset = getOffsetAsLong();
            Field field = parser.read();
            // The initial offset only applies to the first read; afterwards the
            // parser's own position is authoritative.
            readerOffset = -1;
            if (field != null) {
                record = createRecord(offset, field);
            }
        } catch (OverrunException ex) {
            throw new DataParserException(Errors.XML_PARSER_02, readerId, offset, maxObjectLen);
        } catch (XMLStreamException ex) {
            throw new DataParserException(Errors.XML_PARSER_03, ex);
        }
        return record;
    }

    // Builds a record id from the reader id and offset, and optionally decorates
    // the fields with XPath attributes.
    protected Record createRecord(long offset, Field field) throws DataParserException {
        Record record = context.createRecord(readerId + "::" + offset);
        record.set(field);
        if (includeXpath) {
            setFieldXpathAttributes(record);
        }
        return record;
    }

    // Attaches an "xpath" attribute to every leaf field and copies the parser's
    // namespace mappings into record header attributes.
    private void setFieldXpathAttributes(Record record) {
        for (String path : record.getEscapedFieldPaths()) {
            // Only interested in leaves of the path tree so pass any complex types.
            // This check is needed because an XML element may be named as "value".
            if (record.get(path).getType() == Field.Type.LIST ||
                record.get(path).getType() == Field.Type.LIST_MAP ||
                record.get(path).getType() == Field.Type.MAP) {
                continue;
            }
            Matcher matcher = VALUE_PATTERN.matcher(path);
            Field field = record.get(path);
            String xpath = null;
            if (matcher.matches()) {
                // Element text content: xpath is the element's path.
                String fieldPath = removeSingleQuotesFromFieldPath(matcher.group(1));
                xpath = toXpath(fieldPath, record);
            } else {
                matcher = ATTR_PATTERN.matcher(path);
                if (matcher.matches()) {
                    // XML attribute: append "/@name" to the owning element's path.
                    String fieldPath = removeSingleQuotesFromFieldPath(matcher.group(1));
                    String attribute = matcher.group(2);
                    xpath = toXpath(fieldPath, record) + "/@" + attribute;
                }
            }
            if (!Strings.isNullOrEmpty(xpath)) {
                field.setAttribute(StreamingXmlParser.XPATH_KEY, xpath);
            }
        }
        Record.Header header = record.getHeader();
        for (Map.Entry<String, String> nsEntry : parser.getNamespaceUriToPrefixMappings().entrySet()) {
            header.setAttribute(RECORD_ATTRIBUTE_NAMESPACE_PREFIX + nsEntry.getValue(), nsEntry.getKey());
        }
    }

    // Strips the single quotes that escape special characters in field path segments.
    private static String removeSingleQuotesFromFieldPath(String fieldPath) {
        if (Strings.isNullOrEmpty(fieldPath)) {
            return fieldPath;
        } else {
            return fieldPath.replaceAll(REMOVE_FIELD_PATH_SINGLE_QUOTE_PATTERN, "$1$2");
        }
    }

    // Converts a record field path to an XPath, walking indexed segments from the
    // leaf upwards and flattening "[0]" indexes on single-element lists.
    private String toXpath(String fieldPath, Record record) {
        if (fieldPath == null) {
            fieldPath = "";
        }
        String xpath = fieldPath;
        List<MatchResult> matchResults = new ArrayList<>();
        Matcher matcher = INDEX_PATTERN.matcher(fieldPath);
        while (matcher.matches()) {
            MatchResult matchResult = matcher.toMatchResult();
            matchResults.add(matchResult);
            String parentPath = matchResult.group(2);
            matcher = INDEX_PATTERN.matcher(parentPath);
        }
        for (MatchResult matchResult : matchResults) {
            String currentPath = matchResult.group(1);
            String fieldIndex = matchResult.group(3);
            // If the field is an array of a single value, flatten it out
            // to make it comply with the XPath syntax.
            if (record.get(currentPath).getValueAsList().size() == 1) {
                xpath = xpath.replace(currentPath + fieldIndex, currentPath);
            }
        }
        return addXPathPrefix(xpath);
    }

    private String addXPathPrefix(String xpath) {
        String prefix = parser.getLastParsedFieldXpathPrefix();
        if (parser.isPreserveRootElement() && !prefix.isEmpty()) {
            // root element has already been included in the xpath and needs to be removed from the prefix
            // NOTE(review): copyOfRange(parts, 0, parts.length - 1) drops the *last*
            // segment of the prefix, not the leading root segment — confirm against
            // the prefix format produced by OverrunStreamingXmlParser.
            String[] parts = prefix.split("/");
            prefix = parts.length > 2 ?
                String.join("/", Arrays.copyOfRange(parts, 0, parts.length - 1)) : "";
        }
        return prefix.concat(xpath);
    }

    @Override
    public String getOffset() throws DataParserException {
        return String.valueOf(getOffsetAsLong());
    }

    // Prefers the configured initial offset until the first read; then the parser position.
    private long getOffsetAsLong() throws DataParserException {
        try {
            return (readerOffset > -1) ? readerOffset : parser.getReaderPosition();
        } catch (XMLStreamException ex) {
            throw new DataParserException(Errors.XML_PARSER_01, ex.toString(), ex);
        }
    }

    @Override
    public void close() throws IOException {
        parser.close();
    }
}
|
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.util;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nullable;
/**
* Simplified wrapper for MD5 message digests. See also
* com.google.math.crypto.MD5HMAC for a similar interface.
*
* @see java.security.MessageDigest
*/
public final class Fingerprint {
private final MessageDigest md;
/**
 * Creates and initializes a new MD5 object; if this fails, Java must be
 * installed incorrectly.
 *
 * <p>Every conforming JRE is required to provide an MD5 {@link MessageDigest},
 * so the checked {@link NoSuchAlgorithmException} is rethrown as an
 * {@link IllegalStateException}.
 */
public Fingerprint() {
    try {
        // MessageDigest algorithm names are case-insensitive; "md5" selects MD5.
        md = MessageDigest.getInstance("md5");
    } catch (NoSuchAlgorithmException e) {
        throw new IllegalStateException("MD5 not available", e);
    }
}
/**
 * Completes the hash computation by doing final operations, e.g., padding.
 *
 * <p>This method has the side-effect of resetting the underlying digest computer:
 * {@link MessageDigest#digest()} resets the digest after computing the value.
 *
 * @return the MD5 digest as a 16-byte array
 * @see java.security.MessageDigest#digest()
 */
public byte[] digestAndReset() {
    return md.digest();
}
/**
 * Completes the hash computation and returns the digest as a string.
 *
 * <p>This method has the side-effect of resetting the underlying digest computer
 * (it delegates to {@link #digestAndReset()}).
 *
 * @return the MD5 digest as a 32-character string of lowercase hexadecimal digits
 * @see com.google.math.crypto.MD5HMAC#toString()
 */
public String hexDigestAndReset() {
    return hexDigest(digestAndReset());
}
/**
* Returns a string representation of an MD5 digest.
*
* @param digest the MD5 digest, perhaps from a previous call to digest
* @return the digest as a 32-character string of hexadecimal digits
*/
public static String hexDigest(byte[] digest) {
StringBuilder b = new StringBuilder(32);
for (int i = 0; i < digest.length; i++) {
int n = digest[i];
b.append("0123456789abcdef".charAt((n >> 4) & 0xF));
b.append("0123456789abcdef".charAt(n & 0xF));
}
return b.toString();
}
/**
* Override of Object.toString to return a string for the MD5 digest without
* finalizing the digest computation. Calling hexDigest() instead will
* finalize the digest computation.
*
* @return the string returned by hexDigest()
*/
@Override
public String toString() {
try {
// MD5 does support cloning, so this should not fail
return hexDigest(((MessageDigest) md.clone()).digest());
} catch (CloneNotSupportedException e) {
// MessageDigest does not support cloning,
// so just return the toString() on the MessageDigest.
return md.toString();
}
}
/**
* Updates the digest with 0 or more bytes.
*
* @param input the array of bytes with which to update the digest
* @see java.security.MessageDigest#update(byte[])
*/
public Fingerprint addBytes(byte[] input) {
md.update(input);
return this;
}
/**
* Updates the digest with the specified number of bytes starting at offset.
*
* @param input the array of bytes with which to update the digest
* @param offset the offset into the array
* @param len the number of bytes to use
* @see java.security.MessageDigest#update(byte[], int, int)
*/
public Fingerprint addBytes(byte[] input, int offset, int len) {
md.update(input, offset, len);
return this;
}
/**
* Updates the digest with a boolean value.
*/
public Fingerprint addBoolean(boolean input) {
addBytes(new byte[] { (byte) (input ? 1 : 0) });
return this;
}
/**
* Updates the digest with a boolean value, correctly handling null.
*/
public Fingerprint addNullableBoolean(Boolean input) {
addInt(input == null ? -1 : (input.booleanValue() ? 1 : 0));
return this;
}
/**
* Updates the digest with the little-endian bytes of a given int value.
*
* @param input the integer with which to update the digest
*/
public Fingerprint addInt(int input) {
md.update(new byte[] {
(byte) input,
(byte) (input >> 8),
(byte) (input >> 16),
(byte) (input >> 24),
});
return this;
}
/**
* Updates the digest with the little-endian bytes of a given long value.
*
* @param input the long with which to update the digest
*/
public Fingerprint addLong(long input) {
md.update(new byte[]{
(byte) input,
(byte) (input >> 8),
(byte) (input >> 16),
(byte) (input >> 24),
(byte) (input >> 32),
(byte) (input >> 40),
(byte) (input >> 48),
(byte) (input >> 56),
});
return this;
}
/**
* Updates the digest with the little-endian bytes of a given int value, correctly distinguishing
* between null and non-null values.
*
* @param input the integer with which to update the digest
*/
public Fingerprint addNullableInt(@Nullable Integer input) {
if (input == null) {
addInt(0);
} else {
addInt(1);
addInt(input);
}
return this;
}
/**
* Updates the digest with a UUID.
*
* @param uuid the UUID with which to update the digest. Must not be null.
*/
public Fingerprint addUUID(UUID uuid) {
addLong(uuid.getLeastSignificantBits());
addLong(uuid.getMostSignificantBits());
return this;
}
/**
* Updates the digest with a String using its length plus its UTF8 encoded bytes.
*
* @param input the String with which to update the digest
* @see java.security.MessageDigest#update(byte[])
*/
public Fingerprint addString(String input) {
byte[] bytes = input.getBytes(UTF_8);
addInt(bytes.length);
md.update(bytes);
return this;
}
/**
* Updates the digest with a String using its length plus its UTF8 encoded bytes; if the string
* is null, then it uses -1 as the length.
*
* @param input the String with which to update the digest
* @see java.security.MessageDigest#update(byte[])
*/
public Fingerprint addNullableString(@Nullable String input) {
if (input == null) {
addInt(-1);
} else {
addString(input);
}
return this;
}
/**
* Updates the digest with a String using its length and content.
*
* @param input the String with which to update the digest
* @see java.security.MessageDigest#update(byte[])
*/
public Fingerprint addStringLatin1(String input) {
addInt(input.length());
byte[] bytes = new byte[input.length()];
for (int i = 0; i < input.length(); i++) {
bytes[i] = (byte) input.charAt(i);
}
md.update(bytes);
return this;
}
/**
* Updates the digest with a Path.
*
* @param input the Path with which to update the digest.
*/
public Fingerprint addPath(Path input) {
addStringLatin1(input.getPathString());
return this;
}
/**
* Updates the digest with a Path.
*
* @param input the Path with which to update the digest.
*/
public Fingerprint addPath(PathFragment input) {
addStringLatin1(input.getPathString());
return this;
}
/**
* Updates the digest with inputs by iterating over them and invoking
* {@code #addString(String)} on each element.
*
* @param inputs the inputs with which to update the digest
*/
public Fingerprint addStrings(Iterable<String> inputs) {
addInt(Iterables.size(inputs));
for (String input : inputs) {
addString(input);
}
return this;
}
/**
* Updates the digest with inputs by iterating over them and invoking
* {@code #addString(String)} on each element.
*
* @param inputs the inputs with which to update the digest
*/
public Fingerprint addStrings(String... inputs) {
addInt(inputs.length);
for (String input : inputs) {
addString(input);
}
return this;
}
/**
* Updates the digest with inputs which are pairs in a map, by iterating over
* the map entries and invoking {@code #addString(String)} on each key and
* value.
*
* @param inputs the inputs in a map with which to update the digest
*/
public Fingerprint addStringMap(Map<String, String> inputs) {
addInt(inputs.size());
for (Map.Entry<String, String> entry : inputs.entrySet()) {
addString(entry.getKey());
addString(entry.getValue());
}
return this;
}
/**
* Updates the digest with a list of paths by iterating over them and
* invoking {@link #addPath(PathFragment)} on each element.
*
* @param inputs the paths with which to update the digest
*/
public Fingerprint addPaths(Iterable<PathFragment> inputs) {
addInt(Iterables.size(inputs));
for (PathFragment path : inputs) {
addPath(path);
}
return this;
}
/**
* Reset the Fingerprint for additional use as though previous digesting had not been done.
*/
public void reset() {
md.reset();
}
// -------- Convenience methods ----------------------------
/**
* Computes the hex digest from a String using UTF8 encoding and returning
* the hexDigest().
*
* @param input the String from which to compute the digest
*/
public static String md5Digest(String input) {
Fingerprint f = new Fingerprint();
f.addBytes(input.getBytes(UTF_8));
return f.hexDigestAndReset();
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.spi.PollingConsumerPollStrategy;
/**
* Receive messages from AWS DynamoDB Stream service using AWS SDK version 2.x.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface Ddb2StreamEndpointBuilderFactory {
    /**
     * Builder for endpoint for the AWS DynamoDB Streams component.
     */
    public interface Ddb2StreamEndpointBuilder
            extends
                EndpointConsumerBuilder {
        // Narrows this builder to the advanced-options view; both interfaces
        // are implemented by the same underlying endpoint builder object.
        default AdvancedDdb2StreamEndpointBuilder advanced() {
            return (AdvancedDdb2StreamEndpointBuilder) this;
        }
        /**
         * Amazon DynamoDB client to use for all requests for this endpoint.
         *
         * The option is a:
         * &lt;code&gt;software.amazon.awssdk.services.dynamodb.streams.DynamoDbStreamsClient&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param amazonDynamoDbStreamsClient the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder amazonDynamoDbStreamsClient(
                Object amazonDynamoDbStreamsClient) {
            doSetProperty("amazonDynamoDbStreamsClient", amazonDynamoDbStreamsClient);
            return this;
        }
        /**
         * Amazon DynamoDB client to use for all requests for this endpoint.
         *
         * The option will be converted to a
         * &lt;code&gt;software.amazon.awssdk.services.dynamodb.streams.DynamoDbStreamsClient&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param amazonDynamoDbStreamsClient the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder amazonDynamoDbStreamsClient(
                String amazonDynamoDbStreamsClient) {
            doSetProperty("amazonDynamoDbStreamsClient", amazonDynamoDbStreamsClient);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which means any exceptions that occur while the consumer is trying
         * to pick up incoming messages, or the likes, will now be processed as
         * a message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param bridgeErrorHandler the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which means any exceptions that occur while the consumer is trying
         * to pick up incoming messages, or the likes, will now be processed as
         * a message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: consumer
         *
         * @param bridgeErrorHandler the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder bridgeErrorHandler(
                String bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Defines where in the DynamoDB stream to start getting records. Note
         * that using TRIM_HORIZON can cause a significant delay before the
         * stream has caught up to real-time. if {AT,AFTER}_SEQUENCE_NUMBER are
         * used, then a sequenceNumberProvider MUST be supplied.
         *
         * The option is a:
         * &lt;code&gt;software.amazon.awssdk.services.dynamodb.model.ShardIteratorType&lt;/code&gt; type.
         *
         * Default: LATEST
         * Group: consumer
         *
         * @param iteratorType the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder iteratorType(
                ShardIteratorType iteratorType) {
            doSetProperty("iteratorType", iteratorType);
            return this;
        }
        /**
         * Defines where in the DynamoDB stream to start getting records. Note
         * that using TRIM_HORIZON can cause a significant delay before the
         * stream has caught up to real-time. if {AT,AFTER}_SEQUENCE_NUMBER are
         * used, then a sequenceNumberProvider MUST be supplied.
         *
         * The option will be converted to a
         * &lt;code&gt;software.amazon.awssdk.services.dynamodb.model.ShardIteratorType&lt;/code&gt; type.
         *
         * Default: LATEST
         * Group: consumer
         *
         * @param iteratorType the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder iteratorType(String iteratorType) {
            doSetProperty("iteratorType", iteratorType);
            return this;
        }
        /**
         * Maximum number of records that will be fetched in each poll.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param maxResultsPerRequest the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder maxResultsPerRequest(
                int maxResultsPerRequest) {
            doSetProperty("maxResultsPerRequest", maxResultsPerRequest);
            return this;
        }
        /**
         * Maximum number of records that will be fetched in each poll.
         *
         * The option will be converted to a &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param maxResultsPerRequest the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder maxResultsPerRequest(
                String maxResultsPerRequest) {
            doSetProperty("maxResultsPerRequest", maxResultsPerRequest);
            return this;
        }
        /**
         * Set the need for overriding the endpoint. This option needs to be
         * used in combination with uriEndpointOverride option.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param overrideEndpoint the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder overrideEndpoint(
                boolean overrideEndpoint) {
            doSetProperty("overrideEndpoint", overrideEndpoint);
            return this;
        }
        /**
         * Set the need for overriding the endpoint. This option needs to be
         * used in combination with uriEndpointOverride option.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: consumer
         *
         * @param overrideEndpoint the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder overrideEndpoint(
                String overrideEndpoint) {
            doSetProperty("overrideEndpoint", overrideEndpoint);
            return this;
        }
        /**
         * To define a proxy host when instantiating the DDBStreams client.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param proxyHost the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder proxyHost(String proxyHost) {
            doSetProperty("proxyHost", proxyHost);
            return this;
        }
        /**
         * To define a proxy port when instantiating the DDBStreams client.
         *
         * The option is a: &lt;code&gt;java.lang.Integer&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param proxyPort the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder proxyPort(Integer proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * To define a proxy port when instantiating the DDBStreams client.
         *
         * The option will be converted to a
         * &lt;code&gt;java.lang.Integer&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param proxyPort the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder proxyPort(String proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * To define a proxy protocol when instantiating the DDBStreams client.
         *
         * The option is a:
         * &lt;code&gt;software.amazon.awssdk.core.Protocol&lt;/code&gt; type.
         *
         * Default: HTTPS
         * Group: consumer
         *
         * @param proxyProtocol the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder proxyProtocol(Protocol proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }
        /**
         * To define a proxy protocol when instantiating the DDBStreams client.
         *
         * The option will be converted to a
         * &lt;code&gt;software.amazon.awssdk.core.Protocol&lt;/code&gt; type.
         *
         * Default: HTTPS
         * Group: consumer
         *
         * @param proxyProtocol the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder proxyProtocol(String proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }
        /**
         * The region in which the DDBStreams client needs to work.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param region the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder region(String region) {
            doSetProperty("region", region);
            return this;
        }
        /**
         * If the polling consumer did not poll any files, you can enable this
         * option to send an empty message (no body) instead.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param sendEmptyMessageWhenIdle the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder sendEmptyMessageWhenIdle(
                boolean sendEmptyMessageWhenIdle) {
            doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
            return this;
        }
        /**
         * If the polling consumer did not poll any files, you can enable this
         * option to send an empty message (no body) instead.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: consumer
         *
         * @param sendEmptyMessageWhenIdle the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder sendEmptyMessageWhenIdle(
                String sendEmptyMessageWhenIdle) {
            doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
            return this;
        }
        /**
         * Provider for the sequence number when using one of the two
         * ShardIteratorType.{AT,AFTER}_SEQUENCE_NUMBER iterator types. Can be a
         * registry reference or a literal sequence number.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.component.aws2.ddbstream.SequenceNumberProvider&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param sequenceNumberProvider the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder sequenceNumberProvider(
                Object sequenceNumberProvider) {
            doSetProperty("sequenceNumberProvider", sequenceNumberProvider);
            return this;
        }
        /**
         * Provider for the sequence number when using one of the two
         * ShardIteratorType.{AT,AFTER}_SEQUENCE_NUMBER iterator types. Can be a
         * registry reference or a literal sequence number.
         *
         * The option will be converted to a
         * &lt;code&gt;org.apache.camel.component.aws2.ddbstream.SequenceNumberProvider&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param sequenceNumberProvider the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder sequenceNumberProvider(
                String sequenceNumberProvider) {
            doSetProperty("sequenceNumberProvider", sequenceNumberProvider);
            return this;
        }
        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param trustAllCertificates the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder trustAllCertificates(
                boolean trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }
        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: consumer
         *
         * @param trustAllCertificates the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder trustAllCertificates(
                String trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }
        /**
         * Set the overriding uri endpoint. This option needs to be used in
         * combination with overrideEndpoint option.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: consumer
         *
         * @param uriEndpointOverride the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder uriEndpointOverride(
                String uriEndpointOverride) {
            doSetProperty("uriEndpointOverride", uriEndpointOverride);
            return this;
        }
        /**
         * Set whether the DynamoDB Streams client should expect to load
         * credentials through a default credentials provider or to expect
         * static credentials to be passed in.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: consumer
         *
         * @param useDefaultCredentialsProvider the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder useDefaultCredentialsProvider(
                boolean useDefaultCredentialsProvider) {
            doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider);
            return this;
        }
        /**
         * Set whether the DynamoDB Streams client should expect to load
         * credentials through a default credentials provider or to expect
         * static credentials to be passed in.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: consumer
         *
         * @param useDefaultCredentialsProvider the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder useDefaultCredentialsProvider(
                String useDefaultCredentialsProvider) {
            doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider);
            return this;
        }
        /**
         * The number of subsequent error polls (failed due to some error) that
         * should happen before the backoffMultiplier should kick-in.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffErrorThreshold the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder backoffErrorThreshold(
                int backoffErrorThreshold) {
            doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
            return this;
        }
        /**
         * The number of subsequent error polls (failed due to some error) that
         * should happen before the backoffMultiplier should kick-in.
         *
         * The option will be converted to a &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffErrorThreshold the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder backoffErrorThreshold(
                String backoffErrorThreshold) {
            doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
            return this;
        }
        /**
         * The number of subsequent idle polls that should happen before the
         * backoffMultiplier should kick-in.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffIdleThreshold the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder backoffIdleThreshold(
                int backoffIdleThreshold) {
            doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
            return this;
        }
        /**
         * The number of subsequent idle polls that should happen before the
         * backoffMultiplier should kick-in.
         *
         * The option will be converted to a &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffIdleThreshold the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder backoffIdleThreshold(
                String backoffIdleThreshold) {
            doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
            return this;
        }
        /**
         * To let the scheduled polling consumer backoff if there has been a
         * number of subsequent idles/errors in a row. The multiplier is then
         * the number of polls that will be skipped before the next actual
         * attempt is happening again. When this option is in use then
         * backoffIdleThreshold and/or backoffErrorThreshold must also be
         * configured.
         *
         * The option is a: &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffMultiplier the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder backoffMultiplier(
                int backoffMultiplier) {
            doSetProperty("backoffMultiplier", backoffMultiplier);
            return this;
        }
        /**
         * To let the scheduled polling consumer backoff if there has been a
         * number of subsequent idles/errors in a row. The multiplier is then
         * the number of polls that will be skipped before the next actual
         * attempt is happening again. When this option is in use then
         * backoffIdleThreshold and/or backoffErrorThreshold must also be
         * configured.
         *
         * The option will be converted to a &lt;code&gt;int&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param backoffMultiplier the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder backoffMultiplier(
                String backoffMultiplier) {
            doSetProperty("backoffMultiplier", backoffMultiplier);
            return this;
        }
        /**
         * Milliseconds before the next poll.
         *
         * The option is a: &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 500
         * Group: scheduler
         *
         * @param delay the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder delay(long delay) {
            doSetProperty("delay", delay);
            return this;
        }
        /**
         * Milliseconds before the next poll.
         *
         * The option will be converted to a &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 500
         * Group: scheduler
         *
         * @param delay the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder delay(String delay) {
            doSetProperty("delay", delay);
            return this;
        }
        /**
         * If greedy is enabled, then the ScheduledPollConsumer will run
         * immediately again, if the previous run polled 1 or more messages.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: false
         * Group: scheduler
         *
         * @param greedy the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder greedy(boolean greedy) {
            doSetProperty("greedy", greedy);
            return this;
        }
        /**
         * If greedy is enabled, then the ScheduledPollConsumer will run
         * immediately again, if the previous run polled 1 or more messages.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: false
         * Group: scheduler
         *
         * @param greedy the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder greedy(String greedy) {
            doSetProperty("greedy", greedy);
            return this;
        }
        /**
         * Milliseconds before the first poll starts.
         *
         * The option is a: &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 1000
         * Group: scheduler
         *
         * @param initialDelay the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder initialDelay(long initialDelay) {
            doSetProperty("initialDelay", initialDelay);
            return this;
        }
        /**
         * Milliseconds before the first poll starts.
         *
         * The option will be converted to a &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 1000
         * Group: scheduler
         *
         * @param initialDelay the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder initialDelay(String initialDelay) {
            doSetProperty("initialDelay", initialDelay);
            return this;
        }
        /**
         * Specifies a maximum limit of number of fires. So if you set it to 1,
         * the scheduler will only fire once. If you set it to 5, it will only
         * fire five times. A value of zero or negative means fire forever.
         *
         * The option is a: &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 0
         * Group: scheduler
         *
         * @param repeatCount the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder repeatCount(long repeatCount) {
            doSetProperty("repeatCount", repeatCount);
            return this;
        }
        /**
         * Specifies a maximum limit of number of fires. So if you set it to 1,
         * the scheduler will only fire once. If you set it to 5, it will only
         * fire five times. A value of zero or negative means fire forever.
         *
         * The option will be converted to a &lt;code&gt;long&lt;/code&gt; type.
         *
         * Default: 0
         * Group: scheduler
         *
         * @param repeatCount the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder repeatCount(String repeatCount) {
            doSetProperty("repeatCount", repeatCount);
            return this;
        }
        /**
         * The consumer logs a start/complete log line when it polls. This
         * option allows you to configure the logging level for that.
         *
         * The option is a:
         * &lt;code&gt;org.apache.camel.LoggingLevel&lt;/code&gt; type.
         *
         * Default: TRACE
         * Group: scheduler
         *
         * @param runLoggingLevel the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder runLoggingLevel(
                LoggingLevel runLoggingLevel) {
            doSetProperty("runLoggingLevel", runLoggingLevel);
            return this;
        }
        /**
         * The consumer logs a start/complete log line when it polls. This
         * option allows you to configure the logging level for that.
         *
         * The option will be converted to a
         * &lt;code&gt;org.apache.camel.LoggingLevel&lt;/code&gt; type.
         *
         * Default: TRACE
         * Group: scheduler
         *
         * @param runLoggingLevel the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder runLoggingLevel(String runLoggingLevel) {
            doSetProperty("runLoggingLevel", runLoggingLevel);
            return this;
        }
        /**
         * Allows for configuring a custom/shared thread pool to use for the
         * consumer. By default each consumer has its own single threaded thread
         * pool.
         *
         * The option is a:
         * &lt;code&gt;java.util.concurrent.ScheduledExecutorService&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param scheduledExecutorService the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder scheduledExecutorService(
                ScheduledExecutorService scheduledExecutorService) {
            doSetProperty("scheduledExecutorService", scheduledExecutorService);
            return this;
        }
        /**
         * Allows for configuring a custom/shared thread pool to use for the
         * consumer. By default each consumer has its own single threaded thread
         * pool.
         *
         * The option will be converted to a
         * &lt;code&gt;java.util.concurrent.ScheduledExecutorService&lt;/code&gt; type.
         *
         * Group: scheduler
         *
         * @param scheduledExecutorService the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder scheduledExecutorService(
                String scheduledExecutorService) {
            doSetProperty("scheduledExecutorService", scheduledExecutorService);
            return this;
        }
        /**
         * To use a cron scheduler from either camel-spring or camel-quartz
         * component. Use value spring or quartz for built in scheduler.
         *
         * The option is a: &lt;code&gt;java.lang.Object&lt;/code&gt; type.
         *
         * Default: none
         * Group: scheduler
         *
         * @param scheduler the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder scheduler(Object scheduler) {
            doSetProperty("scheduler", scheduler);
            return this;
        }
        /**
         * To use a cron scheduler from either camel-spring or camel-quartz
         * component. Use value spring or quartz for built in scheduler.
         *
         * The option will be converted to a
         * &lt;code&gt;java.lang.Object&lt;/code&gt; type.
         *
         * Default: none
         * Group: scheduler
         *
         * @param scheduler the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder scheduler(String scheduler) {
            doSetProperty("scheduler", scheduler);
            return this;
        }
        /**
         * To configure additional properties when using a custom scheduler or
         * any of the Quartz, Spring based scheduler.
         *
         * The option is a: &lt;code&gt;java.util.Map&amp;lt;java.lang.String,
         * java.lang.Object&amp;gt;&lt;/code&gt; type.
         * The option is multivalued, and you can use the
         * schedulerProperties(String, Object) method to add a value (call the
         * method multiple times to set more values).
         *
         * Group: scheduler
         *
         * @param key the option key
         * @param value the option value
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder schedulerProperties(
                String key,
                Object value) {
            doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
            return this;
        }
        /**
         * To configure additional properties when using a custom scheduler or
         * any of the Quartz, Spring based scheduler.
         *
         * The option is a: &lt;code&gt;java.util.Map&amp;lt;java.lang.String,
         * java.lang.Object&amp;gt;&lt;/code&gt; type.
         * The option is multivalued, and you can use the
         * schedulerProperties(String, Object) method to add a value (call the
         * method multiple times to set more values).
         *
         * Group: scheduler
         *
         * @param values the values
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder schedulerProperties(Map values) {
            doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
            return this;
        }
        /**
         * Whether the scheduler should be auto started.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param startScheduler the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder startScheduler(boolean startScheduler) {
            doSetProperty("startScheduler", startScheduler);
            return this;
        }
        /**
         * Whether the scheduler should be auto started.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param startScheduler the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder startScheduler(String startScheduler) {
            doSetProperty("startScheduler", startScheduler);
            return this;
        }
        /**
         * Time unit for initialDelay and delay options.
         *
         * The option is a:
         * &lt;code&gt;java.util.concurrent.TimeUnit&lt;/code&gt; type.
         *
         * Default: MILLISECONDS
         * Group: scheduler
         *
         * @param timeUnit the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder timeUnit(TimeUnit timeUnit) {
            doSetProperty("timeUnit", timeUnit);
            return this;
        }
        /**
         * Time unit for initialDelay and delay options.
         *
         * The option will be converted to a
         * &lt;code&gt;java.util.concurrent.TimeUnit&lt;/code&gt; type.
         *
         * Default: MILLISECONDS
         * Group: scheduler
         *
         * @param timeUnit the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder timeUnit(String timeUnit) {
            doSetProperty("timeUnit", timeUnit);
            return this;
        }
        /**
         * Controls if fixed delay or fixed rate is used. See
         * ScheduledExecutorService in JDK for details.
         *
         * The option is a: &lt;code&gt;boolean&lt;/code&gt; type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param useFixedDelay the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder useFixedDelay(boolean useFixedDelay) {
            doSetProperty("useFixedDelay", useFixedDelay);
            return this;
        }
        /**
         * Controls if fixed delay or fixed rate is used. See
         * ScheduledExecutorService in JDK for details.
         *
         * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt;
         * type.
         *
         * Default: true
         * Group: scheduler
         *
         * @param useFixedDelay the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder useFixedDelay(String useFixedDelay) {
            doSetProperty("useFixedDelay", useFixedDelay);
            return this;
        }
        /**
         * Amazon AWS Access Key.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param accessKey the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder accessKey(String accessKey) {
            doSetProperty("accessKey", accessKey);
            return this;
        }
        /**
         * Amazon AWS Secret Key.
         *
         * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type.
         *
         * Group: security
         *
         * @param secretKey the value to set
         * @return the dsl builder
         */
        default Ddb2StreamEndpointBuilder secretKey(String secretKey) {
            doSetProperty("secretKey", secretKey);
            return this;
        }
    }
/**
* Advanced builder for endpoint for the AWS DynamoDB Streams component.
*/
public interface AdvancedDdb2StreamEndpointBuilder
extends
EndpointConsumerBuilder {
default Ddb2StreamEndpointBuilder basic() {
return (Ddb2StreamEndpointBuilder) this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a:
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedDdb2StreamEndpointBuilder exceptionHandler(
ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedDdb2StreamEndpointBuilder exceptionHandler(
String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a:
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedDdb2StreamEndpointBuilder exchangePattern(
ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedDdb2StreamEndpointBuilder exchangePattern(
String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option is a:
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*
* @param pollStrategy the value to set
* @return the dsl builder
*/
default AdvancedDdb2StreamEndpointBuilder pollStrategy(
PollingConsumerPollStrategy pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
/**
* A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
* you to provide your custom implementation to control error handling
* usually occurred during the poll operation before an Exchange have
* been created and being routed in Camel.
*
* The option will be converted to a
* <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
*
* Group: consumer (advanced)
*
* @param pollStrategy the value to set
* @return the dsl builder
*/
default AdvancedDdb2StreamEndpointBuilder pollStrategy(
String pollStrategy) {
doSetProperty("pollStrategy", pollStrategy);
return this;
}
}
/**
* Proxy enum for
* <code>software.amazon.awssdk.services.dynamodb.model.ShardIteratorType</code> enum.
*/
enum ShardIteratorType {
TRIM_HORIZON,
LATEST,
AT_SEQUENCE_NUMBER,
AFTER_SEQUENCE_NUMBER,
UNKNOWN_TO_SDK_VERSION;
}
/**
* Proxy enum for <code>software.amazon.awssdk.core.Protocol</code> enum.
*/
enum Protocol {
HTTP,
HTTPS;
}
public interface Ddb2StreamBuilders {
/**
* AWS DynamoDB Streams (camel-aws2-ddb)
* Receive messages from AWS DynamoDB Stream service using AWS SDK
* version 2.x.
*
* Category: cloud,messaging,streams
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-ddb
*
* Syntax: <code>aws2-ddbstream:tableName</code>
*
* Path parameter: tableName (required)
* Name of the dynamodb table
*
* @param path tableName
* @return the dsl builder
*/
default Ddb2StreamEndpointBuilder aws2Ddbstream(String path) {
return Ddb2StreamEndpointBuilderFactory.endpointBuilder("aws2-ddbstream", path);
}
/**
* AWS DynamoDB Streams (camel-aws2-ddb)
* Receive messages from AWS DynamoDB Stream service using AWS SDK
* version 2.x.
*
* Category: cloud,messaging,streams
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-ddb
*
* Syntax: <code>aws2-ddbstream:tableName</code>
*
* Path parameter: tableName (required)
* Name of the dynamodb table
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path tableName
* @return the dsl builder
*/
default Ddb2StreamEndpointBuilder aws2Ddbstream(
String componentName,
String path) {
return Ddb2StreamEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static Ddb2StreamEndpointBuilder endpointBuilder(
String componentName,
String path) {
class Ddb2StreamEndpointBuilderImpl extends AbstractEndpointBuilder implements Ddb2StreamEndpointBuilder, AdvancedDdb2StreamEndpointBuilder {
public Ddb2StreamEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new Ddb2StreamEndpointBuilderImpl(path);
}
}
|
|
package com.github.srec.util;
import com.github.srec.MainMethodRunningException;
import com.github.srec.SRecException;
import com.github.srec.command.ExecutionContext;
import com.github.srec.command.base.CommandSymbol;
import com.github.srec.command.base.LiteralCommand;
import com.github.srec.command.base.ValueCommand;
import com.github.srec.command.base.VarCommand;
import com.github.srec.command.exception.CommandExecutionException;
import com.github.srec.command.value.*;
import com.github.srec.command.value.StringValue;
import groovy.lang.Binding;
import groovy.lang.GString;
import groovy.lang.GroovyShell;
import javax.swing.*;
import java.awt.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import static org.apache.commons.lang.StringUtils.isBlank;
/**
 * Assorted static helpers for the srec recorder: string quoting, Swing
 * component/label lookup, running a Swing {@code main} method on the EDT, and
 * conversions between srec {@link Value}s and plain Java objects (including
 * Groovy expression evaluation).
 *
 * @author Victor Tatai
 */
public final class Utils {
    /**
     * Flag which indicates whether the recorder should scan for labels if the text field has no name.
     */
    private static final boolean SCAN_FOR_LABELS = true;
    private Utils() {}
    /**
     * Wraps a string in double quotes.
     *
     * @param str the string, may be null
     * @return the quoted string, or null when the input is null
     */
    public static String quote(String str) {
        if (str == null) return null;
        return "\"" + str + "\"";
    }
    /**
     * Recursively searches a container hierarchy for the JLabel whose
     * labelFor property points at the given component.
     *
     * @param container the root container to search
     * @param component the labelled component
     * @return the matching label, or null if none is found
     */
    public static JLabel getLabelFor(Container container, Component component) {
        for (int i = 0; i < container.getComponents().length; i++) {
            Component curr = container.getComponents()[i];
            if (curr instanceof JLabel && ((JLabel) curr).getLabelFor() == component) return (JLabel) curr;
            if (curr instanceof Container) {
                JLabel l = getLabelFor((Container) curr, component);
                if (l != null) return l;
            }
        }
        return null;
    }
    /**
     * Parses a locator string. A string without '=' is treated as a plain
     * component name; otherwise the string is split on '=' and consumed as
     * alternating key/value tokens (a trailing key with no value maps to "").
     *
     * @param locatorString the locator text
     * @return a map from locator keys to values
     */
    public static Map<String, String> parseLocator(String locatorString) {
        Map<String, String> map = new HashMap<String, String>();
        if (locatorString.indexOf('=') == -1) {
            map.put("name", locatorString);
            return map;
        }
        String[] strs = locatorString.split("=");
        for (int i = 0; i < strs.length; i = i + 2) {
            String key = strs[i].trim();
            String value;
            if (i + 1 >= strs.length) value = "";
            else value = strs[i + 1].trim();
            map.put(key, value);
        }
        return map;
    }
    /**
     * Loads the named class and invokes its public static void main(String[])
     * on the Swing event dispatch thread, blocking until it returns.
     *
     * @param clName fully qualified class name
     * @param args   arguments forwarded to main
     * @throws MainMethodRunningException if the class or method cannot be
     *         found, the signature is wrong, or main itself fails
     */
    public static void runSwingMain(String clName, final String[] args) {
        try {
            // Fix: use Class<?> instead of the raw Class type.
            Class<?> cl = Class.forName(clName);
            final Method m = cl.getMethod("main", String[].class);
            if (!Modifier.isStatic(m.getModifiers())) {
                throw new MainMethodRunningException("Incorrect signature for main method");
            }
            if (!m.getReturnType().equals(void.class)) {
                throw new MainMethodRunningException("Incorrect signature for main method");
            }
            SwingUtilities.invokeAndWait(new Runnable() {
                @Override
                public void run() {
                    try {
                        m.invoke(null, new Object[]{ args });
                    } catch (IllegalAccessException e) {
                        throw new MainMethodRunningException(e);
                    } catch (InvocationTargetException e) {
                        throw new MainMethodRunningException(e);
                    }
                }
            });
        } catch (ClassNotFoundException e) {
            throw new MainMethodRunningException(e);
        } catch (NoSuchMethodException e) {
            throw new MainMethodRunningException(e);
        } catch (InvocationTargetException e) {
            // Thrown by invokeAndWait when the Runnable above fails.
            throw new MainMethodRunningException(e);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag before wrapping the exception.
            Thread.currentThread().interrupt();
            throw new MainMethodRunningException(e);
        }
    }
    /**
     * Close all windows.
     *
     * @param ignoredWindows The windows to ignore
     */
    public static void closeWindows(Window... ignoredWindows) {
        Window[] ws = Window.getWindows();
        for (Window w : ws) {
            if (!contains(ignoredWindows, w)) w.dispose();
        }
    }
    /** Identity search: true if w is one of the given windows. */
    private static boolean contains(Window[] windows, Window w) {
        for (Window window : windows) {
            if (window == w) return true;
        }
        return false;
    }
    /**
     * Derives a locator for a component: its name, or a "label=..." locator
     * built from the associated JLabel when the name is blank.
     *
     * @param component the component
     * @return the locator, possibly null when neither name nor label exists
     */
    public static String getLocator(Component component) {
        String locator = component.getName();
        if (isBlank(locator) && SCAN_FOR_LABELS) {
            // Fix: a component not yet added to a container has no parent;
            // previously this caused an NPE inside getLabelFor.
            Container parent = component.getParent();
            if (parent != null) {
                JLabel label = getLabelFor(parent, component);
                if (label != null) locator = "label=" + label.getText();
            }
        }
        return locator;
    }
    /** Returns the index of the component within its parent's children. */
    private static int findComponentIndex(Component component) {
        for (int i = 0; i < component.getParent().getComponents().length; i++) {
            Component candidate = component.getParent().getComponents()[i];
            if (candidate == component) return i;
        }
        throw new SRecException("Could not find component inside its parent");
    }
    /**
     * Renders a parameter map as a comma-separated list of its values.
     *
     * @param parameters the parameters, may be null
     * @return the joined values, or "" for a null map
     */
    public static String asString(Map<String, Value> parameters) {
        if (parameters == null) return "";
        StringBuilder strb = new StringBuilder();
        for (Map.Entry<String, Value> entry : parameters.entrySet()) {
            String parameter = entry.getValue().toString();
            strb.append(parameter).append(", ");
        }
        final String s = strb.toString();
        return s.endsWith(", ") ? s.substring(0, s.length() - 2) : s;
    }
    /**
     * Trims all elements inside the array, modifying the original array.
     *
     * @param strings The string array
     * @return The passed string array, trimmed
     */
    public static String[] trimArray(String[] strings) {
        for (int i = 0; i < strings.length; i++) {
            strings[i] = strings[i].trim();
        }
        return strings;
    }
    /**
     * Utility method useful for creating a parameter map.
     *
     * @param params The parameters, should be in the format "name", value
     * @return The parameters map
     */
    public static Map<String, ValueCommand> createParameterMap(Object... params) {
        assert params.length % 2 == 0;
        Map<String, ValueCommand> ret = new HashMap<String, ValueCommand>();
        for (int i = 0; i < params.length; i = i + 2) {
            String param = params[i].toString();
            Value value = convertFromJava(params[i + 1]);
            ret.put(param, new LiteralCommand(value));
        }
        return ret;
    }
    /**
     * Converts a Java object to a srec value.
     *
     * @param o The Java object
     * @return The srec value
     * @throws CommandExecutionException when no equivalent srec value exists
     */
    public static Value convertFromJava(Object o) {
        if (o instanceof Long) {
            return new NumberValue(new BigDecimal((Long) o));
        } else if (o instanceof Integer) {
            return new NumberValue(new BigDecimal((Integer) o));
        } else if (o instanceof Double) {
            return new NumberValue(new BigDecimal((Double) o));
        } else if (o instanceof Float) {
            return new NumberValue(new BigDecimal((Float) o));
        } else if (o instanceof String) {
            return new StringValue((String) o);
        } else if (o instanceof Date) {
            return new DateValue((Date) o);
        } else if (o instanceof Boolean) {
            return BooleanValue.getInstance((Boolean) o);
        } else if (o == null) {
            return NilValue.getInstance();
        }
        throw new CommandExecutionException("Could not convert Java object " + o + " to an equivalent srec value");
    }
    /**
     * Converts a srec value to a Java object.
     *
     * @param value The value
     * @return The Java object
     * @throws CommandExecutionException when the value type is unknown
     */
    public static Object convertToJava(Value value) {
        switch (value.getType()) {
            case STRING:
                return ((StringValue) value).get();
            case BOOLEAN:
                return ((BooleanValue) value).get();
            case NUMBER:
                return ((NumberValue) value).get();
            case DATE:
                return ((DateValue) value).get();
            case NIL:
                return null;
            case OBJECT:
                return value.get();
        }
        throw new CommandExecutionException("Could not convert value " + value + " to an equivalent Java object");
    }
    /**
     * Evaluates an expression using Groovy. All VarCommands inside the context are used in order to evaluate the given
     * expression.
     *
     * @param context The EC
     * @param expression The expression to evaluate
     * @return The value
     */
    public static Object groovyEvaluate(ExecutionContext context, String expression) {
        Binding binding = new Binding();
        for (Map.Entry<String, CommandSymbol> entry : context.getSymbols().entrySet()) {
            final CommandSymbol symbol = entry.getValue();
            if (symbol instanceof VarCommand) {
                binding.setVariable(entry.getKey(), convertToJava(((VarCommand) symbol).getValue(context)));
            }
        }
        GroovyShell shell = new GroovyShell(binding);
        final Object o = shell.evaluate(expression);
        // GStrings are normalized to plain java.lang.String.
        if (o instanceof GString) {
            return o.toString();
        }
        return o;
    }
    /**
     * Evaluates an expression using Groovy, converting the final value.
     *
     * @param context The EC
     * @param expression The expression to evaluate
     * @return The value converted
     */
    public static Value groovyEvaluateConvert(ExecutionContext context, String expression) {
        Object obj = groovyEvaluate(context, expression);
        return Utils.convertFromJava(obj);
    }
}
|
|
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.testsuite.transport.socket;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundByteHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.ssl.SslHandler;
import io.netty.handler.stream.ChunkedWriteHandler;
import io.netty.testsuite.util.BogusSslContextFactory;
import io.netty.util.concurrent.Future;
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.ssl.SSLEngine;
import org.junit.Test;
// Echoes 1 MiB of random data over an SSL-wrapped socket in both directions
// and verifies every byte, with and without a ChunkedWriteHandler in the pipeline.
public class SocketSslEchoTest extends AbstractSocketTest {
    // Bytes written before the SSL handshake has completed.
    private static final int FIRST_MESSAGE_SIZE = 16384;
    private static final Random random = new Random();
    // 1 MiB payload shared by the client writes and both echo handlers.
    static final byte[] data = new byte[1048576];
    static {
        random.nextBytes(data);
    }
    @Test
    public void testSslEcho() throws Throwable {
        run();
    }
    public void testSslEcho(ServerBootstrap sb, Bootstrap cb) throws Throwable {
        testSslEcho0(sb, cb, false);
    }
    @Test
    public void testSslEchoWithChunkHandler() throws Throwable {
        run();
    }
    public void testSslEchoWithChunkHandler(ServerBootstrap sb, Bootstrap cb) throws Throwable {
        testSslEcho0(sb, cb, true);
    }
    // Core test: wires SslHandler (plus optional ChunkedWriteHandler) into both
    // pipelines, writes the payload from the client, and waits until both sides
    // have echoed/verified every byte or one side recorded an exception.
    private void testSslEcho0(ServerBootstrap sb, Bootstrap cb, final boolean chunkWriteHandler) throws Throwable {
        final EchoHandler sh = new EchoHandler(true);
        final EchoHandler ch = new EchoHandler(false);
        final SSLEngine sse = BogusSslContextFactory.getServerContext().createSSLEngine();
        final SSLEngine cse = BogusSslContextFactory.getClientContext().createSSLEngine();
        sse.setUseClientMode(false);
        cse.setUseClientMode(true);
        sb.childHandler(new ChannelInitializer<SocketChannel>() {
            @Override
            public void initChannel(SocketChannel sch) throws Exception {
                // SSL must sit first so it decrypts before the echo handler runs.
                sch.pipeline().addFirst("ssl", new SslHandler(sse));
                if (chunkWriteHandler) {
                    sch.pipeline().addLast(new ChunkedWriteHandler());
                }
                sch.pipeline().addLast("handler", sh);
            }
        });
        cb.handler(new ChannelInitializer<SocketChannel>() {
            @Override
            public void initChannel(SocketChannel sch) throws Exception {
                sch.pipeline().addFirst("ssl", new SslHandler(cse));
                if (chunkWriteHandler) {
                    sch.pipeline().addLast(new ChunkedWriteHandler());
                }
                sch.pipeline().addLast("handler", ch);
            }
        });
        Channel sc = sb.bind().sync().channel();
        Channel cc = cb.connect().sync().channel();
        Future<Channel> hf = cc.pipeline().get(SslHandler.class).handshakeFuture();
        // First chunk is written before the handshake finishes on purpose.
        cc.write(Unpooled.wrappedBuffer(data, 0, FIRST_MESSAGE_SIZE));
        // NOTE(review): this flag is never set anywhere in this class, so the
        // assertFalse below always passes — looks like a vestigial check.
        final AtomicBoolean firstByteWriteFutureDone = new AtomicBoolean();
        hf.sync();
        assertFalse(firstByteWriteFutureDone.get());
        // Write the rest of the payload in random-sized chunks (up to 64 KiB).
        for (int i = FIRST_MESSAGE_SIZE; i < data.length;) {
            int length = Math.min(random.nextInt(1024 * 64), data.length - i);
            cc.write(Unpooled.wrappedBuffer(data, i, length));
            i += length;
        }
        // Poll until the client has received the full echo, or either side failed.
        while (ch.counter < data.length) {
            if (sh.exception.get() != null) {
                break;
            }
            if (ch.exception.get() != null) {
                break;
            }
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                // Ignore.
            }
        }
        // Same wait for the server side.
        while (sh.counter < data.length) {
            if (sh.exception.get() != null) {
                break;
            }
            if (ch.exception.get() != null) {
                break;
            }
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                // Ignore.
            }
        }
        sh.channel.close().awaitUninterruptibly();
        ch.channel.close().awaitUninterruptibly();
        sc.close().awaitUninterruptibly();
        // Non-IO exceptions are reported first; plain IOExceptions (expected on
        // close) are only rethrown if nothing worse happened.
        if (sh.exception.get() != null && !(sh.exception.get() instanceof IOException)) {
            throw sh.exception.get();
        }
        if (ch.exception.get() != null && !(ch.exception.get() instanceof IOException)) {
            throw ch.exception.get();
        }
        if (sh.exception.get() != null) {
            throw sh.exception.get();
        }
        if (ch.exception.get() != null) {
            throw ch.exception.get();
        }
    }
    // Verifies received bytes against the shared payload; the server instance
    // (channel with a parent) also echoes the bytes back to the peer.
    private class EchoHandler extends ChannelInboundByteHandlerAdapter {
        volatile Channel channel;
        final AtomicReference<Throwable> exception = new AtomicReference<Throwable>();
        // Number of payload bytes verified so far.
        volatile int counter;
        private final boolean server;
        EchoHandler(boolean server) {
            this.server = server;
        }
        @Override
        public void channelActive(ChannelHandlerContext ctx)
                throws Exception {
            channel = ctx.channel();
        }
        @Override
        public void inboundBufferUpdated(
                ChannelHandlerContext ctx, ByteBuf in)
                throws Exception {
            byte[] actual = new byte[in.readableBytes()];
            in.readBytes(actual);
            int lastIdx = counter;
            for (int i = 0; i < actual.length; i ++) {
                assertEquals(data[i + lastIdx], actual[i]);
            }
            // Only the server (accepted child channel has a parent) echoes back.
            if (channel.parent() != null) {
                channel.write(Unpooled.wrappedBuffer(actual));
            }
            counter += actual.length;
        }
        @Override
        public void exceptionCaught(ChannelHandlerContext ctx,
                Throwable cause) throws Exception {
            if (logger.isWarnEnabled()) {
                logger.warn(
                        "Unexpected exception from the " +
                        (server? "server" : "client") + " side", cause);
            }
            // Only the first exception per side is kept.
            exception.compareAndSet(null, cause);
            ctx.close();
        }
    }
}
|
|
package com.csy.autolayout.widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import com.csy.autolayout.AutoLayoutInfo;
import com.csy.autolayout.R;
import com.csy.autolayout.utils.AutoLayoutHelper;
import com.csy.autolayout.utils.AutoUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
/**
 * Created by zhy on 15/12/10.
 *
 * Metro/tile style layout: each child is placed into the currently highest
 * available slot; slots are split, consumed and merged as children are laid
 * out.
 *
 * //do not use
 */
public class MetroLayout extends ViewGroup
{
    private final AutoLayoutHelper mHelper = new AutoLayoutHelper(this);
    /**
     * A free slot: the next child placed here starts at (left, top) and may
     * occupy up to width pixels horizontally.
     */
    private static class MetroBlock
    {
        int left;
        int top;
        int width;
    }
    /** Free placement slots, rebuilt on every layout pass. */
    private final List<MetroBlock> mAvailablePos = new ArrayList<>();
    /** Gap in pixels between adjacent children (percent-scaled). */
    private int mDivider;
    public MetroLayout(Context context, AttributeSet attrs)
    {
        super(context, attrs);
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.MetroLayout);
        mDivider = a.getDimensionPixelOffset(R.styleable.MetroLayout_metro_divider, 0);
        mDivider = AutoUtils.getPercentWidthSizeBigger(mDivider);
        a.recycle();
    }
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
    {
        // Debug tinting kept from the original (it was wrapped in `if (true)`).
        randomColor();
        if (!isInEditMode())
            mHelper.adjustChildren();
        measureChildren(widthMeasureSpec, heightMeasureSpec);
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }
    /** Debug aid: tints every child. The fixed seed makes colors repeatable. */
    private void randomColor()
    {
        Random r = new Random(255);
        for (int i = 0, n = getChildCount(); i < n; i++)
        {
            View v = getChildAt(i);
            v.setBackgroundColor(Color.argb(100, r.nextInt(), r.nextInt(), r.nextInt()));
        }
    }
    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b)
    {
        initAvailablePosition();
        int divider = mDivider;
        for (int i = 0, n = getChildCount(); i < n; i++)
        {
            View v = getChildAt(i);
            if (v.getVisibility() == View.GONE) continue;
            MetroBlock pos = findAvailablePos(v);
            int left = pos.left;
            int top = pos.top;
            int childWidth = v.getMeasuredWidth();
            int childHeight = v.getMeasuredHeight();
            int right = left + childWidth;
            int bottom = top + childHeight;
            v.layout(left, top, right, bottom);
            if (childWidth + divider < pos.width)
            {
                // Slot wider than the child: shrink it to the remainder.
                pos.left += childWidth + divider;
                pos.width -= childWidth + divider;
            } else
            {
                // Slot fully consumed by this child.
                mAvailablePos.remove(pos);
            }
            // The area directly below the child becomes a new slot.
            MetroBlock below = new MetroBlock();
            below.left = left;
            below.top = bottom + divider;
            below.width = childWidth;
            mAvailablePos.add(below);
            mergeAvailablePosition();
        }
    }
    /**
     * Merges adjacent slots that share the same top edge into one wider slot.
     *
     * Fix: the original accumulated the combined width into the block it then
     * removed, so the surviving block kept its old (too small) width. The
     * combined width is now stored on the surviving block.
     */
    private void mergeAvailablePosition()
    {
        if (mAvailablePos.size() <= 1) return;
        List<MetroBlock> obsolete = new ArrayList<>();
        MetroBlock current = mAvailablePos.get(0);
        for (int i = 1, n = mAvailablePos.size(); i < n; i++)
        {
            MetroBlock next = mAvailablePos.get(i);
            if (current.top == next.top)
            {
                next.width = current.width + next.width;
                next.left = current.left;
                obsolete.add(current);
            }
            current = next;
        }
        mAvailablePos.removeAll(obsolete);
    }
    /** Resets the slot list to a single full-width slot at the padding origin. */
    private void initAvailablePosition()
    {
        mAvailablePos.clear();
        MetroBlock first = new MetroBlock();
        first.left = getPaddingLeft();
        first.top = getPaddingTop();
        first.width = getMeasuredWidth();
        mAvailablePos.add(first);
    }
    /**
     * Returns the slot with the smallest top edge; a fresh full-width slot is
     * returned when no slots exist. (The view parameter is currently unused.)
     */
    private MetroBlock findAvailablePos(View view)
    {
        if (mAvailablePos.isEmpty())
        {
            // Fix: the original allocated this block unconditionally even
            // when an existing slot was returned.
            MetroBlock p = new MetroBlock();
            p.left = getPaddingLeft();
            p.top = getPaddingTop();
            p.width = getMeasuredWidth();
            return p;
        }
        MetroBlock best = mAvailablePos.get(0);
        for (MetroBlock candidate : mAvailablePos)
        {
            if (candidate.top < best.top)
            {
                best = candidate;
            }
        }
        return best;
    }
    @Override
    public LayoutParams generateLayoutParams(AttributeSet attrs)
    {
        return new LayoutParams(getContext(), attrs);
    }
    /** Margin layout params carrying the parsed auto-layout percentage info. */
    public static class LayoutParams extends MarginLayoutParams
            implements AutoLayoutHelper.AutoLayoutParams
    {
        private AutoLayoutInfo mAutoLayoutInfo;
        public LayoutParams(Context c, AttributeSet attrs)
        {
            super(c, attrs);
            mAutoLayoutInfo = AutoLayoutHelper.getAutoLayoutInfo(c, attrs);
        }
        public LayoutParams(int width, int height)
        {
            super(width, height);
        }
        public LayoutParams(ViewGroup.LayoutParams source)
        {
            super(source);
        }
        public LayoutParams(MarginLayoutParams source)
        {
            super(source);
        }
        public LayoutParams(LayoutParams source)
        {
            this((ViewGroup.LayoutParams) source);
            // Copy constructor also carries over the parsed auto-layout info.
            mAutoLayoutInfo = source.mAutoLayoutInfo;
        }
        @Override
        public AutoLayoutInfo getAutoLayoutInfo()
        {
            return mAutoLayoutInfo;
        }
    }
}
|
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.communication.implementation;
import com.azure.core.http.rest.Response;
import com.azure.core.management.Region;
import com.azure.core.management.SystemData;
import com.azure.core.util.Context;
import com.azure.resourcemanager.communication.fluent.models.CommunicationServiceResourceInner;
import com.azure.resourcemanager.communication.models.CommunicationServiceKeys;
import com.azure.resourcemanager.communication.models.CommunicationServiceResource;
import com.azure.resourcemanager.communication.models.LinkNotificationHubParameters;
import com.azure.resourcemanager.communication.models.LinkedNotificationHub;
import com.azure.resourcemanager.communication.models.ProvisioningState;
import com.azure.resourcemanager.communication.models.RegenerateKeyParameters;
import java.util.Collections;
import java.util.Map;
/**
 * Wraps a {@link CommunicationServiceResourceInner} model and delegates all
 * resource operations (create/update/refresh/key management) to the service
 * manager's communication-services client.
 */
public final class CommunicationServiceResourceImpl
    implements CommunicationServiceResource,
        CommunicationServiceResource.Definition,
        CommunicationServiceResource.Update {
    private CommunicationServiceResourceInner innerObject;
    private final com.azure.resourcemanager.communication.CommunicationManager serviceManager;
    private String resourceGroupName;
    private String communicationServiceName;
    /** Creation-flow constructor: starts from a fresh inner model. */
    CommunicationServiceResourceImpl(
        String name, com.azure.resourcemanager.communication.CommunicationManager serviceManager) {
        this.innerObject = new CommunicationServiceResourceInner();
        this.serviceManager = serviceManager;
        this.communicationServiceName = name;
    }
    /** Wrapping constructor: derives the resource names from the inner model's id. */
    CommunicationServiceResourceImpl(
        CommunicationServiceResourceInner innerObject,
        com.azure.resourcemanager.communication.CommunicationManager serviceManager) {
        this.innerObject = innerObject;
        this.serviceManager = serviceManager;
        this.resourceGroupName = Utils.getValueFromIdByName(innerObject.id(), "resourceGroups");
        this.communicationServiceName = Utils.getValueFromIdByName(innerObject.id(), "communicationServices");
    }
    public String id() {
        return innerModel().id();
    }
    public String name() {
        return innerModel().name();
    }
    public String type() {
        return innerModel().type();
    }
    public SystemData systemData() {
        return innerModel().systemData();
    }
    public String location() {
        return innerModel().location();
    }
    public Map<String, String> tags() {
        // Never hand out the inner mutable map directly.
        Map<String, String> inner = innerModel().tags();
        return inner == null ? Collections.emptyMap() : Collections.unmodifiableMap(inner);
    }
    public ProvisioningState provisioningState() {
        return innerModel().provisioningState();
    }
    public String hostname() {
        return innerModel().hostname();
    }
    public String dataLocation() {
        return innerModel().dataLocation();
    }
    public String notificationHubId() {
        return innerModel().notificationHubId();
    }
    public String version() {
        return innerModel().version();
    }
    public String immutableResourceId() {
        return innerModel().immutableResourceId();
    }
    public Region region() {
        return Region.fromName(regionName());
    }
    public String regionName() {
        return location();
    }
    public CommunicationServiceResourceInner innerModel() {
        return this.innerObject;
    }
    private com.azure.resourcemanager.communication.CommunicationManager manager() {
        return this.serviceManager;
    }
    public CommunicationServiceResourceImpl withExistingResourceGroup(String resourceGroupName) {
        this.resourceGroupName = resourceGroupName;
        return this;
    }
    public CommunicationServiceResource create() {
        return create(Context.NONE);
    }
    public CommunicationServiceResource create(Context context) {
        this.innerObject = serviceManager.serviceClient().getCommunicationServices()
            .createOrUpdate(resourceGroupName, communicationServiceName, innerModel(), context);
        return this;
    }
    public CommunicationServiceResourceImpl update() {
        // The fluent update flow mutates this instance; apply() commits it.
        return this;
    }
    public CommunicationServiceResource apply() {
        return apply(Context.NONE);
    }
    public CommunicationServiceResource apply(Context context) {
        this.innerObject = serviceManager.serviceClient().getCommunicationServices()
            .updateWithResponse(resourceGroupName, communicationServiceName, innerModel(), context)
            .getValue();
        return this;
    }
    public CommunicationServiceResource refresh() {
        return refresh(Context.NONE);
    }
    public CommunicationServiceResource refresh(Context context) {
        this.innerObject = serviceManager.serviceClient().getCommunicationServices()
            .getByResourceGroupWithResponse(resourceGroupName, communicationServiceName, context)
            .getValue();
        return this;
    }
    public LinkedNotificationHub linkNotificationHub() {
        return serviceManager.communicationServices().linkNotificationHub(resourceGroupName, communicationServiceName);
    }
    public Response<LinkedNotificationHub> linkNotificationHubWithResponse(
        LinkNotificationHubParameters linkNotificationHubParameters, Context context) {
        return serviceManager.communicationServices()
            .linkNotificationHubWithResponse(
                resourceGroupName, communicationServiceName, linkNotificationHubParameters, context);
    }
    public CommunicationServiceKeys listKeys() {
        return serviceManager.communicationServices().listKeys(resourceGroupName, communicationServiceName);
    }
    public Response<CommunicationServiceKeys> listKeysWithResponse(Context context) {
        return serviceManager.communicationServices()
            .listKeysWithResponse(resourceGroupName, communicationServiceName, context);
    }
    public CommunicationServiceKeys regenerateKey(RegenerateKeyParameters parameters) {
        return serviceManager.communicationServices()
            .regenerateKey(resourceGroupName, communicationServiceName, parameters);
    }
    public Response<CommunicationServiceKeys> regenerateKeyWithResponse(
        RegenerateKeyParameters parameters, Context context) {
        return serviceManager.communicationServices()
            .regenerateKeyWithResponse(resourceGroupName, communicationServiceName, parameters, context);
    }
    public CommunicationServiceResourceImpl withRegion(Region location) {
        innerModel().withLocation(location.toString());
        return this;
    }
    public CommunicationServiceResourceImpl withRegion(String location) {
        innerModel().withLocation(location);
        return this;
    }
    public CommunicationServiceResourceImpl withTags(Map<String, String> tags) {
        innerModel().withTags(tags);
        return this;
    }
    public CommunicationServiceResourceImpl withDataLocation(String dataLocation) {
        innerModel().withDataLocation(dataLocation);
        return this;
    }
}
|
|
package com.github.crazyorr.rollinglogger;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* RollingLogger is for writing logs into a fix number of files in rolling
* order, in case the log files' size grows immensely. You can specify the path
* to store the log files, the name of log files, the maximum size of each
* individual log file, as well as the maximum log file count.
*
* @author Lei Wang
*
*/
public class RollingLogger {

    /** Separator between the log file name and its numeric index. */
    private static final String COUNT_SEPARATOR = "_";

    /** Platform line separator, appended by {@link #writeLogLine(String)}. */
    private static final String LINE_SEPARATOR = System
            .getProperty("line.separator");

    /** Directory that holds all log files. */
    private String mDir;

    /** Base log file name; an index suffix ("_0", "_1", ...) is appended per file. */
    private String mName;

    /** Individual log file's maximum size (in bytes). */
    private long mFileMaxSize;

    /** Maximum number of log files kept; the oldest content is discarded beyond this. */
    private int mMaxFileCount;

    /** File currently being written to (always the index-0 file). */
    private File mFile;

    /**
     * Construct a RollingLogger. The log directory is created if it does not
     * exist yet.
     *
     * @param dir
     *            Log file directory
     * @param name
     *            Log file name
     * @param fileMaxSize
     *            Individual log file's maximum size (in bytes); must be > 0
     * @param maxfileCount
     *            Maximum log file count; must be > 0
     * @throws IllegalArgumentException
     *             if fileMaxSize or maxfileCount is not positive
     */
    public RollingLogger(String dir, String name, long fileMaxSize,
            int maxfileCount) {
        if (fileMaxSize <= 0) {
            throw new IllegalArgumentException("file size must > 0 byte");
        }
        if (maxfileCount <= 0) {
            throw new IllegalArgumentException("file count must > 0");
        }
        mDir = dir;
        mName = name;
        mFileMaxSize = fileMaxSize;
        mMaxFileCount = maxfileCount;
        File dirFile = new File(mDir);
        if (!dirFile.exists()) {
            dirFile.mkdirs();
        }
        createNewFile();
    }

    /**
     * Points {@link #mFile} at the index-0 log file. Only the {@link File}
     * handle is created here; the file itself is created on first write.
     */
    private void createNewFile() {
        mFile = new File(mDir, addIndexToLogFileName(mName, 0));
    }

    /** Composes a log file name from the base name and an index, e.g. "app_0". */
    private String addIndexToLogFileName(String fileName, int index) {
        return fileName + COUNT_SEPARATOR + index;
    }

    /**
     * Write a log. (Synchronized)
     *
     * <p>While the current file is below the size limit the log is appended;
     * otherwise all files are rotated one index up and the log starts a fresh
     * index-0 file.</p>
     *
     * @param log
     *            The log content
     * @throws IOException
     *             if the log cannot be written
     */
    public synchronized void writeLog(String log) throws IOException {
        if (mFile.length() < mFileMaxSize) {
            writeLogToFile(log, mFile, true);
        } else {
            rotateLogFiles();
            createNewFile();
            writeLogToFile(log, mFile, false);
        }
    }

    /**
     * Write a log in a line. (Synchronized)
     *
     * @param log
     *            The log content
     * @throws IOException
     *             if the log cannot be written
     */
    public synchronized void writeLogLine(String log) throws IOException {
        writeLog(log + LINE_SEPARATOR);
    }

    /**
     * Writes the log content to the given file. If the parent directory has
     * gone missing (e.g. deleted externally after construction), it is
     * recreated and the write is retried once instead of silently dropping
     * the entry, which is what the previous implementation did.
     */
    private void writeLogToFile(String log, File file, boolean append)
            throws IOException {
        try {
            doWrite(log, file, append);
        } catch (FileNotFoundException e) {
            // Missing parent directory is the usual cause; recreate and retry.
            file.getParentFile().mkdirs();
            doWrite(log, file, append);
        }
    }

    /**
     * Performs a single write, guaranteeing the stream is closed even when the
     * write fails (the original leaked the stream on failure).
     */
    private void doWrite(String log, File file, boolean append)
            throws IOException {
        FileOutputStream fos = new FileOutputStream(file, append);
        try {
            // NOTE: platform default charset, kept for backward compatibility.
            fos.write(log.getBytes());
        } finally {
            fos.close();
        }
    }

    /**
     * Rotate log files, move each file at <code>index</code> to
     * <code>index+1</code>, if <code>index+1</code> is larger than the maximum
     * log file count, then the file at <code>index+1</code> is discarded.
     * In this way the number of log files never exceeds the limit.
     *
     * @throws RuntimeException
     *             if renaming any log file fails
     */
    private void rotateLogFiles() {
        File[] files = getFilesList();
        if ((files != null) && (files.length > 0)) {
            // Sort ascending by index so iterating backwards moves the
            // highest-indexed (oldest) file first.
            List<File> fileList = Arrays.asList(files);
            Collections.sort(fileList, new Comparator<File>() {
                @Override
                public int compare(File lhs, File rhs) {
                    return getLogFileIndex(lhs.getName())
                            - getLogFileIndex(rhs.getName());
                }
            });
            int count = fileList.size();
            for (int index = count - 1; index >= 0; index--) {
                File file = fileList.get(index);
                boolean isSuccess = true;
                if (index < count - 1) {
                    File nextfile = fileList.get(index + 1);
                    // Delete the next file and rename the file with it
                    if (nextfile.exists()) {
                        nextfile.delete();
                    }
                    isSuccess = file.renameTo(nextfile);
                } else {
                    // The log file count doesn't reach maximum
                    if (count < mMaxFileCount) {
                        // Create a new file at the end
                        isSuccess = file.renameTo(new File(mDir,
                                addIndexToLogFileName(mName, index + 1)));
                    }
                    // else: the highest-indexed file is simply overwritten by
                    // the rename from index-1 on the next loop iteration.
                }
                if (!isSuccess) {
                    throw new RuntimeException("Rotating log files failed.");
                }
            }
        }
    }

    /** Lists all files in the log directory whose name starts with the base name. */
    private File[] getFilesList() {
        File dir = new File(mDir);
        return dir.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.startsWith(mName);
            }
        });
    }

    /**
     * Get the log file index from the composed log file name.
     *
     * @param fileName
     *            Log file name with index appended
     * @return Log file index, or 0 when the suffix cannot be parsed
     */
    private int getLogFileIndex(String fileName) {
        int fileIndex = 0;
        try {
            fileIndex = Integer.parseInt(fileName.substring(fileName
                    .lastIndexOf(COUNT_SEPARATOR) + 1));
        } catch (NumberFormatException e) {
            e.printStackTrace();
        }
        return fileIndex;
    }
}
|
|
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.security.management.client.widgets.management.list;
import javax.annotation.PostConstruct;
import javax.enterprise.context.Dependent;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.dom.client.MouseOutHandler;
import com.google.gwt.event.dom.client.MouseOverEvent;
import com.google.gwt.event.dom.client.MouseOverHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasVerticalAlignment;
import com.google.gwt.user.client.ui.HorizontalPanel;
import org.gwtbootstrap3.client.ui.AnchorListItem;
import org.gwtbootstrap3.client.ui.Badge;
import org.gwtbootstrap3.client.ui.CheckBox;
import org.gwtbootstrap3.client.ui.Heading;
import org.gwtbootstrap3.client.ui.Label;
import org.gwtbootstrap3.client.ui.LinkedGroup;
import org.gwtbootstrap3.client.ui.LinkedGroupItem;
import org.gwtbootstrap3.client.ui.Pagination;
import org.gwtbootstrap3.client.ui.Row;
import org.gwtbootstrap3.client.ui.constants.ButtonSize;
import org.gwtbootstrap3.client.ui.constants.ButtonType;
import org.gwtbootstrap3.client.ui.constants.HeadingSize;
import org.gwtbootstrap3.client.ui.constants.IconPosition;
import org.uberfire.ext.security.management.client.resources.i18n.UsersManagementWidgetsConstants;
/**
* <p>View implementation for listing entities with pagination features.</p>
* @since 0.8.0
*/
@Dependent
public class EntitiesListView extends Composite
        implements
        EntitiesList.View {

    private static EntitiesListViewBinder uiBinder = GWT.create(EntitiesListViewBinder.class);

    @UiField
    EntitiesListViewStyle style;
    @UiField
    Row emptyEntitiesRow;
    @UiField
    Label emptyEntitiesLabel;
    @UiField
    LinkedGroup entitiesList;
    @UiField
    Pagination pagination;
    @UiField
    AnchorListItem firstPageAnchor;
    @UiField
    AnchorListItem prevPageAnchor;
    @UiField
    AnchorListItem currentPageAnchor;
    @UiField
    AnchorListItem nextPageAnchor;
    @UiField
    AnchorListItem lastPageAnchor;
    @UiField
    Badge totalBadge;
    @UiField
    HTML totalText;

    private EntitiesList presenter;

    // Registrations for the pagination anchors' click handlers. Each refresh
    // removes the previous handler before adding a new one, so handlers that
    // captured outdated enablement flags do not accumulate.
    private HandlerRegistration firstPageAnchorClickHandlerRegistration = null;
    private HandlerRegistration prevPageAnchorClickHandlerRegistration = null;
    private HandlerRegistration nextPageAnchorClickHandlerRegistration = null;
    private HandlerRegistration lastPageAnchorClickHandlerRegistration = null;

    @PostConstruct
    protected void initUIBinder() {
        initWidget(uiBinder.createAndBindUi(this));
    }

    @Override
    public void init(final EntitiesList presenter) {
        this.presenter = presenter;
    }

    /**
     * Configures the empty-list placeholder text and the pagination widgets,
     * clearing any previously listed entities first.
     */
    @Override
    public EntitiesList.View configure(final String emptyEntitiesText,
                                       final EntitiesList.PaginationConstraints paginationConstraints) {
        clear();
        final String emptyText = emptyEntitiesText != null ? emptyEntitiesText : UsersManagementWidgetsConstants.INSTANCE.emptyEntities();
        emptyEntitiesLabel.setText(emptyText);
        applyPaginationConstraints(paginationConstraints);
        return this;
    }

    /**
     * Adds an entity row to the list and hides the empty-list placeholder.
     */
    @Override
    public EntitiesList.View add(final int index,
                                 final String identifier,
                                 final String title,
                                 final HeadingSize titleSize,
                                 final boolean canRead,
                                 final boolean canRemove,
                                 final boolean canSelect,
                                 final boolean isSelected) {
        addEntityInList(index,
                        identifier,
                        title,
                        titleSize,
                        canRead,
                        canRemove,
                        canSelect,
                        isSelected);
        emptyEntitiesRow.setVisible(false);
        return this;
    }

    /**
     * Removes all entity rows and shows the empty-list placeholder again.
     */
    @Override
    public EntitiesList.View clear() {
        entitiesList.clear();
        emptyEntitiesRow.setVisible(true);
        return this;
    }

    /**
     * Builds a single entity row: an optional selection checkbox, the entity
     * title heading and an optional remove button that is revealed on hover.
     */
    private void addEntityInList(final int index,
                                 final String id,
                                 final String title,
                                 final HeadingSize titleSize,
                                 final boolean canRead,
                                 final boolean canRemove,
                                 final boolean canSelect,
                                 final boolean isSelected) {
        final LinkedGroupItem groupItem = new LinkedGroupItem();
        groupItem.addStyleName(style.entityGroup());
        // Clicking the row itself opens the entity, when reading is allowed.
        if (canRead) {
            groupItem.addClickHandler(new ClickHandler() {
                @Override
                public void onClick(final ClickEvent clickEvent) {
                    presenter.onReadEntity(id);
                }
            });
        }
        final HorizontalPanel groupPanel = new HorizontalPanel();
        groupPanel.setVerticalAlignment(HasVerticalAlignment.ALIGN_MIDDLE);
        groupPanel.addStyleName(style.entityPanel());
        // Entity selection feature.
        if (canSelect) {
            final CheckBox checkBox = new CheckBox();
            checkBox.addStyleName(style.entityListButton());
            checkBox.addStyleName(style.left());
            checkBox.setValue(isSelected);
            checkBox.addDomHandler(new ClickHandler() {
                @Override
                public void onClick(ClickEvent clickEvent) {
                    // Do not let the click bubble to the row's read handler.
                    clickEvent.stopPropagation();
                    presenter.onSelectEntity(id,
                                             index,
                                             checkBox.getValue());
                }
            },
            ClickEvent.getType());
            groupPanel.add(checkBox);
        }
        // Entity title.
        final Heading heading = new Heading(titleSize);
        heading.setText(title);
        heading.addStyleName(style.entityListTitle());
        groupPanel.add(heading);
        // Entity remove from list feature.
        if (canRemove) {
            // The remove button.
            final org.gwtbootstrap3.client.ui.Button removeButton = new org.gwtbootstrap3.client.ui.Button();
            removeButton.addStyleName(style.entityListButton());
            removeButton.setSize(ButtonSize.EXTRA_SMALL);
            removeButton.setIconPosition(IconPosition.RIGHT);
            removeButton.setType(ButtonType.DEFAULT);
            removeButton.setText(UsersManagementWidgetsConstants.INSTANCE.remove());
            removeButton.setTitle(UsersManagementWidgetsConstants.INSTANCE.remove());
            removeButton.addClickHandler(new ClickHandler() {
                @Override
                public void onClick(final ClickEvent clickEvent) {
                    presenter.onRemoveEntity(id);
                }
            });
            removeButton.setVisible(false);
            groupPanel.add(removeButton);
            // Show the button on mouse over.
            groupItem.addDomHandler(new MouseOverHandler() {
                @Override
                public void onMouseOver(final MouseOverEvent mouseOverEvent) {
                    mouseOverEvent.stopPropagation();
                    removeButton.setVisible(true);
                }
            },
            MouseOverEvent.getType());
            // Hide the button on mouse out.
            groupItem.addDomHandler(new MouseOutHandler() {
                @Override
                public void onMouseOut(final MouseOutEvent mouseOutEvent) {
                    mouseOutEvent.stopPropagation();
                    removeButton.setVisible(false);
                }
            },
            MouseOutEvent.getType());
        }
        groupItem.add(groupPanel);
        entitiesList.add(groupItem);
    }

    /**
     * Applies the given pagination constraints to the pager widgets. Stale
     * anchor click handlers are unregistered before new ones are added so each
     * refresh captures the current enablement flags. The whole pager is hidden
     * when there is nothing to paginate.
     */
    private void applyPaginationConstraints(final EntitiesList.PaginationConstraints constraints) {
        boolean existsPagination = constraints != null;
        if (existsPagination) {
            final boolean isFirstPageEnabled = constraints.isFirstPageEnabled();
            final boolean isFirstPageVisible = constraints.isFirstPageVisible();
            final boolean isPrevPageEnabled = constraints.isPrevPageEnabled();
            final boolean isPrevPageVisible = constraints.isPrevPageVisible();
            final int currentPage = constraints.getCurrentPage();
            final boolean isNextPageEnabled = constraints.isNextPageEnabled();
            final boolean isNextPageVisible = constraints.isNextPageVisible();
            final boolean isLastPageEnabled = constraints.isLastPageEnabled();
            final boolean isLastPageVisible = constraints.isLastPageVisible();
            final Integer total = constraints.getTotal();
            // Only show pagination if necessary.
            existsPagination = isPrevPageVisible || isNextPageVisible;
            if (existsPagination) {
                // First page anchor.
                firstPageAnchor.setEnabled(isFirstPageEnabled);
                firstPageAnchor.setVisible(isFirstPageVisible);
                if (firstPageAnchorClickHandlerRegistration != null) {
                    firstPageAnchorClickHandlerRegistration.removeHandler();
                }
                firstPageAnchorClickHandlerRegistration = firstPageAnchor.addClickHandler(new ClickHandler() {
                    @Override
                    public void onClick(final ClickEvent clickEvent) {
                        clickEvent.stopPropagation();
                        if (isFirstPageEnabled) {
                            presenter.onGoToFirstPage();
                        }
                    }
                });
                // Previous page anchor.
                prevPageAnchor.setEnabled(isPrevPageEnabled);
                prevPageAnchor.setVisible(isPrevPageVisible);
                if (prevPageAnchorClickHandlerRegistration != null) {
                    prevPageAnchorClickHandlerRegistration.removeHandler();
                }
                prevPageAnchorClickHandlerRegistration = prevPageAnchor.addClickHandler(new ClickHandler() {
                    @Override
                    public void onClick(final ClickEvent clickEvent) {
                        clickEvent.stopPropagation();
                        if (isPrevPageEnabled) {
                            presenter.onGoToPrevPage();
                        }
                    }
                });
                // Current page anchor.
                currentPageAnchor.setText(Integer.toString(currentPage));
                // Next page anchor.
                nextPageAnchor.setEnabled(isNextPageEnabled);
                nextPageAnchor.setVisible(isNextPageVisible);
                if (nextPageAnchorClickHandlerRegistration != null) {
                    nextPageAnchorClickHandlerRegistration.removeHandler();
                }
                nextPageAnchorClickHandlerRegistration = nextPageAnchor.addClickHandler(new ClickHandler() {
                    @Override
                    public void onClick(ClickEvent clickEvent) {
                        clickEvent.stopPropagation();
                        if (isNextPageEnabled) {
                            presenter.onGoToNextPage();
                        }
                    }
                });
                // Last page anchor.
                lastPageAnchor.setEnabled(isLastPageEnabled);
                lastPageAnchor.setVisible(isLastPageVisible);
                if (lastPageAnchorClickHandlerRegistration != null) {
                    lastPageAnchorClickHandlerRegistration.removeHandler();
                }
                lastPageAnchorClickHandlerRegistration = lastPageAnchor.addClickHandler(new ClickHandler() {
                    @Override
                    public void onClick(final ClickEvent clickEvent) {
                        clickEvent.stopPropagation();
                        if (isLastPageEnabled) {
                            presenter.onGoToLastPage();
                        }
                    }
                });
            }
            // Show total if available.
            if (total == null || total == 0) {
                totalBadge.setVisible(false);
            } else {
                final String t = UsersManagementWidgetsConstants.INSTANCE.total() + " " + total.toString() + " " + presenter.getEntityType();
                totalText.setText(t);
                totalBadge.setVisible(true);
            }
        }
        pagination.setVisible(existsPagination);
    }

    interface EntitiesListViewBinder
            extends
            UiBinder<Row, EntitiesListView> {
    }

    interface EntitiesListViewStyle extends CssResource {
        String entityPanel();
        String entityGroup();
        String entitiesList();
        String entityListTitle();
        String entityListButton();
        String left();
    }
}
|
|
package org.apache.cassandra.db.compaction;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.io.DataOutput;
import java.io.IOError;
import java.io.IOException;
import java.security.MessageDigest;
import java.util.Iterator;
import java.util.List;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterators;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.columniterator.IColumnIterator;
import org.apache.cassandra.db.columniterator.ICountableColumnIterator;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.io.util.IIterableColumns;
import org.apache.cassandra.utils.MergeIterator;
/**
* LazilyCompactedRow only computes the row bloom filter and column index in memory
* (at construction time); it does this by reading one column at a time from each
* of the rows being compacted, and merging them as it does so. So the most we have
* in memory at a time is the bloom filter, the index, and one column from each
* pre-compaction row.
*
* When write() or update() is called, a second pass is made over the pre-compaction
* rows to write the merged columns or update the hash, again with at most one column
* from each row deserialized at a time.
*/
public class LazilyCompactedRow extends AbstractCompactedRow implements IIterableColumns
{
    private static Logger logger = LoggerFactory.getLogger(LazilyCompactedRow.class);

    // Pre-compaction row iterators being merged; reset() and re-consumed on every pass.
    private final List<? extends ICountableColumnIterator> rows;
    private final CompactionController controller;
    // Cached result of controller.shouldPurge(key) for this row's key.
    private final boolean shouldPurge;
    // Serialized row header, built via ColumnIndexer at construction time.
    private final DataOutputBuffer headerBuffer;
    // Template CF carrying metadata/deletion info accumulated from all input rows.
    private ColumnFamily emptyColumnFamily;
    // Reducer installed by the most recent iterator() pass; inspected afterwards
    // for per-pass statistics (count, serialized size, max timestamp).
    private Reducer reducer;
    private int columnCount;
    private long maxTimestamp;
    private long columnSerializedSize;
    // True once close() has closed the underlying row iterators.
    private boolean closed;

    public LazilyCompactedRow(CompactionController controller, List<? extends ICountableColumnIterator> rows)
    {
        super(rows.get(0).getKey());
        this.rows = rows;
        this.controller = controller;
        this.shouldPurge = controller.shouldPurge(key);
        // NOTE(review): folds each input row's CF-level deletion info into the
        // template CF via ColumnFamily.delete — presumably merging tombstone
        // metadata; confirm against ColumnFamily.delete's contract.
        for (IColumnIterator row : rows)
        {
            ColumnFamily cf = row.getColumnFamily();
            if (emptyColumnFamily == null)
                emptyColumnFamily = cf;
            else
                emptyColumnFamily.delete(cf);
        }
        // initialize row header so isEmpty can be called
        headerBuffer = new DataOutputBuffer();
        ColumnIndexer.serialize(this, headerBuffer);
        // reach into the reducer used during iteration to get column count, size, max column timestamp
        // (however, if there are zero columns, iterator() will not be called by ColumnIndexer and reducer will be null)
        columnCount = reducer == null ? 0 : reducer.size;
        columnSerializedSize = reducer == null ? 0 : reducer.serializedSize;
        maxTimestamp = reducer == null ? Long.MIN_VALUE : reducer.maxTimestampSeen;
        reducer = null;
    }

    /**
     * Second pass: serializes the merged row (size, header, CF info, column
     * count and each merged column) to the given output, asserting that the
     * column size seen on this pass matches the size computed at construction.
     * Closes the underlying iterators before returning.
     *
     * @return the total serialized size of the row data, in bytes
     */
    public long write(DataOutput out) throws IOException
    {
        assert !closed;
        DataOutputBuffer clockOut = new DataOutputBuffer();
        ColumnFamily.serializer().serializeCFInfo(emptyColumnFamily, clockOut);
        long dataSize = headerBuffer.getLength() + clockOut.getLength() + columnSerializedSize;
        if (logger.isDebugEnabled())
            logger.debug(String.format("header / clock / column sizes are %s / %s / %s",
                       headerBuffer.getLength(), clockOut.getLength(), columnSerializedSize));
        assert dataSize > 0;
        out.writeLong(dataSize);
        out.write(headerBuffer.getData(), 0, headerBuffer.getLength());
        out.write(clockOut.getData(), 0, clockOut.getLength());
        out.writeInt(columnCount);
        // Re-merge the source rows and serialize each surviving column.
        Iterator<IColumn> iter = iterator();
        while (iter.hasNext())
        {
            IColumn column = iter.next();
            emptyColumnFamily.getColumnSerializer().serialize(column, out);
        }
        // Sanity check: both passes must have produced the same column bytes.
        long secondPassColumnSize = reducer == null ? 0 : reducer.serializedSize;
        assert secondPassColumnSize == columnSerializedSize
               : "originally calculated column size of " + columnSerializedSize + " but now it is " + secondPassColumnSize;
        close();
        return dataSize;
    }

    /**
     * Second pass for digesting: folds the CF info, column count and every
     * merged column into the given digest, then closes the iterators.
     */
    public void update(MessageDigest digest)
    {
        assert !closed;
        // no special-case for rows.size == 1, we're actually skipping some bytes here so just
        // blindly updating everything wouldn't be correct
        DataOutputBuffer out = new DataOutputBuffer();
        try
        {
            ColumnFamily.serializer().serializeCFInfo(emptyColumnFamily, out);
            out.writeInt(columnCount);
            digest.update(out.getData(), 0, out.getLength());
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
        Iterator<IColumn> iter = iterator();
        while (iter.hasNext())
        {
            iter.next().updateDigest(digest);
        }
        close();
    }

    /**
     * True when the merged row has no columns and its CF-level state is
     * irrelevant (fully GC-able when purging, or carrying no tombstone).
     */
    public boolean isEmpty()
    {
        boolean cfIrrelevant = shouldPurge
                             ? ColumnFamilyStore.removeDeletedCF(emptyColumnFamily, controller.gcBefore) == null
                             : !emptyColumnFamily.isMarkedForDelete(); // tombstones are relevant
        return cfIrrelevant && columnCount == 0;
    }

    /** Upper bound: sum of the input rows' column counts (before merging). */
    public int getEstimatedColumnCount()
    {
        int n = 0;
        for (ICountableColumnIterator row : rows)
            n += row.getColumnCount();
        return n;
    }

    public AbstractType<?> getComparator()
    {
        return emptyColumnFamily.getComparator();
    }

    /**
     * Starts a fresh merge pass over all input rows: resets each source
     * iterator, installs a new Reducer, and filters out columns the reducer
     * dropped (signalled by returning null).
     */
    public Iterator<IColumn> iterator()
    {
        for (ICountableColumnIterator row : rows)
            row.reset();
        reducer = new Reducer();
        Iterator<IColumn> iter = MergeIterator.get(rows, getComparator().columnComparator, reducer);
        return Iterators.filter(iter, Predicates.notNull());
    }

    public int columnCount()
    {
        return columnCount;
    }

    public long maxTimestamp()
    {
        return maxTimestamp;
    }

    /** Closes every source row iterator, wrapping IOException in IOError. */
    private void close()
    {
        for (IColumnIterator row : rows)
        {
            try
            {
                row.close();
            }
            catch (IOException e)
            {
                throw new IOError(e);
            }
        }
        closed = true;
    }

    /**
     * Merges same-named columns from the input rows and drops columns that are
     * entirely purged; tracks the count, serialized size and maximum timestamp
     * of the columns that survive the merge.
     */
    private class Reducer extends MergeIterator.Reducer<IColumn, IColumn>
    {
        // Scratch CF that collects the versions of the current column.
        ColumnFamily container = emptyColumnFamily.cloneMeShallow();
        long serializedSize = 4; // int for column count
        int size = 0;
        long maxTimestampSeen = Long.MIN_VALUE;

        public void reduce(IColumn current)
        {
            container.addColumn(current);
        }

        protected IColumn getReduced()
        {
            ColumnFamily purged = PrecompactedRow.removeDeletedAndOldShards(key, shouldPurge, controller, container);
            if (purged == null || !purged.iterator().hasNext())
            {
                // Column was removed entirely by purging; null is filtered out
                // by iterator().
                container.clear();
                return null;
            }
            IColumn reduced = purged.iterator().next();
            container.clear();
            serializedSize += reduced.serializedSize();
            size++;
            maxTimestampSeen = Math.max(maxTimestampSeen, reduced.maxTimestamp());
            return reduced;
        }
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Expression;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.processor.saga.SagaProcessorBuilder;
import org.apache.camel.saga.CamelSagaService;
import org.apache.camel.saga.CamelSagaStep;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Enables sagas on the route
*
* @version
*/
@Metadata(label = "eip,routing")
@XmlRootElement(name = "saga")
@XmlAccessorType(XmlAccessType.FIELD)
public class SagaDefinition extends OutputDefinition<SagaDefinition> {

    private static final Logger LOG = LoggerFactory.getLogger(SagaDefinition.class);

    @XmlAttribute
    @Metadata(defaultValue = "REQUIRED")
    private SagaPropagation propagation;
    @XmlAttribute
    @Metadata(defaultValue = "AUTO")
    private SagaCompletionMode completionMode;
    @XmlAttribute
    private Long timeoutInMilliseconds;
    @XmlElement
    private SagaActionUriDefinition compensation;
    @XmlElement
    private SagaActionUriDefinition completion;
    @XmlElement(name = "option")
    private List<SagaOptionDefinition> options;
    @XmlTransient
    private CamelSagaService sagaService; // TODO add ref for xml configuration

    public SagaDefinition() {
    }

    /**
     * Builds the saga processor for this definition: resolves the optional
     * compensation/completion endpoints, gathers the option expressions,
     * registers the resulting {@link CamelSagaStep} with the saga service, and
     * wraps the child processor with saga semantics via
     * {@link SagaProcessorBuilder}.
     */
    @Override
    public Processor createProcessor(RouteContext routeContext) throws Exception {
        Optional<Endpoint> compensationEndpoint = Optional.ofNullable(this.compensation)
                .map(SagaActionUriDefinition::getUri)
                .map(routeContext::resolveEndpoint);
        Optional<Endpoint> completionEndpoint = Optional.ofNullable(this.completion)
                .map(SagaActionUriDefinition::getUri)
                .map(routeContext::resolveEndpoint);
        Map<String, Expression> optionsMap = new TreeMap<>();
        if (this.options != null) {
            for (SagaOptionDefinition optionDef : this.options) {
                String optionName = optionDef.getOptionName();
                Expression expr = optionDef.getExpression();
                optionsMap.put(optionName, expr);
            }
        }
        CamelSagaStep step = new CamelSagaStep(compensationEndpoint, completionEndpoint, optionsMap, Optional.ofNullable(timeoutInMilliseconds));
        // Apply defaults when the attributes were not set explicitly.
        SagaPropagation propagation = this.propagation;
        if (propagation == null) {
            // default propagation mode
            propagation = SagaPropagation.REQUIRED;
        }
        SagaCompletionMode completionMode = this.completionMode;
        if (completionMode == null) {
            // default completion mode
            completionMode = SagaCompletionMode.defaultCompletionMode();
        }
        Processor childProcessor = this.createChildProcessor(routeContext, true);
        CamelSagaService camelSagaService = findSagaService(routeContext.getCamelContext());
        camelSagaService.registerStep(step);
        return new SagaProcessorBuilder()
                .camelContext(routeContext.getCamelContext())
                .childProcessor(childProcessor)
                .sagaService(camelSagaService)
                .step(step)
                .propagation(propagation)
                .completionMode(completionMode)
                .build();
    }

    @Override
    public boolean isAbstract() {
        return true;
    }

    @Override
    public boolean isTopLevelOnly() {
        return true;
    }

    @Override
    public boolean isWrappingEntireOutput() {
        return true;
    }

    /** Returns "saga" or "saga[...]" with the populated fields appended. */
    @Override
    public String getLabel() {
        String desc = description();
        if (ObjectHelper.isEmpty(desc)) {
            return "saga";
        } else {
            return "saga[" + desc + "]";
        }
    }

    @Override
    public String toString() {
        String desc = description();
        if (ObjectHelper.isEmpty(desc)) {
            return "Saga -> [" + outputs + "]";
        } else {
            return "Saga[" + desc + "] -> [" + outputs + "]";
        }
    }

    // Properties

    public SagaActionUriDefinition getCompensation() {
        return compensation;
    }

    /**
     * The compensation endpoint URI that must be called to compensate all changes done in the route.
     * The route corresponding to the compensation URI must perform compensation and complete without error.
     *
     * If errors occur during compensation, the saga service may call again the compensation URI to retry.
     */
    public void setCompensation(SagaActionUriDefinition compensation) {
        this.compensation = compensation;
    }

    public SagaActionUriDefinition getCompletion() {
        return completion;
    }

    /**
     * The completion endpoint URI that will be called when the Saga is completed successfully.
     * The route corresponding to the completion URI must perform completion tasks and terminate without error.
     *
     * If errors occur during completion, the saga service may call again the completion URI to retry.
     */
    public void setCompletion(SagaActionUriDefinition completion) {
        this.completion = completion;
    }

    public SagaPropagation getPropagation() {
        return propagation;
    }

    /**
     * Set the Saga propagation mode (REQUIRED, REQUIRES_NEW, MANDATORY, SUPPORTS, NOT_SUPPORTED, NEVER).
     */
    public void setPropagation(SagaPropagation propagation) {
        this.propagation = propagation;
    }

    public SagaCompletionMode getCompletionMode() {
        return completionMode;
    }

    /**
     * Determine how the saga should be considered complete. When set to AUTO, the saga is completed when the exchange that
     * initiates the saga is processed successfully, or compensated when it completes exceptionally.
     *
     * When set to MANUAL, the user must complete or compensate the saga using the "saga:complete" or "saga:compensate" endpoints.
     */
    public void setCompletionMode(SagaCompletionMode completionMode) {
        this.completionMode = completionMode;
    }

    public CamelSagaService getSagaService() {
        return sagaService;
    }

    public void setSagaService(CamelSagaService sagaService) {
        this.sagaService = sagaService;
    }

    public List<SagaOptionDefinition> getOptions() {
        return options;
    }

    /**
     * Allows to save properties of the current exchange in order to re-use them in a compensation/completion callback route.
     * Options are usually helpful e.g. to store and retrieve identifiers of objects that should be deleted in compensating actions.
     *
     * Option values will be transformed into input headers of the compensation/completion exchange.
     */
    public void setOptions(List<SagaOptionDefinition> options) {
        this.options = options;
    }

    public Long getTimeoutInMilliseconds() {
        return timeoutInMilliseconds;
    }

    /**
     * Set the maximum amount of time for the Saga. After the timeout is expired, the saga will be compensated
     * automatically (unless a different decision has been taken in the meantime).
     */
    public void setTimeoutInMilliseconds(Long timeoutInMilliseconds) {
        this.timeoutInMilliseconds = timeoutInMilliseconds;
    }

    // Appends an option, lazily creating the backing list.
    private void addOption(String option, Expression expression) {
        if (this.options == null) {
            this.options = new ArrayList<>();
        }
        this.options.add(new SagaOptionDefinition(option, expression));
    }

    // Builders

    /** Sets the compensation endpoint URI; may only be set once. */
    public SagaDefinition compensation(String compensation) {
        if (this.compensation != null) {
            throw new IllegalStateException("Compensation has already been set");
        }
        this.compensation = new SagaActionUriDefinition(compensation);
        return this;
    }

    /** Sets the completion endpoint URI; may only be set once. */
    public SagaDefinition completion(String completion) {
        if (this.completion != null) {
            throw new IllegalStateException("Completion has already been set");
        }
        this.completion = new SagaActionUriDefinition(completion);
        return this;
    }

    public SagaDefinition propagation(SagaPropagation propagation) {
        setPropagation(propagation);
        return this;
    }

    public SagaDefinition sagaService(CamelSagaService sagaService) {
        setSagaService(sagaService);
        return this;
    }

    public SagaDefinition completionMode(SagaCompletionMode completionMode) {
        setCompletionMode(completionMode);
        return this;
    }

    public SagaDefinition option(String option, Expression expression) {
        addOption(option, expression);
        return this;
    }

    /** Sets the saga timeout, converting the given amount to milliseconds. */
    public SagaDefinition timeout(long timeout, TimeUnit unit) {
        setTimeoutInMilliseconds(unit.toMillis(timeout));
        return this;
    }

    // Utils

    /**
     * Looks up the saga service: the one set on this definition, else one
     * already registered on the CamelContext, else one found in the registry
     * by type. Fails if none is available.
     *
     * @throws RuntimeCamelException if no CamelSagaService can be found
     */
    protected CamelSagaService findSagaService(CamelContext context) {
        CamelSagaService sagaService = getSagaService();
        if (sagaService != null) {
            return sagaService;
        }
        sagaService = context.hasService(CamelSagaService.class);
        if (sagaService != null) {
            return sagaService;
        }
        sagaService = CamelContextHelper.findByType(context, CamelSagaService.class);
        if (sagaService != null) {
            return sagaService;
        }
        throw new RuntimeCamelException("Cannot find a CamelSagaService");
    }

    // Builds a "key:value,key:value" summary of the populated fields.
    protected String description() {
        StringBuilder desc = new StringBuilder();
        addField(desc, "compensation", compensation);
        addField(desc, "completion", completion);
        addField(desc, "propagation", propagation);
        return desc.toString();
    }

    private void addField(StringBuilder builder, String key, Object value) {
        if (value == null) {
            return;
        }
        if (builder.length() > 0) {
            builder.append(',');
        }
        builder.append(key).append(':').append(value);
    }
}
|
|
package unit;
import static org.junit.Assert.*;
import org.junit.Test;
import static libnp.util.Operation.loadFreeform;
import static libnp.util.Operation.loadArray;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import libnp.random.xfamily.lognormal;
import libnp.random.dirac;
import libnp.random.variable;
import libnp.programs.crp_check;
import static libnp.util.Operation.concat;
import static java.lang.Math.abs;
import static java.lang.Math.exp;
import static java.lang.Math.floor;
import static java.lang.Math.ceil;
import static libnp.programs.crp_check.chiSquareCounts;
import static libnp.programs.crp_check.mlAlpha;
import static libnp.statistics.Frequentist.ksTest;
import hdp.hdp;
import hdp.Core.Sampler;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.distribution.NormalDistribution;
import static org.apache.commons.math3.special.Gamma.GAMMA;
import org.junit.Test;
import DataSet.IO.FlatFile;
import static org.junit.Assert.*;
import org.junit.Test;
import static org.junit.Assert.*;
import static libnp.statistics.Frequentist.chiSquareTest;
import static libnp.statistics.SpecialFunctions.crp_sizes;
import static libnp.statistics.SpecialFunctions.sum;
import org.junit.Test;
import static java.lang.Math.log;
import org.junit.Test;
public class TestGammaPrior {
/* This test fragment makes sure each column of the sample
* values in statistics/Gamma are distributed according to the
* cdf that is provided
*/
/**
 * Checks, column by column, that the logarithm of the sampled gamma values
 * stored in {@code ~/test/hdp/priorGamma/<name>/gamma} follows the supplied
 * CDF, using a Kolmogorov-Smirnov test with threshold D < 0.1.
 */
public void evaluateLog(String name, UnivariateFunction cdf) {
    String testDir = System.getProperty("user.home") + "/test/hdp/priorGamma/" + name;
    new File(testDir).mkdirs();
    String infile = testDir + "/gamma";
    double[][] samples = loadArray(infile);
    int numRows = samples.length;
    for (int col = 0; col < samples[0].length; col++) {
        double[] column = new double[numRows];
        for (int row = 0; row < numRows; row++) {
            column[row] = log(samples[row][col]);
        }
        double D = ksTest(column, cdf);
        assertTrue("D-Value for ks-test was " + D, D < 0.1);
    }
}
public void evaluate(String name, UnivariateFunction cdf) {
String test_dir = System.getProperty("user.home") + "/test/hdp/priorGamma/" + name;
new File(test_dir).mkdirs();
String infile = test_dir + "/gamma";
double[][] Gamma = loadArray(infile);
for (int col = 0; col < Gamma[0].length; col++) {
double[] gamma = new double[Gamma.length];
for (int i = 0; i < Gamma.length; i++) {
gamma[i] = (Gamma[i][col]);
}
double D = ksTest(gamma, cdf);
assertTrue("D-Value for ks-test was " + D, D < 0.1);
}
}
public void TestGammaLogUniform(String name, String[] args) {
final double initial_gamma = 0.01;
final double max_gamma = initial_gamma * 1.0e3;
final double min_gamma = initial_gamma * 1e-3;
final double log_initial_gamma = log(initial_gamma);
final double log_min_gamma = log(min_gamma);
final double log_max_gamma = log(max_gamma);
int iters = 10000;
String test_dir = System.getProperty("user.home") + "/test/hdp/priorGamma/" + name + "/";
new File(test_dir).mkdirs();
double mass = 1.0;
{
Sampler sampler = hdp.init(concat(new String[] {
"-fixmu",
"-fixr",
"-fixmass",
"-gamma", min_gamma + "," + max_gamma + "," + initial_gamma,
"-statistics",
"-gamma_prior", "loguniform",
"-burnin", "0",
"-thin", "10",
"-random_restarts", "1",
"-iters", String.valueOf(iters),
"-mass", String.valueOf(mass),
"-output", test_dir}, args), null);
sampler.run();
evaluateLog(name, new UnivariateFunction() {{}
@Override
public double value(double x) {
return (x - log_min_gamma) / (log_max_gamma - log_min_gamma); }});
}
}
public void TestGammaExponential(String name, String[] args) {
final double initial_gamma = 0.01;
final double max_gamma = initial_gamma * 1.0e3;
final double min_gamma = initial_gamma * 1e-3;
final double log_initial_gamma = log(initial_gamma);
final double log_min_gamma = log(min_gamma);
final double log_max_gamma = log(max_gamma);
int iters = 10000;
String test_dir = System.getProperty("user.home") + "/test/hdp/priorGamma/" + name + "/";
new File(test_dir).mkdirs();
double mass = 1.0;
{
Sampler sampler = hdp.init(concat(new String[] {
"-fixmu",
"-fixr",
"-fixmass",
"-gamma", min_gamma + "," + max_gamma + "," + initial_gamma,
"-gamma_prior", "exponential",
"-statistics",
"-burnin", "0",
"-thin", "10",
"-random_restarts", "1",
"-iters", String.valueOf(iters),
"-mass", String.valueOf(mass),
"-output", test_dir}, args), null);
sampler.run();
evaluate(name, new UnivariateFunction() {{}
final double rate = 1.0/initial_gamma;
final double denom = 1.0/rate * (exp(-min_gamma * rate) - exp(-max_gamma * rate));
@Override
public double value(double x) {
double area = 1/rate * (exp(-min_gamma * rate) - exp(-x * rate));
return area/denom; }});
}
}
@Test
public void test_gamma05() {
TestGammaExponential("GAMMA05",
new String[]{"-unobserved", "3,3", "-fixb"});
}
@Test
public void test_gamma06() {
TestGammaExponential("GAMMA06",
new String[]{"-resample", "3,3", "-fixb"});
}
@Test
public void test_gamma07() {
TestGammaExponential("GAMMA07",
new String[]{"-unobserved", "3,3"});
}
@Test
public void test_gamma08() {
TestGammaExponential("GAMMA08",
new String[]{"-resample", "3,3"});
}
@Test
public void test_gamma01() {
TestGammaLogUniform("GAMMA01",
new String[]{"-unobserved", "3,3", "-fixb"});
}
@Test
public void test_gamma02() {
TestGammaLogUniform("GAMMA02",
new String[]{"-resample", "3,3", "-fixb"});
}
@Test
public void test_gamma03() {
TestGammaLogUniform("GAMMA03",
new String[]{"-unobserved", "3,3"});
}
@Test
public void test_gamma04() {
TestGammaLogUniform("GAMMA04",
new String[]{"-resample", "3,3"});
}
}
|
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.hadoop.fs;
import alluxio.Constants;
import alluxio.LocalAlluxioClusterResource;
import alluxio.hadoop.ConfUtils;
import alluxio.hadoop.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.URI;
import java.util.Date;
import java.util.Random;
import java.util.StringTokenizer;
/**
* Distributed i/o benchmark.
* <p>
* This test writes into or reads from a specified number of files. Number of bytes to write or read
* is specified as a parameter to the test. Each file is accessed in a separate map task.
* <p>
* The reducer collects the following statistics:
* <ul>
* <li>number of tasks completed</li>
* <li>number of bytes written/read</li>
* <li>execution time</li>
* <li>io rate</li>
* <li>io rate squared</li>
* </ul>
*
* Finally, the following information is appended to a local file
* <ul>
* <li>read or write test</li>
* <li>date and time the test finished</li>
* <li>number of files</li>
* <li>total number of bytes processed</li>
* <li>throughput in mb/sec (total number of bytes / sum of processing times)</li>
* <li>average i/o rate in mb/sec per file</li>
* <li>standard deviation of i/o rate</li>
* </ul>
*/
public class DFSIOIntegrationTest implements Tool {
  // Constants for DFSIOIntegrationTest
  private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);
  private static final int DEFAULT_BUFFER_SIZE = 4096;
  private static final String BASE_FILE_NAME = "test_io_";
  private static final String DEFAULT_RES_FILE_NAME = "DFSIOIntegrationTest_results.log";
  private static final long MEGA = ByteMultiple.MB.value();
  private static final int DEFAULT_NR_BYTES = 16384;
  private static final int DEFAULT_NR_FILES = 4;
  // Only true when invoked as a command-line Tool (see run()); when running
  // under JUnit the results are logged instead of appended to a report file.
  private static boolean sGenerateReportFile = false;
  private static final String USAGE = "Usage: " + DFSIOIntegrationTest.class.getSimpleName()
      + " [genericOptions]" + " -read [-random | -backward | -skip [-skipSize Size]] |"
      + " -write | -append | -clean" + " [-compression codecClassName]" + " [-nrFiles N]"
      + " [-size Size[B|KB|MB|GB|TB]]" + " [-resFile resultFileName] [-bufferSize Bytes]"
      + " [-rootDir]";
  private org.apache.hadoop.conf.Configuration mConfig;
  @ClassRule
  public static LocalAlluxioClusterResource sLocalAlluxioClusterResource =
      new LocalAlluxioClusterResource.Builder().build();
  private static URI sLocalAlluxioClusterUri = null;
  // Pick up the standard Hadoop configuration files if they are on the classpath.
  static {
    org.apache.hadoop.conf.Configuration.addDefaultResource("hdfs-default.xml");
    org.apache.hadoop.conf.Configuration.addDefaultResource("hdfs-site.xml");
    org.apache.hadoop.conf.Configuration.addDefaultResource("mapred-default.xml");
    org.apache.hadoop.conf.Configuration.addDefaultResource("mapred-site.xml");
  }

  /** The kinds of I/O benchmark this test can run; toString() gives a human-readable label. */
  private enum TestType {
    TEST_TYPE_READ("read"), TEST_TYPE_WRITE("write"), TEST_TYPE_CLEANUP("cleanup"),
    TEST_TYPE_APPEND("append"), TEST_TYPE_READ_RANDOM("random read"),
    TEST_TYPE_READ_BACKWARD("backward read"), TEST_TYPE_READ_SKIP("skip read");
    private String mType;
    TestType(String t) {
      mType = t;
    }
    @Override
    // String
    public String toString() {
      return mType;
    }
  }

  /** Binary byte-size multipliers (KB = 2^10, MB = 2^20, ...). */
  enum ByteMultiple {
    B(1L), KB(0x400L), MB(0x100000L), GB(0x40000000L), TB(0x10000000000L);
    private long mMultiplier;
    ByteMultiple(long mult) {
      mMultiplier = mult;
    }
    long value() {
      return mMultiplier;
    }
    /**
     * Parses a size suffix such as "B", "KB", "MB"; an empty or null string
     * defaults to MB. Matching is by suffix, so "B" alone maps to bytes.
     *
     * @param sMultiple the (possibly empty) unit suffix
     * @return the matching multiple
     */
    static ByteMultiple parseString(String sMultiple) {
      if (sMultiple == null || sMultiple.isEmpty()) { // MB by default
        return MB;
      }
      String sMU = sMultiple.toUpperCase();
      if (B.name().toUpperCase().endsWith(sMU)) {
        return B;
      }
      if (KB.name().toUpperCase().endsWith(sMU)) {
        return KB;
      }
      if (MB.name().toUpperCase().endsWith(sMU)) {
        return MB;
      }
      if (GB.name().toUpperCase().endsWith(sMU)) {
        return GB;
      }
      if (TB.name().toUpperCase().endsWith(sMU)) {
        return TB;
      }
      throw new IllegalArgumentException("Unsupported ByteMultiple " + sMultiple);
    }
  }

  public DFSIOIntegrationTest() {
    mConfig = new org.apache.hadoop.conf.Configuration();
  }

  // Directory layout: everything lives under test.dfsio.build.data (or the
  // default benchmark root), with one subdirectory per benchmark phase.
  private static String getBaseDir(org.apache.hadoop.conf.Configuration conf) {
    return conf.get("test.dfsio.build.data", "/benchmarks/DFSIOIntegrationTest");
  }
  private static Path getControlDir(org.apache.hadoop.conf.Configuration conf) {
    return new Path(getBaseDir(conf), "io_control");
  }
  private static Path getWriteDir(org.apache.hadoop.conf.Configuration conf) {
    return new Path(getBaseDir(conf), "io_write");
  }
  private static Path getReadDir(org.apache.hadoop.conf.Configuration conf) {
    return new Path(getBaseDir(conf), "io_read");
  }
  private static Path getAppendDir(org.apache.hadoop.conf.Configuration conf) {
    return new Path(getBaseDir(conf), "io_append");
  }
  private static Path getRandomReadDir(org.apache.hadoop.conf.Configuration conf) {
    return new Path(getBaseDir(conf), "io_random_read");
  }
  private static Path getDataDir(org.apache.hadoop.conf.Configuration conf) {
    return new Path(getBaseDir(conf), "io_data");
  }

  // Shared benchmark instance used by all JUnit test methods.
  private static DFSIOIntegrationTest sBench;

  @BeforeClass
  public static void beforeClass() throws Exception {
    // Init DFSIOIntegrationTest
    sBench = new DFSIOIntegrationTest();
    sBench.getConf().setBoolean("dfs.support.append", true);
    sLocalAlluxioClusterUri = URI.create(sLocalAlluxioClusterResource.get().getMasterURI());
    sBench.getConf().set("fs.defaultFS", sLocalAlluxioClusterUri.toString());
    sBench.getConf().set("fs.default.name", sLocalAlluxioClusterUri.toString());
    sBench.getConf().set("fs." + Constants.SCHEME + ".impl", FileSystem.class.getName());
    // Store Alluxio configuration in Hadoop configuration
    ConfUtils.storeToHadoopConfiguration(sBench.getConf());
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    sBench.createControlFile(fs, DEFAULT_NR_BYTES, DEFAULT_NR_FILES);
    /** Check write here, as it is required for other tests */
    writeTest();
  }

  @AfterClass
  public static void afterClass() throws Exception {
    // Clear DFSIOIntegrationTest
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    sBench.cleanup(fs);
  }

  /** Runs the write benchmark; executed in beforeClass() since the read tests need its data. */
  public static void writeTest() throws Exception {
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    long tStart = System.currentTimeMillis();
    sBench.mapperWriteTest(fs);
    long execTime = System.currentTimeMillis() - tStart;
    sBench.analyzeResult(fs, TestType.TEST_TYPE_WRITE, execTime);
  }

  /** Sequential read benchmark. */
  @Test(timeout = 50000)
  public void read() throws Exception {
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    long tStart = System.currentTimeMillis();
    sBench.mapperReadTest(fs);
    long execTime = System.currentTimeMillis() - tStart;
    sBench.analyzeResult(fs, TestType.TEST_TYPE_READ, execTime);
  }

  /** Random read benchmark: skip size 0 selects random offsets (see RandomReadMapper). */
  @Test(timeout = 50000)
  public void readRandom() throws Exception {
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    long tStart = System.currentTimeMillis();
    sBench.getConf().setLong("test.io.skip.size", 0);
    sBench.randomReadTest(fs);
    long execTime = System.currentTimeMillis() - tStart;
    sBench.analyzeResult(fs, TestType.TEST_TYPE_READ_RANDOM, execTime);
  }

  /** Backward read benchmark: negative skip size reads the file in reverse order. */
  @Test(timeout = 50000)
  public void readBackward() throws Exception {
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    long tStart = System.currentTimeMillis();
    sBench.getConf().setLong("test.io.skip.size", -DEFAULT_BUFFER_SIZE);
    sBench.randomReadTest(fs);
    long execTime = System.currentTimeMillis() - tStart;
    sBench.analyzeResult(fs, TestType.TEST_TYPE_READ_BACKWARD, execTime);
  }

  /** Skip-read benchmark with a 1-byte skip between buffer reads. */
  @Test(timeout = 50000)
  public void readSkip() throws Exception {
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    long tStart = System.currentTimeMillis();
    sBench.getConf().setLong("test.io.skip.size", 1);
    sBench.randomReadTest(fs);
    long execTime = System.currentTimeMillis() - tStart;
    sBench.analyzeResult(fs, TestType.TEST_TYPE_READ_SKIP, execTime);
  }

  /** Skip-read benchmark with a large (5000-byte) skip between buffer reads. */
  @Test(timeout = 50000)
  public void readLargeSkip() throws Exception {
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    long tStart = System.currentTimeMillis();
    sBench.getConf().setLong("test.io.skip.size", 5000);
    sBench.randomReadTest(fs);
    long execTime = System.currentTimeMillis() - tStart;
    sBench.analyzeResult(fs, TestType.TEST_TYPE_READ_SKIP, execTime);
  }

  // TODO(hy): Should activate this unit test after ALLUXIO-25 has been solved
  // @Test (timeout = 50000)
  public void append() throws Exception {
    org.apache.hadoop.fs.FileSystem fs =
        org.apache.hadoop.fs.FileSystem.get(sLocalAlluxioClusterUri, sBench.getConf());
    long tStart = System.currentTimeMillis();
    sBench.mapperAppendTest(fs);
    long execTime = System.currentTimeMillis() - tStart;
    sBench.analyzeResult(fs, TestType.TEST_TYPE_APPEND, execTime);
  }

  /**
   * Writes one SequenceFile control file per map task; each control file holds
   * the data-file name and the number of bytes that task must process.
   *
   * NOTE(review): control files are only (re)generated when the control
   * directory does NOT already exist, which also makes the delete() inside the
   * branch a no-op — confirm whether the guard was meant to be inverted.
   */
  @SuppressWarnings("deprecation")
  private void createControlFile(org.apache.hadoop.fs.FileSystem fs, long nrBytes, // in bytes
      int nrFiles) throws IOException {
    LOG.info("creating control file: " + nrBytes + " bytes, " + nrFiles + " files");
    Path controlDir = getControlDir(mConfig);
    if (!fs.exists(controlDir)) {
      fs.delete(controlDir, true);
      for (int i = 0; i < nrFiles; i++) {
        String name = getFileName(i);
        Path controlFile = new Path(controlDir, "in_file_" + name);
        SequenceFile.Writer writer = null;
        try {
          writer =
              SequenceFile.createWriter(fs, mConfig, controlFile, Text.class, LongWritable.class,
                  CompressionType.NONE);
          writer.append(new Text(name), new LongWritable(nrBytes));
        } catch (Exception e) {
          throw new IOException(e.getLocalizedMessage());
        } finally {
          if (writer != null) {
            writer.close();
          }
          writer = null;
        }
      }
    }
    LOG.info("created control files for: " + nrFiles + " files");
  }

  private static String getFileName(int fIdx) {
    return BASE_FILE_NAME + Integer.toString(fIdx);
  }

  /**
   * Write/Read mapper base class.
   * <p>
   * Collects the following statistics per task:
   * <ul>
   * <li>number of tasks completed</li>
   * <li>number of bytes written/read</li>
   * <li>execution time</li>
   * <li>i/o rate</li>
   * <li>i/o rate squared</li>
   * </ul>
   */
  private abstract static class IOStatMapper extends AbstractIOMapper<Long> {
    protected CompressionCodec mCompressionCodec;
    IOStatMapper() {}

    @Override
    // Mapper
    public void configure(JobConf conf) {
      super.configure(conf);
      // grab compression
      String compression = getConf().get("test.io.compression.class", null);
      Class<? extends CompressionCodec> codec;
      // try to initialize codec
      try {
        codec =
            (compression == null) ? null : Class.forName(compression).asSubclass(
                CompressionCodec.class);
      } catch (Exception e) {
        throw new RuntimeException("Compression codec not found: ", e);
      }
      if (codec != null) {
        mCompressionCodec = ReflectionUtils.newInstance(codec, getConf());
      }
    }

    @Override
    // AbstractIOMapper
    void collectStats(OutputCollector<Text, Text> output, String name, long execTime, Long objSize)
        throws IOException {
      long totalSize = objSize;
      float ioRateMbSec = (float) totalSize * 1000 / (execTime * MEGA);
      LOG.info("Number of bytes processed = " + totalSize);
      LOG.info("Exec time = " + execTime);
      LOG.info("IO rate = " + ioRateMbSec);
      // Keys are prefixed with the value type so AccumulatingReducer knows how
      // to aggregate them; rate/sqrate are scaled by 1000 here and divided back
      // down in analyzeResult().
      output.collect(new Text(AccumulatingReducer.VALUE_TYPE_LONG + "tasks"),
          new Text(String.valueOf(1)));
      output.collect(new Text(AccumulatingReducer.VALUE_TYPE_LONG + "size"),
          new Text(String.valueOf(totalSize)));
      output.collect(new Text(AccumulatingReducer.VALUE_TYPE_LONG + "time"),
          new Text(String.valueOf(execTime)));
      output.collect(new Text(AccumulatingReducer.VALUE_TYPE_FLOAT + "rate"),
          new Text(String.valueOf(ioRateMbSec * 1000)));
      output.collect(new Text(AccumulatingReducer.VALUE_TYPE_FLOAT + "sqrate"),
          new Text(String.valueOf(ioRateMbSec * ioRateMbSec * 1000)));
    }
  }

  /**
   * Write mapper class.
   */
  public static class WriteMapper extends IOStatMapper {
    public WriteMapper() {
      // Fill the shared buffer with a repeating printable pattern.
      for (int i = 0; i < mBufferSize; i++) {
        mBuffer[i] = (byte) ('0' + i % 50);
      }
    }

    @Override
    // AbstractIOMapper
    public Closeable getIOStream(String name) throws IOException {
      // create file
      OutputStream out = mFS.create(new Path(getDataDir(getConf()), name), true, mBufferSize);
      if (mCompressionCodec != null) {
        out = mCompressionCodec.createOutputStream(out);
      }
      LOG.info("out = " + out.getClass().getName());
      return out;
    }

    @Override
    // AbstractIOMapper, totalSize is in bytes
    public Long doIO(Reporter reporter, String name, long totalSize) throws IOException {
      OutputStream out = (OutputStream) this.mStream;
      // write to the file
      long nrRemaining;
      for (nrRemaining = totalSize; nrRemaining > 0; nrRemaining -= mBufferSize) {
        int curSize = (mBufferSize < nrRemaining) ? mBufferSize : (int) nrRemaining;
        out.write(mBuffer, 0, curSize);
        reporter.setStatus("writing " + name + "@" + (totalSize - nrRemaining) + "/" + totalSize
            + " ::host = " + mHostname);
      }
      return totalSize;
    }
  }

  private void mapperWriteTest(org.apache.hadoop.fs.FileSystem fs) throws IOException {
    Path writeDir = getWriteDir(mConfig);
    fs.delete(getDataDir(mConfig), true);
    fs.delete(writeDir, true);
    runIOTest(WriteMapper.class, writeDir);
  }

  /**
   * Configures and runs a single-reducer MapReduce job: the control files are
   * the input, the given mapper does the I/O, and AccumulatingReducer sums the
   * per-task statistics into outputDir.
   */
  private void runIOTest(Class<? extends Mapper<Text, LongWritable, Text, Text>> mapperClass,
      Path outputDir) throws IOException {
    JobConf job = new JobConf(mConfig, DFSIOIntegrationTest.class);
    FileInputFormat.setInputPaths(job, getControlDir(mConfig));
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setMapperClass(mapperClass);
    job.setReducerClass(AccumulatingReducer.class);
    FileOutputFormat.setOutputPath(job, outputDir);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setNumReduceTasks(1);
    JobClient.runJob(job);
  }

  /**
   * Append mapper class.
   */
  public static class AppendMapper extends IOStatMapper {
    public AppendMapper() {
      // Fill the shared buffer with a repeating printable pattern.
      for (int i = 0; i < mBufferSize; i++) {
        mBuffer[i] = (byte) ('0' + i % 50);
      }
    }

    @Override
    // AbstractIOMapper
    public Closeable getIOStream(String name) throws IOException {
      // open file for append
      OutputStream out = mFS.append(new Path(getDataDir(getConf()), name), mBufferSize);
      if (mCompressionCodec != null) {
        out = mCompressionCodec.createOutputStream(out);
      }
      LOG.info("out = " + out.getClass().getName());
      return out;
    }

    @Override
    // AbstractIOMapper, totalSize is in Bytes
    public Long doIO(Reporter reporter, String name, long totalSize) throws IOException {
      OutputStream out = (OutputStream) this.mStream;
      // write to the file
      long nrRemaining;
      for (nrRemaining = totalSize; nrRemaining > 0; nrRemaining -= mBufferSize) {
        int curSize = (mBufferSize < nrRemaining) ? mBufferSize : (int) nrRemaining;
        out.write(mBuffer, 0, curSize);
        reporter.setStatus("writing " + name + "@" + (totalSize - nrRemaining) + "/" + totalSize
            + " ::host = " + mHostname);
      }
      return totalSize;
    }
  }

  private void mapperAppendTest(org.apache.hadoop.fs.FileSystem fs) throws IOException {
    Path appendDir = getAppendDir(mConfig);
    fs.delete(appendDir, true);
    runIOTest(AppendMapper.class, appendDir);
  }

  /**
   * Read mapper class.
   */
  public static class ReadMapper extends IOStatMapper {
    public ReadMapper() {}

    @Override
    // AbstractIOMapper
    public Closeable getIOStream(String name) throws IOException {
      // open file
      InputStream in = mFS.open(new Path(getDataDir(getConf()), name));
      if (mCompressionCodec != null) {
        in = mCompressionCodec.createInputStream(in);
      }
      LOG.info("in = " + in.getClass().getName());
      return in;
    }

    @Override
    // AbstractIOMapper, totalSize in Bytes
    public Long doIO(Reporter reporter, String name, long totalSize) throws IOException {
      InputStream in = (InputStream) this.mStream;
      long actualSize = 0;
      // Read until totalSize bytes have been consumed or EOF is reached; the
      // actual byte count is returned so short files are reported accurately.
      while (actualSize < totalSize) {
        int curSize = in.read(mBuffer, 0, mBufferSize);
        if (curSize < 0) {
          break;
        }
        actualSize += curSize;
        reporter.setStatus("reading " + name + "@" + actualSize + "/" + totalSize + " ::host = "
            + mHostname);
      }
      return actualSize;
    }
  }

  private void mapperReadTest(org.apache.hadoop.fs.FileSystem fs) throws IOException {
    Path readDir = getReadDir(mConfig);
    fs.delete(readDir, true);
    runIOTest(ReadMapper.class, readDir);
  }

  /**
   * Mapper class for random reads. The mapper chooses a position in the file and reads bufferSize
   * bytes starting at the chosen position. It stops after reading the totalSize bytes, specified
   * by size.
   *
   * There are three type of reads. 1) Random read always chooses a random position to read from:
   * skipSize = 0 2) Backward read reads file in reverse order : skipSize < 0 3) Skip-read skips
   * skipSize bytes after every read : skipSize > 0
   */
  public static class RandomReadMapper extends IOStatMapper {
    private Random mRnd;
    private long mFileSize;
    private long mSkipSize;

    @Override
    // Mapper
    public void configure(JobConf conf) {
      super.configure(conf);
      mSkipSize = conf.getLong("test.io.skip.size", 0);
    }

    public RandomReadMapper() {
      mRnd = new Random();
    }

    @Override
    // AbstractIOMapper
    public Closeable getIOStream(String name) throws IOException {
      Path filePath = new Path(getDataDir(getConf()), name);
      mFileSize = mFS.getFileStatus(filePath).getLen();
      InputStream in = mFS.open(filePath);
      if (mCompressionCodec != null) {
        // Wrap so the stream is still PositionedReadable, as doIO() requires.
        in = new FSDataInputStream(mCompressionCodec.createInputStream(in));
      }
      LOG.info("in = " + in.getClass().getName());
      LOG.info("skipSize = " + mSkipSize);
      return in;
    }

    @Override
    // AbstractIOMapper, totalSize in Bytes
    public Long doIO(Reporter reporter, String name, long totalSize) throws IOException {
      PositionedReadable in = (PositionedReadable) this.mStream;
      long actualSize = 0;
      for (long pos = nextOffset(-1); actualSize < totalSize; pos = nextOffset(pos)) {
        int curSize = in.read(pos, mBuffer, 0, mBufferSize);
        if (curSize < 0) {
          break;
        }
        actualSize += curSize;
        reporter.setStatus("reading " + name + "@" + actualSize + "/" + totalSize + " ::host = "
            + mHostname);
      }
      return actualSize;
    }

    /**
     * Get next offset for reading. If current < 0 then choose initial offset according to the read
     * type.
     *
     * @param current offset
     * @return the next offset for reading
     */
    private long nextOffset(long current) {
      if (mSkipSize == 0) {
        // Random read: any offset within the file.
        return mRnd.nextInt((int) (mFileSize));
      }
      if (mSkipSize > 0) {
        // Skip read: start at 0, then jump forward past each read buffer.
        return (current < 0) ? 0 : (current + mBufferSize + mSkipSize);
      }
      // skipSize < 0
      return (current < 0) ? Math.max(0, mFileSize - mBufferSize) : Math
          .max(0, current + mSkipSize);
    }
  }

  private void randomReadTest(org.apache.hadoop.fs.FileSystem fs) throws IOException {
    Path readDir = getRandomReadDir(mConfig);
    fs.delete(readDir, true);
    runIOTest(RandomReadMapper.class, readDir);
  }

  // fileSize is in Bytes
  // Runs the chosen mapper directly in-process (no MapReduce job), once per file.
  private void sequentialTest(org.apache.hadoop.fs.FileSystem fs, TestType testType, long fileSize,
      int nrFiles) throws IOException {
    IOStatMapper ioer;
    switch (testType) {
      case TEST_TYPE_READ:
        ioer = new ReadMapper();
        break;
      case TEST_TYPE_WRITE:
        ioer = new WriteMapper();
        break;
      case TEST_TYPE_APPEND:
        ioer = new AppendMapper();
        break;
      case TEST_TYPE_READ_RANDOM:
      case TEST_TYPE_READ_BACKWARD:
      case TEST_TYPE_READ_SKIP:
        ioer = new RandomReadMapper();
        break;
      default:
        return;
    }
    for (int i = 0; i < nrFiles; i++) {
      ioer.doIO(Reporter.NULL, BASE_FILE_NAME + Integer.toString(i), fileSize);
    }
    ioer.close();
  }

  /** Command-line entry point; exit code -1 means bad usage, -2 means a runtime failure. */
  public static void main(String[] args) {
    DFSIOIntegrationTest bench = new DFSIOIntegrationTest();
    int res;
    try {
      res = ToolRunner.run(bench, args);
    } catch (Exception e) {
      System.err.print(StringUtils.stringifyException(e));
      res = -2;
    }
    if (res == -1) {
      System.err.print(USAGE);
    }
    System.exit(res);
  }

  @Override
  // Tool
  public int run(String[] args) throws IOException {
    TestType testType = null;
    int bufferSize = DEFAULT_BUFFER_SIZE;
    long nrBytes = MEGA;
    int nrFiles = 1;
    long skipSize = 0;
    String resFileName = DEFAULT_RES_FILE_NAME;
    String compressionClass = null;
    boolean isSequential = false;
    String version = DFSIOIntegrationTest.class.getSimpleName() + ".1.7";
    sGenerateReportFile = true;
    LOG.info(version);
    if (args.length == 0) {
      System.err.println("Missing arguments.");
      return -1;
    }
    for (int i = 0; i < args.length; i++) { // parse command line
      if (args[i].startsWith("-read")) {
        testType = TestType.TEST_TYPE_READ;
      } else if (args[i].equals("-write")) {
        testType = TestType.TEST_TYPE_WRITE;
      } else if (args[i].equals("-append")) {
        testType = TestType.TEST_TYPE_APPEND;
      } else if (args[i].equals("-random")) {
        // -random/-backward/-skip are modifiers that must follow -read.
        if (testType != TestType.TEST_TYPE_READ) {
          return -1;
        }
        testType = TestType.TEST_TYPE_READ_RANDOM;
      } else if (args[i].equals("-backward")) {
        if (testType != TestType.TEST_TYPE_READ) {
          return -1;
        }
        testType = TestType.TEST_TYPE_READ_BACKWARD;
      } else if (args[i].equals("-skip")) {
        if (testType != TestType.TEST_TYPE_READ) {
          return -1;
        }
        testType = TestType.TEST_TYPE_READ_SKIP;
      } else if (args[i].equals("-clean")) {
        testType = TestType.TEST_TYPE_CLEANUP;
      } else if (args[i].startsWith("-seq")) {
        isSequential = true;
      } else if (args[i].startsWith("-compression")) {
        compressionClass = args[++i];
      } else if (args[i].equals("-nrFiles")) {
        nrFiles = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-fileSize") || args[i].equals("-size")) {
        nrBytes = parseSize(args[++i]);
      } else if (args[i].equals("-skipSize")) {
        skipSize = parseSize(args[++i]);
      } else if (args[i].equals("-bufferSize")) {
        bufferSize = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-resFile")) {
        resFileName = args[++i];
      } else {
        System.err.println("Illegal argument: " + args[i]);
        return -1;
      }
    }
    if (testType == null) {
      return -1;
    }
    // Derive default skip sizes for backward/skip reads when not given explicitly.
    if (testType == TestType.TEST_TYPE_READ_BACKWARD) {
      skipSize = -bufferSize;
    } else if (testType == TestType.TEST_TYPE_READ_SKIP && skipSize == 0) {
      skipSize = bufferSize;
    }
    LOG.info("nrFiles = " + nrFiles);
    LOG.info("nrBytes (MB) = " + toMB(nrBytes));
    LOG.info("bufferSize = " + bufferSize);
    if (skipSize > 0) {
      LOG.info("skipSize = " + skipSize);
    }
    LOG.info("baseDir = " + getBaseDir(mConfig));
    if (compressionClass != null) {
      mConfig.set("test.io.compression.class", compressionClass);
      LOG.info("compressionClass = " + compressionClass);
    }
    mConfig.setInt("test.io.file.buffer.size", bufferSize);
    mConfig.setLong("test.io.skip.size", skipSize);
    mConfig.setBoolean("dfs.support.append", true);
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(mConfig);
    if (isSequential) {
      long tStart = System.currentTimeMillis();
      sequentialTest(fs, testType, nrBytes, nrFiles);
      long execTime = System.currentTimeMillis() - tStart;
      String resultLine = "Seq Test exec time sec: " + (float) execTime / 1000;
      LOG.info(resultLine);
      return 0;
    }
    if (testType == TestType.TEST_TYPE_CLEANUP) {
      cleanup(fs);
      return 0;
    }
    createControlFile(fs, nrBytes, nrFiles);
    long tStart = System.currentTimeMillis();
    switch (testType) {
      case TEST_TYPE_WRITE:
        mapperWriteTest(fs);
        break;
      case TEST_TYPE_READ:
        mapperReadTest(fs);
        break;
      case TEST_TYPE_APPEND:
        mapperAppendTest(fs);
        break;
      case TEST_TYPE_READ_RANDOM:
      case TEST_TYPE_READ_BACKWARD:
      case TEST_TYPE_READ_SKIP:
        randomReadTest(fs);
        break;
      default:
    }
    long execTime = System.currentTimeMillis() - tStart;
    analyzeResult(fs, testType, execTime, resFileName);
    return 0;
  }

  @Override
  // Configurable
  public org.apache.hadoop.conf.Configuration getConf() {
    return this.mConfig;
  }

  @Override
  // Configurable
  public void setConf(org.apache.hadoop.conf.Configuration conf) {
    mConfig = conf;
  }

  /**
   * Returns size in bytes.
   *
   * @param arg = {d}[B|KB|MB|GB|TB]
   * @return the parsed size in bytes
   */
  static long parseSize(String arg) {
    String[] args = arg.split("\\D", 2); // get digits
    assert args.length <= 2;
    long nrBytes = Long.parseLong(args[0]);
    String bytesMult = arg.substring(args[0].length()); // get byte multiple
    return nrBytes * ByteMultiple.parseString(bytesMult).value();
  }

  /** Converts a byte count to megabytes (MiB) as a float. */
  static float toMB(long bytes) {
    return ((float) bytes) / MEGA;
  }

  /**
   * Parses the reducer output for the given test type, computes throughput /
   * average rate / standard deviation, and logs (and optionally appends to
   * resFileName) the summary lines.
   */
  private void analyzeResult(org.apache.hadoop.fs.FileSystem fs, TestType testType, long execTime,
      String resFileName) throws IOException {
    Path reduceFile = getReduceFilePath(testType);
    long tasks = 0;
    long size = 0;
    long time = 0;
    float rate = 0;
    float sqrate = 0;
    DataInputStream in = null;
    BufferedReader lines = null;
    try {
      in = new DataInputStream(fs.open(reduceFile));
      lines = new BufferedReader(new InputStreamReader(in));
      String line;
      while ((line = lines.readLine()) != null) {
        StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%");
        String attr = tokens.nextToken();
        if (attr.endsWith(":tasks")) {
          tasks = Long.parseLong(tokens.nextToken());
        } else if (attr.endsWith(":size")) {
          size = Long.parseLong(tokens.nextToken());
        } else if (attr.endsWith(":time")) {
          time = Long.parseLong(tokens.nextToken());
        } else if (attr.endsWith(":rate")) {
          rate = Float.parseFloat(tokens.nextToken());
        } else if (attr.endsWith(":sqrate")) {
          sqrate = Float.parseFloat(tokens.nextToken());
        }
      }
    } finally {
      if (in != null) {
        in.close();
      }
      if (lines != null) {
        lines.close();
      }
    }
    // rate/sqrate were scaled by 1000 in collectStats(); undo it here.
    // med is the mean i/o rate per task (name kept from upstream TestDFSIO).
    double med = rate / 1000 / tasks;
    double stdDev = Math.sqrt(Math.abs(sqrate / 1000 / tasks - med * med));
    String[] resultLines =
        {"----- DFSIOIntegrationTest ----- : " + testType,
            "           Date & time: " + new Date(System.currentTimeMillis()),
            "       Number of files: " + tasks, "Total MBytes processed: " + toMB(size),
            "     Throughput mb/sec: " + size * 1000.0 / (time * MEGA),
            "Average IO rate mb/sec: " + med, " IO rate std deviation: " + stdDev,
            "    Test exec time sec: " + (float) execTime / 1000, ""};
    PrintStream res = null;
    try {
      if (sGenerateReportFile) {
        res = new PrintStream(new FileOutputStream(new File(resFileName), true));
      }
      for (String resultLine : resultLines) {
        LOG.info(resultLine);
        if (sGenerateReportFile) {
          res.println(resultLine);
        } else {
          System.out.println(resultLine);
        }
      }
    } finally {
      if (res != null) {
        res.close();
      }
    }
  }

  private void analyzeResult(org.apache.hadoop.fs.FileSystem fs, TestType testType, long execTime)
      throws IOException {
    analyzeResult(fs, testType, execTime, DEFAULT_RES_FILE_NAME);
  }

  /** Returns the single-reducer output file ("part-00000") for the given test type. */
  private Path getReduceFilePath(TestType testType) {
    switch (testType) {
      case TEST_TYPE_WRITE:
        return new Path(getWriteDir(mConfig), "part-00000");
      case TEST_TYPE_APPEND:
        return new Path(getAppendDir(mConfig), "part-00000");
      case TEST_TYPE_READ:
        return new Path(getReadDir(mConfig), "part-00000");
      case TEST_TYPE_READ_RANDOM:
      case TEST_TYPE_READ_BACKWARD:
      case TEST_TYPE_READ_SKIP:
        return new Path(getRandomReadDir(mConfig), "part-00000");
      default:
    }
    return null;
  }

  /** Removes the whole benchmark directory tree. */
  private void cleanup(org.apache.hadoop.fs.FileSystem fs) throws IOException {
    LOG.info("Cleaning up test files");
    fs.delete(new Path(getBaseDir(mConfig)), true);
  }
}
|
|
package org.apache.lucene.codecs.memory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.TreeMap;
import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.codecs.memory.FSTTermsReader.TermsReader;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.automaton.ByteRunAutomaton;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.fst.BytesRefFSTEnum.InputOutput;
import org.apache.lucene.util.fst.BytesRefFSTEnum;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.Outputs;
import org.apache.lucene.util.fst.PositiveIntOutputs;
import org.apache.lucene.util.fst.Util;
/**
* FST-based terms dictionary reader.
*
* The FST index maps each term and its ord, and during seek
* the ord is used fetch metadata from a single block.
* The term dictionary is fully memory resident.
*
* @lucene.experimental
*/
public class FSTOrdTermsReader extends FieldsProducer {
  // Number of terms grouped under one skip entry; must equal the writer's interval.
  static final int INTERVAL = FSTOrdTermsWriter.SKIP_INTERVAL;
  // Per-field terms readers, kept sorted by field name (TreeMap) for iteration.
  final TreeMap<String, TermsReader> fields = new TreeMap<>();
  final PostingsReaderBase postingsReader;
  // Format version read from the terms-index header.
  int version;
  //static final boolean TEST = false;

  /**
   * Opens the terms index (FST) and terms block files for the segment,
   * validates headers/checksums, and loads one {@link TermsReader} per field.
   *
   * @param postingsReader decodes per-term postings metadata; initialized here
   * @throws IOException on read failure or index corruption
   */
  public FSTOrdTermsReader(SegmentReadState state, PostingsReaderBase postingsReader) throws IOException {
    final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, FSTOrdTermsWriter.TERMS_INDEX_EXTENSION);
    final String termsBlockFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, FSTOrdTermsWriter.TERMS_BLOCK_EXTENSION);
    this.postingsReader = postingsReader;
    ChecksumIndexInput indexIn = null;
    IndexInput blockIn = null;
    boolean success = false;
    try {
      indexIn = state.directory.openChecksumInput(termsIndexFileName, state.context);
      blockIn = state.directory.openInput(termsBlockFileName, state.context);
      version = readHeader(indexIn);
      readHeader(blockIn);
      if (version >= FSTOrdTermsWriter.TERMS_VERSION_CHECKSUM) {
        // Verify the whole block file up front so later reads can trust it.
        CodecUtil.checksumEntireFile(blockIn);
      }
      this.postingsReader.init(blockIn);
      seekDir(blockIn);
      final FieldInfos fieldInfos = state.fieldInfos;
      final int numFields = blockIn.readVInt();
      for (int i = 0; i < numFields; i++) {
        FieldInfo fieldInfo = fieldInfos.fieldInfo(blockIn.readVInt());
        boolean hasFreq = fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY;
        long numTerms = blockIn.readVLong();
        // sumTotalTermFreq is only written when the field records frequencies.
        long sumTotalTermFreq = hasFreq ? blockIn.readVLong() : -1;
        long sumDocFreq = blockIn.readVLong();
        int docCount = blockIn.readVInt();
        int longsSize = blockIn.readVInt();
        // The FST index (term -> ord) is read from the index file.
        FST<Long> index = new FST<>(indexIn, PositiveIntOutputs.getSingleton());
        TermsReader current = new TermsReader(fieldInfo, blockIn, numTerms, sumTotalTermFreq, sumDocFreq, docCount, longsSize, index);
        // put() returns a non-null previous entry only on a duplicate field.
        TermsReader previous = fields.put(fieldInfo.name, current);
        checkFieldSummary(state.segmentInfo, indexIn, blockIn, current, previous);
      }
      if (version >= FSTOrdTermsWriter.TERMS_VERSION_CHECKSUM) {
        CodecUtil.checkFooter(indexIn);
      } else {
        CodecUtil.checkEOF(indexIn);
      }
      success = true;
    } finally {
      if (success) {
        IOUtils.close(indexIn, blockIn);
      } else {
        // Don't mask the original exception with close() failures.
        IOUtils.closeWhileHandlingException(indexIn, blockIn);
      }
    }
  }

  /** Reads and validates the codec header, returning the file's version. */
  private int readHeader(IndexInput in) throws IOException {
    return CodecUtil.checkHeader(in, FSTOrdTermsWriter.TERMS_CODEC_NAME,
                                 FSTOrdTermsWriter.TERMS_VERSION_START,
                                 FSTOrdTermsWriter.TERMS_VERSION_CURRENT);
  }

  /**
   * Positions {@code in} at the field-summary directory: a long pointer is
   * stored 8 bytes before the end (before the footer on checksum versions).
   */
  private void seekDir(IndexInput in) throws IOException {
    if (version >= FSTOrdTermsWriter.TERMS_VERSION_CHECKSUM) {
      in.seek(in.length() - CodecUtil.footerLength() - 8);
    } else {
      in.seek(in.length() - 8);
    }
    in.seek(in.readLong());
  }

  /**
   * Sanity-checks one field's summary statistics and rejects duplicate fields.
   *
   * @param previous non-null if this field name was already registered
   * @throws CorruptIndexException if any invariant is violated
   */
  private void checkFieldSummary(SegmentInfo info, IndexInput indexIn, IndexInput blockIn, TermsReader field, TermsReader previous) throws IOException {
    // #docs with field must be <= #docs
    if (field.docCount < 0 || field.docCount > info.getDocCount()) {
      throw new CorruptIndexException("invalid docCount: " + field.docCount + " maxDoc: " + info.getDocCount() + " (resource=" + indexIn + ", " + blockIn + ")");
    }
    // #postings must be >= #docs with field
    if (field.sumDocFreq < field.docCount) {
      throw new CorruptIndexException("invalid sumDocFreq: " + field.sumDocFreq + " docCount: " + field.docCount + " (resource=" + indexIn + ", " + blockIn + ")");
    }
    // #positions must be >= #postings
    if (field.sumTotalTermFreq != -1 && field.sumTotalTermFreq < field.sumDocFreq) {
      throw new CorruptIndexException("invalid sumTotalTermFreq: " + field.sumTotalTermFreq + " sumDocFreq: " + field.sumDocFreq + " (resource=" + indexIn + ", " + blockIn + ")");
    }
    if (previous != null) {
      throw new CorruptIndexException("duplicate fields: " + field.fieldInfo.name + " (resource=" + indexIn + ", " + blockIn + ")");
    }
  }

  /** Iterates field names in sorted order; the view is unmodifiable. */
  @Override
  public Iterator<String> iterator() {
    return Collections.unmodifiableSet(fields.keySet()).iterator();
  }

  /** Returns the {@link Terms} for {@code field}, or null if absent. */
  @Override
  public Terms terms(String field) throws IOException {
    assert field != null;
    return fields.get(field);
  }

  /** Number of fields in this producer. */
  @Override
  public int size() {
    return fields.size();
  }

  /** Closes the postings reader and drops all per-field state. */
  @Override
  public void close() throws IOException {
    try {
      IOUtils.close(postingsReader);
    } finally {
      fields.clear();
    }
  }

  /**
   * Per-field terms dictionary: an FST mapping term -> ord plus three
   * in-memory byte blocks (stats, metadata longs, metadata bytes) that are
   * addressed via per-INTERVAL skip entries.
   */
  final class TermsReader extends Terms {
    final FieldInfo fieldInfo;
    final long numTerms;
    final long sumTotalTermFreq;
    final long sumDocFreq;
    final int docCount;
    final int longsSize;
    final FST<Long> index;
    // Number of longs per skip entry: statsFP, metaLongsFP, metaBytesFP + longsSize deltas.
    final int numSkipInfo;
    final long[] skipInfo;
    final byte[] statsBlock;
    final byte[] metaLongsBlock;
    final byte[] metaBytesBlock;

    TermsReader(FieldInfo fieldInfo, IndexInput blockIn, long numTerms, long sumTotalTermFreq, long sumDocFreq, int docCount, int longsSize, FST<Long> index) throws IOException {
      this.fieldInfo = fieldInfo;
      this.numTerms = numTerms;
      this.sumTotalTermFreq = sumTotalTermFreq;
      this.sumDocFreq = sumDocFreq;
      this.docCount = docCount;
      this.longsSize = longsSize;
      this.index = index;
      // numTerms must fit in an unsigned int so block math below is safe.
      assert (numTerms & (~0xffffffffL)) == 0;
      final int numBlocks = (int)(numTerms + INTERVAL - 1) / INTERVAL;
      this.numSkipInfo = longsSize + 3;
      this.skipInfo = new long[numBlocks * numSkipInfo];
      this.statsBlock = new byte[(int)blockIn.readVLong()];
      this.metaLongsBlock = new byte[(int)blockIn.readVLong()];
      this.metaBytesBlock = new byte[(int)blockIn.readVLong()];
      // Skip entries are delta-encoded against the previous block's entry;
      // block 0's entry is implicitly all zeros.
      int last = 0, next = 0;
      for (int i = 1; i < numBlocks; i++) {
        next = numSkipInfo * i;
        for (int j = 0; j < numSkipInfo; j++) {
          skipInfo[next + j] = skipInfo[last + j] + blockIn.readVLong();
        }
        last = next;
      }
      // All three data blocks are loaded fully into memory.
      blockIn.readBytes(statsBlock, 0, statsBlock.length);
      blockIn.readBytes(metaLongsBlock, 0, metaLongsBlock.length);
      blockIn.readBytes(metaBytesBlock, 0, metaBytesBlock.length);
    }

    @Override
    public Comparator<BytesRef> getComparator() {
      return BytesRef.getUTF8SortedAsUnicodeComparator();
    }

    @Override
    public boolean hasFreqs() {
      return fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0;
    }

    @Override
    public boolean hasOffsets() {
      return fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
    }

    @Override
    public boolean hasPositions() {
      return fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
    }

    @Override
    public boolean hasPayloads() {
      return fieldInfo.hasPayloads();
    }

    @Override
    public long size() {
      return numTerms;
    }

    @Override
    public long getSumTotalTermFreq() {
      return sumTotalTermFreq;
    }

    @Override
    public long getSumDocFreq() throws IOException {
      return sumDocFreq;
    }

    @Override
    public int getDocCount() throws IOException {
      return docCount;
    }

    // NOTE(review): 'reuse' is ignored; a fresh enum is always created.
    @Override
    public TermsEnum iterator(TermsEnum reuse) throws IOException {
      return new SegmentTermsEnum();
    }

    @Override
    public TermsEnum intersect(CompiledAutomaton compiled, BytesRef startTerm) throws IOException {
      return new IntersectTermsEnum(compiled, startTerm);
    }

    // Only wraps common operations for PBF interact
    abstract class BaseTermsEnum extends TermsEnum {
      /* Current term's ord, starts from 0 */
      long ord;
      /* Current term stats + decoded metadata (customized by PBF) */
      final BlockTermState state;
      /* Datainput to load stats & metadata */
      final ByteArrayDataInput statsReader = new ByteArrayDataInput();
      final ByteArrayDataInput metaLongsReader = new ByteArrayDataInput();
      final ByteArrayDataInput metaBytesReader = new ByteArrayDataInput();
      /* To which block is buffered */
      int statsBlockOrd;
      int metaBlockOrd;
      /* Current buffered metadata (long[] & byte[]) */
      long[][] longs;
      int[] bytesStart;
      int[] bytesLength;
      /* Current buffered stats (df & ttf) */
      int[] docFreq;
      long[] totalTermFreq;

      BaseTermsEnum() throws IOException {
        this.state = postingsReader.newTermState();
        this.statsReader.reset(statsBlock);
        this.metaLongsReader.reset(metaLongsBlock);
        this.metaBytesReader.reset(metaBytesBlock);
        // One row per term in the current INTERVAL-sized block.
        this.longs = new long[INTERVAL][longsSize];
        this.bytesStart = new int[INTERVAL];
        this.bytesLength = new int[INTERVAL];
        this.docFreq = new int[INTERVAL];
        this.totalTermFreq = new long[INTERVAL];
        // -1 marks "no block buffered yet" so the first decode forces a refill.
        this.statsBlockOrd = -1;
        this.metaBlockOrd = -1;
        if (!hasFreqs()) {
          // Fields without freqs report totalTermFreq == -1.
          Arrays.fill(totalTermFreq, -1);
        }
      }

      @Override
      public Comparator<BytesRef> getComparator() {
        return BytesRef.getUTF8SortedAsUnicodeComparator();
      }

      /** Decodes stats data into term state */
      void decodeStats() throws IOException {
        final int upto = (int)ord % INTERVAL;
        final int oldBlockOrd = statsBlockOrd;
        statsBlockOrd = (int)ord / INTERVAL;
        if (oldBlockOrd != statsBlockOrd) {
          refillStats();
        }
        state.docFreq = docFreq[upto];
        state.totalTermFreq = totalTermFreq[upto];
      }

      /** Let PBF decode metadata */
      void decodeMetaData() throws IOException {
        final int upto = (int)ord % INTERVAL;
        final int oldBlockOrd = metaBlockOrd;
        metaBlockOrd = (int)ord / INTERVAL;
        if (metaBlockOrd != oldBlockOrd) {
          refillMetadata();
        }
        metaBytesReader.setPosition(bytesStart[upto]);
        postingsReader.decodeTerm(longs[upto], metaBytesReader, fieldInfo, state, true);
      }

      /** Load current stats shard */
      final void refillStats() throws IOException {
        final int offset = statsBlockOrd * numSkipInfo;
        final int statsFP = (int)skipInfo[offset];
        statsReader.setPosition(statsFP);
        for (int i = 0; i < INTERVAL && !statsReader.eof(); i++) {
          int code = statsReader.readVInt();
          if (hasFreqs()) {
            // docFreq is stored shifted left by one; the low bit flags
            // whether totalTermFreq equals docFreq (no delta follows).
            docFreq[i] = (code >>> 1);
            if ((code & 1) == 1) {
              totalTermFreq[i] = docFreq[i];
            } else {
              totalTermFreq[i] = docFreq[i] + statsReader.readVLong();
            }
          } else {
            docFreq[i] = code;
          }
        }
      }

      /** Load current metadata shard */
      final void refillMetadata() throws IOException {
        final int offset = metaBlockOrd * numSkipInfo;
        final int metaLongsFP = (int)skipInfo[offset + 1];
        final int metaBytesFP = (int)skipInfo[offset + 2];
        metaLongsReader.setPosition(metaLongsFP);
        // First entry is delta-coded against the skip entry's base values;
        // subsequent entries are delta-coded against the previous entry.
        for (int j = 0; j < longsSize; j++) {
          longs[0][j] = skipInfo[offset + 3 + j] + metaLongsReader.readVLong();
        }
        bytesStart[0] = metaBytesFP;
        bytesLength[0] = (int)metaLongsReader.readVLong();
        for (int i = 1; i < INTERVAL && !metaLongsReader.eof(); i++) {
          for (int j = 0; j < longsSize; j++) {
            longs[i][j] = longs[i-1][j] + metaLongsReader.readVLong();
          }
          bytesStart[i] = bytesStart[i-1] + bytesLength[i-1];
          bytesLength[i] = (int)metaLongsReader.readVLong();
        }
      }

      @Override
      public TermState termState() throws IOException {
        decodeMetaData();
        return state.clone();
      }

      @Override
      public int docFreq() throws IOException {
        return state.docFreq;
      }

      @Override
      public long totalTermFreq() throws IOException {
        return state.totalTermFreq;
      }

      @Override
      public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
        decodeMetaData();
        return postingsReader.docs(fieldInfo, state, liveDocs, reuse, flags);
      }

      @Override
      public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
        if (!hasPositions()) {
          return null;
        }
        decodeMetaData();
        return postingsReader.docsAndPositions(fieldInfo, state, liveDocs, reuse, flags);
      }

      // TODO: this can be achieved by making use of Util.getByOutput()
      //       and should have related tests
      @Override
      public void seekExact(long ord) throws IOException {
        throw new UnsupportedOperationException();
      }

      @Override
      public long ord() {
        throw new UnsupportedOperationException();
      }
    }

    // Iterates through all terms in this field
    private final class SegmentTermsEnum extends BaseTermsEnum {
      final BytesRefFSTEnum<Long> fstEnum;
      /* Current term, null when enum ends or unpositioned */
      BytesRef term;
      /* True when current term's metadata is decoded */
      boolean decoded;
      /* True when current enum is 'positioned' by seekExact(TermState) */
      boolean seekPending;

      SegmentTermsEnum() throws IOException {
        this.fstEnum = new BytesRefFSTEnum<>(index);
        this.decoded = false;
        this.seekPending = false;
      }

      @Override
      public BytesRef term() throws IOException {
        return term;
      }

      /** Decodes metadata lazily; skipped while a seekExact(TermState) is pending. */
      @Override
      void decodeMetaData() throws IOException {
        if (!decoded && !seekPending) {
          super.decodeMetaData();
          decoded = true;
        }
      }

      // Update current enum according to FSTEnum
      void updateEnum(final InputOutput<Long> pair) throws IOException {
        if (pair == null) {
          term = null;
        } else {
          term = pair.input;
          ord = pair.output;
          decodeStats();
        }
        decoded = false;
        seekPending = false;
      }

      @Override
      public BytesRef next() throws IOException {
        if (seekPending) {  // previously positioned, but termOutputs not fetched
          seekPending = false;
          SeekStatus status = seekCeil(term);
          assert status == SeekStatus.FOUND;  // must positioned on valid term
        }
        updateEnum(fstEnum.next());
        return term;
      }

      @Override
      public boolean seekExact(BytesRef target) throws IOException {
        updateEnum(fstEnum.seekExact(target));
        return term != null;
      }

      @Override
      public SeekStatus seekCeil(BytesRef target) throws IOException {
        updateEnum(fstEnum.seekCeil(target));
        if (term == null) {
          return SeekStatus.END;
        } else {
          return term.equals(target) ? SeekStatus.FOUND : SeekStatus.NOT_FOUND;
        }
      }

      /** Fast positioning from a cached TermState; real seek is deferred to next(). */
      @Override
      public void seekExact(BytesRef target, TermState otherState) {
        if (!target.equals(term)) {
          state.copyFrom(otherState);
          term = BytesRef.deepCopyOf(target);
          seekPending = true;
        }
      }
    }

    // Iterates intersect result with automaton (cannot seek!)
    private final class IntersectTermsEnum extends BaseTermsEnum {
      /* Current term, null when enum ends or unpositioned */
      BytesRefBuilder term;
      /* True when current term's metadata is decoded */
      boolean decoded;
      /* True when there is pending term when calling next() */
      boolean pending;
      /* stack to record how current term is constructed,
       * used to accumulate metadata or rewind term:
       *   level == term.length + 1,
       *         == 0 when term is null */
      Frame[] stack;
      int level;
      /* term dict fst */
      final FST<Long> fst;
      final FST.BytesReader fstReader;
      final Outputs<Long> fstOutputs;
      /* query automaton to intersect with */
      final ByteRunAutomaton fsa;

      /** One step of the FST x automaton walk: an FST arc plus the automaton state. */
      private final class Frame {
        /* fst stats */
        FST.Arc<Long> arc;
        /* automaton stats */
        int state;

        Frame() {
          this.arc = new FST.Arc<>();
          this.state = -1;
        }

        public String toString() {
          return "arc=" + arc + " state=" + state;
        }
      }

      IntersectTermsEnum(CompiledAutomaton compiled, BytesRef startTerm) throws IOException {
        //if (TEST) System.out.println("Enum init, startTerm=" + startTerm);
        this.fst = index;
        this.fstReader = fst.getBytesReader();
        this.fstOutputs = index.outputs;
        this.fsa = compiled.runAutomaton;
        this.level = -1;
        this.stack = new Frame[16];
        for (int i = 0 ; i < stack.length; i++) {
          this.stack[i] = new Frame();
        }
        Frame frame;
        // Bottom of the stack: a sentinel frame that is never popped.
        frame = loadVirtualFrame(newFrame());
        this.level++;
        frame = loadFirstFrame(newFrame());
        pushFrame(frame);
        this.decoded = false;
        this.pending = false;
        if (startTerm == null) {
          pending = isAccept(topFrame());
        } else {
          doSeekCeil(startTerm);
          // pending only when positioned strictly after startTerm on an accepted term.
          pending = (term == null || !startTerm.equals(term.get())) && isValid(topFrame()) && isAccept(topFrame());
        }
      }

      @Override
      public BytesRef term() throws IOException {
        return term == null ? null : term.get();
      }

      @Override
      void decodeMetaData() throws IOException {
        if (!decoded) {
          super.decodeMetaData();
          decoded = true;
        }
      }

      /** Derives the current ord from the accumulated FST output before decoding stats. */
      @Override
      void decodeStats() throws IOException {
        final FST.Arc<Long> arc = topFrame().arc;
        assert arc.nextFinalOutput == fstOutputs.getNoOutput();
        ord = arc.output;
        super.decodeStats();
      }

      @Override
      public SeekStatus seekCeil(BytesRef target) throws IOException {
        throw new UnsupportedOperationException();
      }

      /** DFS over the FST restricted to paths the automaton accepts. */
      @Override
      public BytesRef next() throws IOException {
        //if (TEST) System.out.println("Enum next()");
        if (pending) {
          pending = false;
          decodeStats();
          return term();
        }
        decoded = false;
        DFS:
        while (level > 0) {
          Frame frame = newFrame();
          if (loadExpandFrame(topFrame(), frame) != null) {  // has valid target
            pushFrame(frame);
            if (isAccept(frame)) {  // gotcha
              break;
            }
            continue;  // check next target
          }
          frame = popFrame();
          while(level > 0) {
            if (loadNextFrame(topFrame(), frame) != null) {  // has valid sibling
              pushFrame(frame);
              if (isAccept(frame)) {  // gotcha
                break DFS;
              }
              continue DFS;  // check next target
            }
            frame = popFrame();
          }
          return null;
        }
        decodeStats();
        return term();
      }

      /** Positions on the smallest accepted term >= target, or null if none. */
      BytesRef doSeekCeil(BytesRef target) throws IOException {
        //if (TEST) System.out.println("Enum doSeekCeil()");
        Frame frame = null;
        int label, upto = 0, limit = target.length;
        while (upto < limit) {  // to target prefix, or ceil label (rewind prefix)
          frame = newFrame();
          label = target.bytes[upto] & 0xff;
          frame = loadCeilFrame(label, topFrame(), frame);
          if (frame == null || frame.arc.label != label) {
            break;
          }
          assert isValid(frame);  // target must be fetched from automaton
          pushFrame(frame);
          upto++;
        }
        if (upto == limit) {  // got target
          return term();
        }
        if (frame != null) {  // got larger term('s prefix)
          pushFrame(frame);
          return isAccept(frame) ? term() : next();
        }
        while (level > 0) {  // got target's prefix, advance to larger term
          frame = popFrame();
          while (level > 0 && !canRewind(frame)) {
            frame = popFrame();
          }
          if (loadNextFrame(topFrame(), frame) != null) {
            pushFrame(frame);
            return isAccept(frame) ? term() : next();
          }
        }
        return null;
      }

      /** Virtual frame, never pop */
      Frame loadVirtualFrame(Frame frame) throws IOException {
        frame.arc.output = fstOutputs.getNoOutput();
        frame.arc.nextFinalOutput = fstOutputs.getNoOutput();
        frame.state = -1;
        return frame;
      }

      /** Load frame for start arc(node) on fst */
      Frame loadFirstFrame(Frame frame) throws IOException {
        frame.arc = fst.getFirstArc(frame.arc);
        frame.state = fsa.getInitialState();
        return frame;
      }

      /** Load frame for target arc(node) on fst */
      Frame loadExpandFrame(Frame top, Frame frame) throws IOException {
        if (!canGrow(top)) {
          return null;
        }
        frame.arc = fst.readFirstRealTargetArc(top.arc.target, frame.arc, fstReader);
        frame.state = fsa.step(top.state, frame.arc.label);
        //if (TEST) System.out.println("    loadExpand frame="+frame);
        if (frame.state == -1) {
          // Automaton rejected this label; try the next sibling instead.
          return loadNextFrame(top, frame);
        }
        return frame;
      }

      /** Load frame for sibling arc(node) on fst */
      Frame loadNextFrame(Frame top, Frame frame) throws IOException {
        if (!canRewind(frame)) {
          return null;
        }
        while (!frame.arc.isLast()) {
          frame.arc = fst.readNextRealArc(frame.arc, fstReader);
          frame.state = fsa.step(top.state, frame.arc.label);
          if (frame.state != -1) {
            break;
          }
        }
        //if (TEST) System.out.println("    loadNext frame="+frame);
        if (frame.state == -1) {
          return null;
        }
        return frame;
      }

      /** Load frame for target arc(node) on fst, so that
       *  arc.label >= label and !fsa.reject(arc.label) */
      Frame loadCeilFrame(int label, Frame top, Frame frame) throws IOException {
        FST.Arc<Long> arc = frame.arc;
        arc = Util.readCeilArc(label, fst, top.arc, arc, fstReader);
        if (arc == null) {
          return null;
        }
        frame.state = fsa.step(top.state, arc.label);
        //if (TEST) System.out.println("    loadCeil frame="+frame);
        if (frame.state == -1) {
          return loadNextFrame(top, frame);
        }
        return frame;
      }

      boolean isAccept(Frame frame) {  // reach a term both fst&fsa accepts
        return fsa.isAccept(frame.state) && frame.arc.isFinal();
      }

      boolean isValid(Frame frame) {  // reach a prefix both fst&fsa won't reject
        return /*frame != null &&*/ frame.state != -1;
      }

      boolean canGrow(Frame frame) {  // can walk forward on both fst&fsa
        return frame.state != -1 && FST.targetHasArcs(frame.arc);
      }

      boolean canRewind(Frame frame) {  // can jump to sibling
        return !frame.arc.isLast();
      }

      /** Pushes a frame, accumulating its FST output and appending its label to term. */
      void pushFrame(Frame frame) {
        final FST.Arc<Long> arc = frame.arc;
        arc.output = fstOutputs.add(topFrame().arc.output, arc.output);
        term = grow(arc.label);
        level++;
        assert frame == stack[level];
      }

      /** Pops the top frame, shrinking term by one byte. */
      Frame popFrame() {
        term = shrink();
        return stack[level--];
      }

      /** Returns the next free frame, growing the stack if needed. */
      Frame newFrame() {
        if (level+1 == stack.length) {
          final Frame[] temp = new Frame[ArrayUtil.oversize(level+2, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
          System.arraycopy(stack, 0, temp, 0, stack.length);
          for (int i = stack.length; i < temp.length; i++) {
            temp[i] = new Frame();
          }
          stack = temp;
        }
        return stack[level+1];
      }

      Frame topFrame() {
        return stack[level];
      }

      // NOTE: on the very first call (term == null) the label is NOT appended;
      // an empty builder is created instead — this matches the level/length
      // invariant documented on 'stack' above.
      BytesRefBuilder grow(int label) {
        if (term == null) {
          term = new BytesRefBuilder();
        } else {
          term.append((byte) label);
        }
        return term;
      }

      BytesRefBuilder shrink() {
        if (term.length() == 0) {
          term = null;
        } else {
          term.setLength(term.length() - 1);
        }
        return term;
      }
    }
  }

  /** Debug helper: breadth-first walk over all reachable FST nodes. */
  static<T> void walk(FST<T> fst) throws IOException {
    final ArrayList<FST.Arc<T>> queue = new ArrayList<>();
    final BitSet seen = new BitSet();
    final FST.BytesReader reader = fst.getBytesReader();
    final FST.Arc<T> startArc = fst.getFirstArc(new FST.Arc<T>());
    queue.add(startArc);
    while (!queue.isEmpty()) {
      final FST.Arc<T> arc = queue.remove(0);
      final long node = arc.target;
      //System.out.println(arc);
      if (FST.targetHasArcs(arc) && !seen.get((int) node)) {
        seen.set((int) node);
        fst.readFirstRealTargetArc(node, arc, reader);
        while (true) {
          queue.add(new FST.Arc<T>().copyFrom(arc));
          if (arc.isLast()) {
            break;
          } else {
            fst.readNextRealArc(arc, reader);
          }
        }
      }
    }
  }

  /** Sums the RAM held by the postings reader plus every field's in-memory blocks. */
  @Override
  public long ramBytesUsed() {
    long ramBytesUsed = postingsReader.ramBytesUsed();
    for (TermsReader r : fields.values()) {
      if (r.index != null) {
        ramBytesUsed += r.index.ramBytesUsed();
        ramBytesUsed += RamUsageEstimator.sizeOf(r.metaBytesBlock);
        ramBytesUsed += RamUsageEstimator.sizeOf(r.metaLongsBlock);
        ramBytesUsed += RamUsageEstimator.sizeOf(r.skipInfo);
        ramBytesUsed += RamUsageEstimator.sizeOf(r.statsBlock);
      }
    }
    return ramBytesUsed;
  }

  /** Delegates integrity checking to the postings reader. */
  @Override
  public void checkIntegrity() throws IOException {
    postingsReader.checkIntegrity();
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.enumerable;
import org.apache.calcite.adapter.enumerable.impl.AggAddContextImpl;
import org.apache.calcite.adapter.enumerable.impl.AggResultContextImpl;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.config.CalciteSystemProperty;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.linq4j.function.Function2;
import org.apache.calcite.linq4j.tree.BlockBuilder;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.linq4j.tree.ParameterExpression;
import org.apache.calcite.linq4j.tree.Types;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelCollations;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.hint.RelHint;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.util.BuiltInMethod;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import static java.util.Objects.requireNonNull;
/** Base class for EnumerableAggregate and EnumerableSortedAggregate. */
public abstract class EnumerableAggregateBase extends Aggregate {
/**
 * Creates an EnumerableAggregateBase; simply forwards all arguments to
 * {@link Aggregate}'s constructor.
 */
protected EnumerableAggregateBase(
    RelOptCluster cluster,
    RelTraitSet traitSet,
    List<RelHint> hints,
    RelNode input,
    ImmutableBitSet groupSet,
    @Nullable List<ImmutableBitSet> groupSets,
    List<AggregateCall> aggCalls) {
  super(cluster, traitSet, hints, input, groupSet, groupSets, aggCalls);
}
/**
 * Returns whether at least one aggregate implementation carries a non-empty
 * collation (i.e. its call requires sorted input).
 *
 * @param aggs aggregate implementation states to inspect
 * @return true if any call's collation differs from {@link RelCollations#EMPTY}
 */
protected static boolean hasOrderedCall(List<AggImpState> aggs) {
  return aggs.stream()
      .anyMatch(agg -> !agg.call.collation.equals(RelCollations.EMPTY));
}
/**
 * Emits code into {@code initBlock} that builds the parent accumulator from
 * {@code initExpressions}. For a synthetic record type the fields are
 * assigned one by one; otherwise a single record-construction expression is
 * added.
 *
 * @param initExpressions one initializer expression per accumulator field
 * @param initBlock       block builder receiving the generated statements
 * @param accPhysType     physical type of the accumulator row
 */
protected void declareParentAccumulator(List<Expression> initExpressions,
    BlockBuilder initBlock, PhysType accPhysType) {
  if (accPhysType.getJavaRowType()
      instanceof JavaTypeFactoryImpl.SyntheticRecordType) {
    // We have to initialize the SyntheticRecordType instance this way, to
    // avoid using a class constructor with too many parameters.
    final JavaTypeFactoryImpl.SyntheticRecordType synType =
        (JavaTypeFactoryImpl.SyntheticRecordType)
            accPhysType.getJavaRowType();
    final ParameterExpression record0_ =
        Expressions.parameter(accPhysType.getJavaRowType(), "record0");
    initBlock.add(Expressions.declare(0, record0_, null));
    initBlock.add(
        Expressions.statement(
            Expressions.assign(record0_,
                Expressions.new_(accPhysType.getJavaRowType()))));
    // Assign each record field from the matching initializer, by position.
    List<Types.RecordField> fieldList = synType.getRecordFields();
    for (int i = 0; i < initExpressions.size(); i++) {
      Expression right = initExpressions.get(i);
      initBlock.add(
          Expressions.statement(
              Expressions.assign(
                  Expressions.field(record0_, fieldList.get(i)), right)));
    }
    // The record variable is the block's result expression.
    initBlock.add(record0_);
  } else {
    initBlock.add(accPhysType.record(initExpressions));
  }
}
/**
* Implements the {@link AggregateLambdaFactory}.
*
* <p>Behavior depends upon ordering:
* <ul>
*
* <li>{@code hasOrderedCall == true} means there is at least one aggregate
* call including sort spec. We use {@link LazyAggregateLambdaFactory}
* implementation to implement sorted aggregates for that.
*
* <li>{@code hasOrderedCall == false} indicates to use
* {@link BasicAggregateLambdaFactory} to implement a non-sort
* aggregate.
*
* </ul>
*/
protected void implementLambdaFactory(BlockBuilder builder,
    PhysType inputPhysType, List<AggImpState> aggs,
    Expression accumulatorInitializer, boolean hasOrderedCall,
    ParameterExpression lambdaFactory) {
  if (hasOrderedCall) {
    // Sorted path: build a list of lazy accumulators, one per aggregate.
    ParameterExpression pe = Expressions.parameter(List.class,
        builder.newName("lazyAccumulators"));
    builder.add(
        Expressions.declare(0, pe, Expressions.new_(LinkedList.class)));
    for (AggImpState agg : aggs) {
      if (agg.call.collation.equals(RelCollations.EMPTY)) {
        // if the call does not require ordering, fallback to
        // use a non-sorted lazy accumulator.
        builder.add(
            Expressions.statement(
                Expressions.call(pe,
                    BuiltInMethod.COLLECTION_ADD.method,
                    Expressions.new_(BuiltInMethod.BASIC_LAZY_ACCUMULATOR.constructor,
                        requireNonNull(agg.accumulatorAdder, "agg.accumulatorAdder")))));
        continue;
      }
      // Ordered call: wrap the adder in a SourceSorter keyed on the
      // call's collation (pair = key selector + comparator).
      final Pair<Expression, Expression> pair =
          inputPhysType.generateCollationKey(
              agg.call.collation.getFieldCollations());
      builder.add(
          Expressions.statement(
              Expressions.call(pe,
                  BuiltInMethod.COLLECTION_ADD.method,
                  Expressions.new_(BuiltInMethod.SOURCE_SORTER.constructor,
                      requireNonNull(agg.accumulatorAdder, "agg.accumulatorAdder"),
                      pair.left, pair.right))));
    }
    builder.add(
        Expressions.declare(0, lambdaFactory,
            Expressions.new_(
                BuiltInMethod.LAZY_AGGREGATE_LAMBDA_FACTORY.constructor,
                accumulatorInitializer, pe)));
  } else {
    // when hasOrderedCall == false: plain (non-sorted) accumulator adders.
    ParameterExpression pe = Expressions.parameter(List.class,
        builder.newName("accumulatorAdders"));
    builder.add(
        Expressions.declare(0, pe, Expressions.new_(LinkedList.class)));
    for (AggImpState agg : aggs) {
      builder.add(
          Expressions.statement(
              Expressions.call(pe, BuiltInMethod.COLLECTION_ADD.method,
                  requireNonNull(agg.accumulatorAdder, "agg.accumulatorAdder"))));
    }
    builder.add(
        Expressions.declare(0, lambdaFactory,
            Expressions.new_(
                BuiltInMethod.BASIC_AGGREGATE_LAMBDA_FACTORY.constructor,
                accumulatorInitializer, pe)));
  }
}
/** An implementation of {@link AggContext}. */
protected class AggContextImpl implements AggContext {
private final AggImpState agg;
private final JavaTypeFactory typeFactory;
AggContextImpl(AggImpState agg, JavaTypeFactory typeFactory) {
this.agg = agg;
this.typeFactory = typeFactory;
}
@Override public SqlAggFunction aggregation() {
return agg.call.getAggregation();
}
@Override public RelDataType returnRelType() {
return agg.call.type;
}
@Override public Type returnType() {
return EnumUtils.javaClass(typeFactory, returnRelType());
}
@Override public List<? extends RelDataType> parameterRelTypes() {
return EnumUtils.fieldRowTypes(getInput().getRowType(), null,
agg.call.getArgList());
}
@Override public List<? extends Type> parameterTypes() {
return EnumUtils.fieldTypes(
typeFactory,
parameterRelTypes());
}
@Override public List<ImmutableBitSet> groupSets() {
return groupSets;
}
@Override public List<Integer> keyOrdinals() {
return groupSet.asList();
}
@Override public List<? extends RelDataType> keyRelTypes() {
return EnumUtils.fieldRowTypes(getInput().getRowType(), null,
groupSet.asList());
}
@Override public List<? extends Type> keyTypes() {
return EnumUtils.fieldTypes(typeFactory, keyRelTypes());
}
}
/**
 * Generates, for each aggregate, a {@code Function2(accumulator, inputRow)}
 * lambda that folds one input row into the accumulator, and stores it in
 * {@code agg.accumulatorAdder}. Also rewrites {@code agg.state} to field
 * references into the shared accumulator row, offset per aggregate.
 *
 * @param inParameter   parameter expression for the current input row
 * @param aggs          aggregate implementation states (mutated in place)
 * @param accPhysType   physical type of the shared accumulator row
 * @param accExpr       parameter expression for the accumulator
 * @param inputPhysType physical type of the input rows
 * @param builder       outer block builder receiving the adder declarations
 * @param implementor   supplies the SQL conformance for translation
 * @param typeFactory   type factory used by the row translator
 */
protected void createAccumulatorAdders(
    final ParameterExpression inParameter,
    final List<AggImpState> aggs,
    final PhysType accPhysType,
    final ParameterExpression accExpr,
    final PhysType inputPhysType,
    final BlockBuilder builder,
    EnumerableRelImplementor implementor,
    JavaTypeFactory typeFactory) {
  // stateOffset tracks where this aggregate's slice of the shared
  // accumulator row begins.
  for (int i = 0, stateOffset = 0; i < aggs.size(); i++) {
    final BlockBuilder builder2 = new BlockBuilder();
    final AggImpState agg = aggs.get(i);
    final int stateSize = requireNonNull(agg.state, "agg.state").size();
    final List<Expression> accumulator = new ArrayList<>(stateSize);
    for (int j = 0; j < stateSize; j++) {
      accumulator.add(accPhysType.fieldReference(accExpr, j + stateOffset));
    }
    // Replace the declared state with references into the accumulator row.
    agg.state = accumulator;
    stateOffset += stateSize;
    AggAddContext addContext =
        new AggAddContextImpl(builder2, accumulator) {
          @Override public List<RexNode> rexArguments() {
            List<RelDataTypeField> inputTypes =
                inputPhysType.getRowType().getFieldList();
            List<RexNode> args = new ArrayList<>();
            for (int index : agg.call.getArgList()) {
              args.add(RexInputRef.of(index, inputTypes));
            }
            return args;
          }

          // Negative filterArg means the call has no FILTER clause.
          @Override public @Nullable RexNode rexFilterArgument() {
            return agg.call.filterArg < 0
                ? null
                : RexInputRef.of(agg.call.filterArg,
                    inputPhysType.getRowType());
          }

          @Override public RexToLixTranslator rowTranslator() {
            return RexToLixTranslator.forAggregation(typeFactory,
                currentBlock(),
                new RexToLixTranslator.InputGetterImpl(inParameter,
                    inputPhysType),
                implementor.getConformance());
          }
        };
    agg.implementor.implementAdd(requireNonNull(agg.context, "agg.context"), addContext);
    // The lambda returns the (mutated) accumulator.
    builder2.add(accExpr);
    agg.accumulatorAdder = builder.append("accumulatorAdder",
        Expressions.lambda(Function2.class, builder2.toBlock(), accExpr,
            inParameter));
  }
}
/**
 * Creates the implementation context and state slots for each aggregate
 * call, returning the flattened list of Java types that make up the
 * combined accumulator.
 *
 * <p>Side effects: sets {@code agg.context} for every aggregate, sets
 * {@code agg.state} to the declared state variables (empty for stateless
 * aggregates), adds the declarations and reset code to {@code initBlock},
 * and appends the declared variables to {@code initExpressions}.
 *
 * @param initExpressions collects the expressions that initialize the accumulator
 * @param initBlock block receiving state declarations and reset logic
 * @param aggs per-aggregate implementation state
 * @param typeFactory factory for mapping rel types to Java types
 * @return the Java types of all aggregate state slots, in accumulator order
 */
protected List<Type> createAggStateTypes(
    final List<Expression> initExpressions,
    final BlockBuilder initBlock,
    final List<AggImpState> aggs,
    JavaTypeFactory typeFactory) {
  final List<Type> aggStateTypes = new ArrayList<>();
  for (final AggImpState agg : aggs) {
    agg.context = new AggContextImpl(agg, typeFactory);
    final List<Type> state = agg.implementor.getStateType(agg.context);
    if (state.isEmpty()) {
      // Stateless aggregate: contributes no slots to the accumulator.
      agg.state = ImmutableList.of();
      continue;
    }
    aggStateTypes.addAll(state);
    final List<Expression> decls = new ArrayList<>(state.size());
    for (int i = 0; i < state.size(); i++) {
      String aggName = "a" + agg.aggIdx;
      if (CalciteSystemProperty.DEBUG.value()) {
        // In debug mode, embed the aggregate function's name in the variable name.
        aggName = Util.toJavaId(agg.call.getAggregation().getName(), 0)
            .substring("ID$0$".length()) + aggName;
      }
      Type type = state.get(i);
      ParameterExpression pe =
          Expressions.parameter(type,
              initBlock.newName(aggName + "s" + i));
      initBlock.add(Expressions.declare(0, pe, null));
      decls.add(pe);
    }
    agg.state = decls;
    initExpressions.addAll(decls);
    // Let the implementor emit the code that resets the state variables.
    agg.implementor.implementReset(agg.context,
        new AggResultContextImpl(initBlock, agg.call, decls, null, null));
  }
  return aggStateTypes;
}
}
|
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.presto.ScheduledSplit;
import com.facebook.presto.TaskSource;
import com.facebook.presto.metadata.Split;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.UpdatablePageSource;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import javax.annotation.concurrent.GuardedBy;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
import static com.facebook.presto.operator.Operator.NOT_BLOCKED;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
//
// NOTE: As a general strategy the methods should "stage" a change and only
// process the actual change before lock release (DriverLockResult.close()).
// This assures that only one thread will be working with the operators at a
// time and state changer threads are not blocked.
//
/**
 * Drives the execution of a single pipeline of {@link Operator}s for a task:
 * moves pages from each operator to the next, feeds splits to source
 * operators, and tears the operators down when the pipeline finishes.
 *
 * <p>Thread-safety: all operator access is serialized through
 * {@code exclusiveLock}. State-changing methods "stage" work (new sources,
 * destruction) and the staged work is applied by whichever thread next
 * releases the lock (see {@link DriverLockResult#close()}).
 */
public class Driver
        implements Closeable
{
    private static final Logger log = Logger.get(Driver.class);

    private final DriverContext driverContext;
    private final List<Operator> operators;
    // source operators keyed by the plan node that supplies their splits
    private final Map<PlanNodeId, SourceOperator> sourceOperators;
    private final Optional<DeleteOperator> deleteOperator;

    // updates staged by updateSource(); drained under the lock by processNewSources()
    private final ConcurrentMap<PlanNodeId, TaskSource> newSources = new ConcurrentHashMap<>();

    // ALIVE -> NEED_DESTRUCTION -> DESTROYED; destruction happens on lock release
    private final AtomicReference<State> state = new AtomicReference<>(State.ALIVE);

    // serializes all access to the operators
    private final ReentrantLock exclusiveLock = new ReentrantLock();

    // thread currently holding exclusiveLock, recorded so close() can interrupt it
    @GuardedBy("this")
    private Thread lockHolder;

    // sources already applied to the operators; only touched while holding the lock
    @GuardedBy("exclusiveLock")
    private final Map<PlanNodeId, TaskSource> currentSources = new ConcurrentHashMap<>();

    private enum State
    {
        ALIVE, NEED_DESTRUCTION, DESTROYED
    }

    public Driver(DriverContext driverContext, Operator firstOperator, Operator... otherOperators)
    {
        this(requireNonNull(driverContext, "driverContext is null"),
                ImmutableList.<Operator>builder()
                        .add(requireNonNull(firstOperator, "firstOperator is null"))
                        .add(requireNonNull(otherOperators, "otherOperators is null"))
                        .build());
    }

    public Driver(DriverContext driverContext, List<Operator> operators)
    {
        this.driverContext = requireNonNull(driverContext, "driverContext is null");
        this.operators = ImmutableList.copyOf(requireNonNull(operators, "operators is null"));
        checkArgument(!operators.isEmpty(), "There must be at least one operator");

        // index the source operators and find the (at most one) delete operator
        ImmutableMap.Builder<PlanNodeId, SourceOperator> sourceOperators = ImmutableMap.builder();
        Optional<DeleteOperator> deleteOperator = Optional.empty();
        for (Operator operator : operators) {
            if (operator instanceof SourceOperator) {
                SourceOperator sourceOperator = (SourceOperator) operator;
                sourceOperators.put(sourceOperator.getSourceId(), sourceOperator);
            }
            else if (operator instanceof DeleteOperator) {
                checkArgument(!deleteOperator.isPresent(), "There must be at most one DeleteOperator");
                deleteOperator = Optional.of((DeleteOperator) operator);
            }
        }
        this.sourceOperators = sourceOperators.build();
        this.deleteOperator = deleteOperator;
    }

    public DriverContext getDriverContext()
    {
        return driverContext;
    }

    /** Plan nodes for which this driver accepts splits via {@link #updateSource}. */
    public Set<PlanNodeId> getSourceIds()
    {
        return sourceOperators.keySet();
    }

    @Override
    public void close()
    {
        // mark the service for destruction
        if (!state.compareAndSet(State.ALIVE, State.NEED_DESTRUCTION)) {
            return;
        }

        // if we can get the lock, attempt a clean shutdown; otherwise someone else will shutdown
        try (DriverLockResult lockResult = tryLockAndProcessPendingStateChanges(0, TimeUnit.MILLISECONDS)) {
            // if we did not get the lock, interrupt the lock holder
            if (!lockResult.wasAcquired()) {
                // there is a benign race condition here where the lock holder
                // can change between attempting to get the lock and grabbing
                // the synchronized lock here, but in either case we want to
                // interrupt the lock holder thread
                synchronized (this) {
                    if (lockHolder != null) {
                        lockHolder.interrupt();
                    }
                }
            }

            // clean shutdown is automatically triggered during lock release
        }
    }

    public boolean isFinished()
    {
        checkLockNotHeld("Can not check finished status while holding the driver lock");

        // if we can get the lock, check the operators directly; otherwise fall
        // back to the coarse driver/task state
        try (DriverLockResult lockResult = tryLockAndProcessPendingStateChanges(0, TimeUnit.MILLISECONDS)) {
            if (lockResult.wasAcquired()) {
                return isFinishedInternal();
            }
            else {
                // did not get the lock, so we can't check operators, or destroy
                return state.get() != State.ALIVE || driverContext.isDone();
            }
        }
    }

    private boolean isFinishedInternal()
    {
        checkLockHeld("Lock must be held to call isFinishedInternal");

        // the pipeline is finished once its last operator is finished
        boolean finished = state.get() != State.ALIVE || driverContext.isDone() || operators.get(operators.size() - 1).isFinished();
        if (finished) {
            state.compareAndSet(State.ALIVE, State.NEED_DESTRUCTION);
        }
        return finished;
    }

    /** Stages a source update; it is applied by whichever thread next holds the lock. */
    public void updateSource(TaskSource source)
    {
        checkLockNotHeld("Can not update sources while holding the driver lock");

        // does this driver have an operator for the specified source?
        if (!sourceOperators.containsKey(source.getPlanNodeId())) {
            return;
        }

        // stage the new updates
        while (true) {
            // attempt to update directly to the new source
            TaskSource currentNewSource = newSources.putIfAbsent(source.getPlanNodeId(), source);

            // if update succeeded, just break
            if (currentNewSource == null) {
                break;
            }

            // merge source into the current new source
            TaskSource newSource = currentNewSource.update(source);

            // if this is not a new source, just return
            if (newSource == currentNewSource) {
                break;
            }

            // attempt to replace the currentNewSource with the new source
            if (newSources.replace(source.getPlanNodeId(), currentNewSource, newSource)) {
                break;
            }

            // someone else updated while we were processing
        }

        // attempt to get the lock and process the updates we staged above
        // updates will be processed in close if and only if we got the lock
        tryLockAndProcessPendingStateChanges(0, TimeUnit.MILLISECONDS).close();
    }

    private void processNewSources()
    {
        checkLockHeld("Lock must be held to call processNewSources");

        // only update if the driver is still alive
        if (state.get() != State.ALIVE) {
            return;
        }

        // copy the pending sources
        // it is ok to "miss" a source added during the copy as it will be
        // handled on the next call to this method
        Map<PlanNodeId, TaskSource> sources = new HashMap<>(newSources);
        for (Entry<PlanNodeId, TaskSource> entry : sources.entrySet()) {
            // Remove the entries we are going to process from the newSources map.
            // It is ok if someone already updated the entry; we will catch it on
            // the next iteration.
            newSources.remove(entry.getKey(), entry.getValue());

            processNewSource(entry.getValue());
        }
    }

    private void processNewSource(TaskSource source)
    {
        checkLockHeld("Lock must be held to call processNewSources");

        // create new source
        Set<ScheduledSplit> newSplits;
        TaskSource currentSource = currentSources.get(source.getPlanNodeId());
        if (currentSource == null) {
            newSplits = source.getSplits();
            currentSources.put(source.getPlanNodeId(), source);
        }
        else {
            // merge the current source and the specified source
            TaskSource newSource = currentSource.update(source);

            // if this is not a new source, just return
            if (newSource == currentSource) {
                return;
            }

            // find the new splits to add
            newSplits = Sets.difference(newSource.getSplits(), currentSource.getSplits());
            currentSources.put(source.getPlanNodeId(), newSource);
        }

        // add new splits
        for (ScheduledSplit newSplit : newSplits) {
            Split split = newSplit.getSplit();

            SourceOperator sourceOperator = sourceOperators.get(source.getPlanNodeId());
            if (sourceOperator != null) {
                Supplier<Optional<UpdatablePageSource>> pageSource = sourceOperator.addSplit(split);
                if (deleteOperator.isPresent()) {
                    deleteOperator.get().setPageSource(pageSource);
                }
            }
        }

        // set no more splits
        if (source.isNoMoreSplits()) {
            sourceOperators.get(source.getPlanNodeId()).noMoreSplits();
        }
    }

    /**
     * Runs the pipeline for up to {@code duration} of wall time, returning
     * early with a blocked future if any operator blocks.
     */
    public ListenableFuture<?> processFor(Duration duration)
    {
        checkLockNotHeld("Can not process for a duration while holding the driver lock");

        requireNonNull(duration, "duration is null");

        long maxRuntime = duration.roundTo(TimeUnit.NANOSECONDS);

        try (DriverLockResult lockResult = tryLockAndProcessPendingStateChanges(100, TimeUnit.MILLISECONDS)) {
            if (lockResult.wasAcquired()) {
                driverContext.startProcessTimer();
                try {
                    long start = System.nanoTime();
                    do {
                        ListenableFuture<?> future = processInternal();
                        if (!future.isDone()) {
                            return future;
                        }
                    }
                    while (System.nanoTime() - start < maxRuntime && !isFinishedInternal());
                }
                finally {
                    driverContext.recordProcessed();
                }
            }
        }
        return NOT_BLOCKED;
    }

    /** Runs one quantum of work; returns a future that is done when the driver is unblocked. */
    public ListenableFuture<?> process()
    {
        checkLockNotHeld("Can not process while holding the driver lock");

        try (DriverLockResult lockResult = tryLockAndProcessPendingStateChanges(100, TimeUnit.MILLISECONDS)) {
            if (!lockResult.wasAcquired()) {
                // this is unlikely to happen unless the driver is being
                // destroyed and in that case the caller should notice
                // this state change by calling isFinished
                return NOT_BLOCKED;
            }
            return processInternal();
        }
    }

    private ListenableFuture<?> processInternal()
    {
        checkLockHeld("Lock must be held to call processInternal");

        try {
            if (!newSources.isEmpty()) {
                processNewSources();
            }

            // special handling for drivers with a single operator
            if (operators.size() == 1) {
                if (driverContext.isDone()) {
                    return NOT_BLOCKED;
                }

                // check if operator is blocked
                Operator current = operators.get(0);
                ListenableFuture<?> blocked = isBlocked(current);
                if (!blocked.isDone()) {
                    current.getOperatorContext().recordBlocked(blocked);
                    return blocked;
                }

                // there is only one operator so just finish it
                current.getOperatorContext().startIntervalTimer();
                current.finish();
                current.getOperatorContext().recordFinish();
                return NOT_BLOCKED;
            }

            boolean movedPage = false;
            for (int i = 0; i < operators.size() - 1 && !driverContext.isDone(); i++) {
                Operator current = operators.get(i);
                Operator next = operators.get(i + 1);

                // skip blocked operators
                if (!isBlocked(current).isDone()) {
                    continue;
                }
                if (!isBlocked(next).isDone()) {
                    continue;
                }

                // if the current operator is not finished and next operator needs input...
                if (!current.isFinished() && next.needsInput()) {
                    // get an output page from current operator
                    current.getOperatorContext().startIntervalTimer();
                    Page page = current.getOutput();
                    current.getOperatorContext().recordGetOutput(page);

                    // if we got an output page, add it to the next operator
                    if (page != null) {
                        next.getOperatorContext().startIntervalTimer();
                        next.addInput(page);
                        next.getOperatorContext().recordAddInput(page);
                        movedPage = true;
                    }
                }

                // if current operator is finished...
                if (current.isFinished()) {
                    // let next operator know there will be no more data
                    next.getOperatorContext().startIntervalTimer();
                    next.finish();
                    next.getOperatorContext().recordFinish();
                }
            }

            // if we did not move any pages, check if we are blocked
            if (!movedPage) {
                List<Operator> blockedOperators = new ArrayList<>();
                List<ListenableFuture<?>> blockedFutures = new ArrayList<>();
                for (Operator operator : operators) {
                    ListenableFuture<?> blocked = isBlocked(operator);
                    if (!blocked.isDone()) {
                        blockedOperators.add(operator);
                        blockedFutures.add(blocked);
                    }
                }

                if (!blockedFutures.isEmpty()) {
                    // unblock when the first future is complete
                    ListenableFuture<?> blocked = firstFinishedFuture(blockedFutures);
                    // driver records serial blocked time
                    driverContext.recordBlocked(blocked);
                    // each blocked operator is responsible for blocking the execution
                    // until one of the operators can continue
                    for (Operator operator : blockedOperators) {
                        operator.getOperatorContext().recordBlocked(blocked);
                    }
                    return blocked;
                }
            }

            return NOT_BLOCKED;
        }
        catch (Throwable t) {
            driverContext.failed(t);
            throw t;
        }
    }

    private void destroyIfNecessary()
    {
        checkLockHeld("Lock must be held to call destroyIfNecessary");

        if (!state.compareAndSet(State.NEED_DESTRUCTION, State.DESTROYED)) {
            return;
        }

        // record the current interrupted status (and clear the flag); we'll reset it later
        boolean wasInterrupted = Thread.interrupted();

        // if we get an error while closing a driver, record it and we will throw it at the end
        Throwable inFlightException = null;
        try {
            for (Operator operator : operators) {
                try {
                    operator.close();
                }
                catch (InterruptedException t) {
                    // don't record the stack
                    wasInterrupted = true;
                }
                catch (Throwable t) {
                    inFlightException = addSuppressedException(
                            inFlightException,
                            t,
                            "Error closing operator %s for task %s",
                            operator.getOperatorContext().getOperatorId(),
                            driverContext.getTaskId());
                }
                try {
                    operator.getOperatorContext().setMemoryReservation(0);
                }
                catch (Throwable t) {
                    inFlightException = addSuppressedException(
                            inFlightException,
                            t,
                            "Error freeing memory for operator %s for task %s",
                            operator.getOperatorContext().getOperatorId(),
                            driverContext.getTaskId());
                }
                try {
                    operator.getOperatorContext().closeSystemMemoryContext();
                }
                catch (Throwable t) {
                    inFlightException = addSuppressedException(
                            inFlightException,
                            t,
                            "Error freeing system memory for operator %s for task %s",
                            operator.getOperatorContext().getOperatorId(),
                            driverContext.getTaskId());
                }
            }
            driverContext.finished();
        }
        catch (Throwable t) {
            // this shouldn't happen but be safe
            inFlightException = addSuppressedException(
                    inFlightException,
                    t,
                    "Error destroying driver for task %s",
                    driverContext.getTaskId());
        }
        finally {
            // reset the interrupted flag
            if (wasInterrupted) {
                Thread.currentThread().interrupt();
            }
        }

        if (inFlightException != null) {
            // this will always be an Error or Runtime
            throw Throwables.propagate(inFlightException);
        }
    }

    // an operator that reports not-blocked may still be waiting for memory
    private static ListenableFuture<?> isBlocked(Operator operator)
    {
        ListenableFuture<?> blocked = operator.isBlocked();
        if (blocked.isDone()) {
            blocked = operator.getOperatorContext().isWaitingForMemory();
        }
        return blocked;
    }

    // Errors are accumulated for the caller to rethrow; other exceptions are only logged
    private static Throwable addSuppressedException(Throwable inFlightException, Throwable newException, String message, Object... args)
    {
        if (newException instanceof Error) {
            if (inFlightException == null) {
                inFlightException = newException;
            }
            else {
                // Self-suppression not permitted
                if (inFlightException != newException) {
                    inFlightException.addSuppressed(newException);
                }
            }
        }
        else {
            // log normal exceptions instead of rethrowing them
            log.error(newException, message, args);
        }
        return inFlightException;
    }

    private DriverLockResult tryLockAndProcessPendingStateChanges(int timeout, TimeUnit unit)
    {
        checkLockNotHeld("Can not acquire the driver lock while already holding the driver lock");

        return new DriverLockResult(timeout, unit);
    }

    private synchronized void checkLockNotHeld(String message)
    {
        checkState(Thread.currentThread() != lockHolder, message);
    }

    private synchronized void checkLockHeld(String message)
    {
        checkState(Thread.currentThread() == lockHolder, message);
    }

    // returns a future that completes as soon as any one of the given futures completes
    private static ListenableFuture<?> firstFinishedFuture(List<ListenableFuture<?>> futures)
    {
        SettableFuture<?> result = SettableFuture.create();
        ExecutorService executor = MoreExecutors.newDirectExecutorService();

        for (ListenableFuture<?> future : futures) {
            future.addListener(() -> result.set(null), executor);
        }

        return result;
    }

    /**
     * Result of a lock attempt. When the lock was acquired, {@link #close()}
     * applies any staged source updates and pending destruction before
     * releasing the lock, and re-acquires if new work was staged meanwhile.
     */
    private class DriverLockResult
            implements AutoCloseable
    {
        private final boolean acquired;

        private DriverLockResult(int timeout, TimeUnit unit)
        {
            acquired = tryAcquire(timeout, unit);
        }

        private boolean tryAcquire(int timeout, TimeUnit unit)
        {
            boolean acquired = false;
            try {
                acquired = exclusiveLock.tryLock(timeout, unit);
            }
            catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            if (acquired) {
                // record the holder so close() can interrupt it if needed
                synchronized (Driver.this) {
                    lockHolder = Thread.currentThread();
                }
            }
            return acquired;
        }

        public boolean wasAcquired()
        {
            return acquired;
        }

        @Override
        public void close()
        {
            if (!acquired) {
                return;
            }

            boolean done = false;
            while (!done) {
                done = true;
                // before releasing the lock, process any new sources and/or destroy the driver
                try {
                    try {
                        processNewSources();
                    }
                    finally {
                        destroyIfNecessary();
                    }
                }
                finally {
                    synchronized (Driver.this) {
                        lockHolder = null;
                    }
                    exclusiveLock.unlock();

                    // if new sources were added after we processed them, go around and try again
                    // in case someone else failed to acquire the lock and as a result won't update them
                    if (!newSources.isEmpty() && state.get() == State.ALIVE && tryAcquire(0, TimeUnit.MILLISECONDS)) {
                        done = false;
                    }
                }
            }
        }
    }
}
|
|
/*
* Copyright (c) 2017 Public Library of Science
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package org.ambraproject.wombat.model;
import com.google.common.collect.ImmutableList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.Serializable;
import java.util.List;
/**
 * An immutable model of a single citation ("reference") from an article.
 *
 * <p>Instances are normally created through the {@link Builder} returned by
 * {@link #build()}. The private no-argument constructor exists solely for
 * JAXB, which instantiates the class reflectively.
 */
@XmlRootElement(name = "reference")
@XmlAccessorType(XmlAccessType.FIELD)
public class Reference implements Serializable {

  private String title;
  private String chapterTitle;
  private Integer year;
  private String journal;
  private String fullArticleLink;
  private String volume;
  private Integer volumeNumber;
  private String issue;
  private String publisherName;
  private String isbn;
  private String fPage;
  private String lPage;
  private String doi;
  private String uri;
  private ImmutableList<NlmPerson> authors;
  private ImmutableList<String> collabAuthors;
  private String unStructuredReference;

  /** Coarse classification of a cited work, paired with its serialized string form. */
  public static enum PublicationType {
    JOURNAL ("journal"),
    BOOK ("book"),
    OTHER("other");

    private final String value;

    private PublicationType(String publicationType) {
      this.value = publicationType;
    }

    /** Returns the string used to represent this type externally. */
    public String getValue() {
      return value;
    }
  };

  /** For JAXB only; leaves every field null. */
  private Reference() {
  }

  private Reference(Builder builder) {
    this.title = builder.title;
    this.chapterTitle = builder.chapterTitle;
    this.year = builder.year;
    this.journal = builder.journal;
    this.fullArticleLink = builder.fullArticleLink;
    this.volume = builder.volume;
    this.volumeNumber = builder.volumeNumber;
    this.issue = builder.issue;
    this.publisherName = builder.publisherName;
    this.isbn = builder.isbn;
    this.fPage = builder.fPage;
    this.lPage = builder.lPage;
    this.doi = builder.doi;
    this.uri = builder.uri;
    // Default unset list fields to empty lists; ImmutableList.copyOf(null)
    // would otherwise throw a NullPointerException for builders that never
    // called setAuthors/setCollabAuthors.
    this.authors = builder.authors == null
        ? ImmutableList.<NlmPerson>of() : ImmutableList.copyOf(builder.authors);
    this.collabAuthors = builder.collabAuthors == null
        ? ImmutableList.<String>of() : ImmutableList.copyOf(builder.collabAuthors);
    this.unStructuredReference = builder.unStructuredReference;
  }

  public String getTitle() {
    return title;
  }

  public String getChapterTitle() {
    return chapterTitle;
  }

  public Integer getYear() {
    return year;
  }

  public String getJournal() {
    return journal;
  }

  public String getFullArticleLink() {
    return fullArticleLink;
  }

  public String getVolume() {
    return volume;
  }

  public Integer getVolumeNumber() {
    return volumeNumber;
  }

  public String getIssue() {
    return issue;
  }

  public String getPublisherName() {
    return publisherName;
  }

  public String getIsbn() {
    return isbn;
  }

  public String getfPage() {
    return fPage;
  }

  public String getlPage() {
    return lPage;
  }

  public String getDoi() {
    return doi;
  }

  public String getUri() {
    return uri;
  }

  /** Returns the structured author list; immutable. */
  public List<NlmPerson> getAuthors() {
    return authors;
  }

  /** Returns the collaborative-author names; immutable. */
  public List<String> getCollabAuthors() {
    return collabAuthors;
  }

  public String getUnStructuredReference() {
    return unStructuredReference;
  }

  /** Returns a new, empty builder. (Name kept for source compatibility.) */
  public static Builder build() {
    return new Builder();
  }

  /** Mutable builder for {@link Reference}; call {@link Builder#build()} to finish. */
  public static class Builder {
    private String title;
    private String chapterTitle;
    private Integer year;
    private String journal;
    private String fullArticleLink;
    private String volume;
    private Integer volumeNumber;
    private String issue;
    private String publisherName;
    private String isbn;
    private String fPage;
    private String lPage;
    private String doi;
    private String uri;
    private List<NlmPerson> authors;
    private List<String> collabAuthors;
    private String unStructuredReference;

    public Builder(){
    }

    /** Initializes the builder with all values copied from {@code reference}. */
    public Builder(Reference reference) {
      this.title = reference.title;
      this.chapterTitle = reference.chapterTitle;
      this.year = reference.year;
      this.journal = reference.journal;
      this.fullArticleLink = reference.fullArticleLink;
      this.volume = reference.volume;
      this.volumeNumber = reference.volumeNumber;
      this.issue = reference.issue;
      this.publisherName = reference.publisherName;
      this.isbn = reference.isbn;
      this.fPage = reference.fPage;
      this.lPage = reference.lPage;
      this.doi = reference.doi;
      this.uri = reference.uri;
      // A JAXB-created Reference may have null lists; guard the copies.
      this.authors = reference.authors == null
          ? ImmutableList.<NlmPerson>of() : ImmutableList.copyOf(reference.authors);
      this.collabAuthors = reference.collabAuthors == null
          ? ImmutableList.<String>of() : ImmutableList.copyOf(reference.collabAuthors);
      this.unStructuredReference = reference.unStructuredReference;
    }

    public Builder setTitle(String title) {
      this.title = title;
      return this;
    }

    public Builder setChapterTitle(String chapterTitle) {
      this.chapterTitle = chapterTitle;
      return this;
    }

    public Builder setYear(Integer year) {
      this.year = year;
      return this;
    }

    public Builder setJournal(String journal) {
      this.journal = journal;
      return this;
    }

    public Builder setFullArticleLink(String fullArticleLink) {
      this.fullArticleLink = fullArticleLink;
      return this;
    }

    public Builder setVolume(String volume) {
      this.volume = volume;
      return this;
    }

    public Builder setVolumeNumber(Integer volumeNumber) {
      this.volumeNumber = volumeNumber;
      return this;
    }

    public Builder setIssue(String issue) {
      this.issue = issue;
      return this;
    }

    public Builder setPublisherName(String publisherName) {
      this.publisherName = publisherName;
      return this;
    }

    public Builder setIsbn(String isbn) {
      this.isbn = isbn;
      return this;
    }

    public Builder setfPage(String fPage) {
      this.fPage = fPage;
      return this;
    }

    public Builder setlPage(String lPage) {
      this.lPage = lPage;
      return this;
    }

    public Builder setDoi(String doi) {
      this.doi = doi;
      return this;
    }

    public Builder setUri(String uri) {
      this.uri = uri;
      return this;
    }

    public Builder setAuthors(List<NlmPerson> authors) {
      this.authors = authors;
      return this;
    }

    public Builder setCollabAuthors(List<String> collabAuthors) {
      this.collabAuthors = collabAuthors;
      return this;
    }

    public Builder setUnStructuredReference(String unStructuredReference) {
      this.unStructuredReference = unStructuredReference;
      return this;
    }

    public Reference build() {
      return new Reference(this);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    Reference reference = (Reference) o;

    if (authors != null ? !authors.equals(reference.authors) : reference.authors != null) return false;
    if (chapterTitle != null ? !chapterTitle.equals(reference.chapterTitle) : reference.chapterTitle != null)
      return false;
    if (collabAuthors != null ? !collabAuthors.equals(reference.collabAuthors) : reference.collabAuthors != null)
      return false;
    if (doi != null ? !doi.equals(reference.doi) : reference.doi != null) return false;
    if (fPage != null ? !fPage.equals(reference.fPage) : reference.fPage != null) return false;
    // fullArticleLink was previously omitted from both equals() and hashCode();
    // include it so logically different references do not compare equal.
    if (fullArticleLink != null ? !fullArticleLink.equals(reference.fullArticleLink) : reference.fullArticleLink != null)
      return false;
    if (isbn != null ? !isbn.equals(reference.isbn) : reference.isbn != null) return false;
    if (issue != null ? !issue.equals(reference.issue) : reference.issue != null) return false;
    if (journal != null ? !journal.equals(reference.journal) : reference.journal != null) return false;
    if (lPage != null ? !lPage.equals(reference.lPage) : reference.lPage != null) return false;
    if (publisherName != null ? !publisherName.equals(reference.publisherName) : reference.publisherName != null)
      return false;
    if (title != null ? !title.equals(reference.title) : reference.title != null) return false;
    if (unStructuredReference != null ? !unStructuredReference.equals(reference.unStructuredReference) : reference.unStructuredReference != null)
      return false;
    if (uri != null ? !uri.equals(reference.uri) : reference.uri != null) return false;
    if (volume != null ? !volume.equals(reference.volume) : reference.volume != null) return false;
    if (volumeNumber != null ? !volumeNumber.equals(reference.volumeNumber) : reference.volumeNumber != null)
      return false;
    if (year != null ? !year.equals(reference.year) : reference.year != null) return false;

    return true;
  }

  @Override
  public int hashCode() {
    int result = title != null ? title.hashCode() : 0;
    result = 31 * result + (chapterTitle != null ? chapterTitle.hashCode() : 0);
    result = 31 * result + (year != null ? year.hashCode() : 0);
    result = 31 * result + (journal != null ? journal.hashCode() : 0);
    // kept in sync with equals(): fullArticleLink participates in both
    result = 31 * result + (fullArticleLink != null ? fullArticleLink.hashCode() : 0);
    result = 31 * result + (volume != null ? volume.hashCode() : 0);
    result = 31 * result + (volumeNumber != null ? volumeNumber.hashCode() : 0);
    result = 31 * result + (issue != null ? issue.hashCode() : 0);
    result = 31 * result + (publisherName != null ? publisherName.hashCode() : 0);
    result = 31 * result + (isbn != null ? isbn.hashCode() : 0);
    result = 31 * result + (fPage != null ? fPage.hashCode() : 0);
    result = 31 * result + (lPage != null ? lPage.hashCode() : 0);
    result = 31 * result + (doi != null ? doi.hashCode() : 0);
    result = 31 * result + (uri != null ? uri.hashCode() : 0);
    result = 31 * result + (authors != null ? authors.hashCode() : 0);
    result = 31 * result + (collabAuthors != null ? collabAuthors.hashCode() : 0);
    result = 31 * result + (unStructuredReference != null ? unStructuredReference.hashCode() : 0);
    return result;
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config;
import org.apache.dubbo.common.Constants;
import org.apache.dubbo.common.compiler.support.AdaptiveCompiler;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.CollectionUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.config.support.Parameter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* The application info
*
* @export
*/
public class ApplicationConfig extends AbstractConfig {
private static final long serialVersionUID = 5508512956753757169L;
/**
* Application name
*/
private String name;
/**
* The application version
*/
private String version;
/**
* Application owner
*/
private String owner;
/**
* Application's organization (BU)
*/
private String organization;
/**
* Architecture layer
*/
private String architecture;
/**
* Environment, e.g. dev, test or production
*/
private String environment;
/**
* Java compiler
*/
private String compiler;
/**
* The type of the log access
*/
private String logger;
/**
* Registry centers
*/
private List<RegistryConfig> registries;
private String registryIds;
/**
* Monitor center
*/
private MonitorConfig monitor;
/**
* Is default or not
*/
private Boolean isDefault;
/**
* Directory for saving thread dump
*/
private String dumpDirectory;
/**
* Whether to enable qos or not
*/
private Boolean qosEnable;
/**
* The qos port to listen
*/
private Integer qosPort;
/**
* Should we accept foreign ip or not?
*/
private Boolean qosAcceptForeignIp;
/**
* Customized parameters
*/
private Map<String, String> parameters;
/**
* Config the shutdown.wait
*/
private String shutwait;
private Boolean preferPublicIp;
/** No-argument constructor; properties are populated later via setters. */
public ApplicationConfig() {
}
/**
 * Creates a config for the named application.
 *
 * @param name the application name; also becomes the config id when none is set
 */
public ApplicationConfig(String name) {
    setName(name);
}
/** The application name, exported under {@code Constants.APPLICATION_KEY}; required. */
@Parameter(key = Constants.APPLICATION_KEY, required = true, useKeyAsProperty = false)
public String getName() {
    return name;
}
public void setName(String name) {
    // delegate validation of the value to the inherited checkName helper
    checkName(Constants.NAME, name);
    this.name = name;
    // default the config id to the application name when no id was given
    if (StringUtils.isEmpty(id)) {
        id = name;
    }
}
/** The application version, exported as the "application.version" parameter. */
@Parameter(key = "application.version")
public String getVersion() {
    return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getOwner() {
return owner;
}
public void setOwner(String owner) {
checkMultiName(Constants.OWNER, owner);
this.owner = owner;
}
public String getOrganization() {
return organization;
}
public void setOrganization(String organization) {
checkName(Constants.ORGANIZATION, organization);
this.organization = organization;
}
public String getArchitecture() {
return architecture;
}
public void setArchitecture(String architecture) {
checkName(Constants.ARCHITECTURE, architecture);
this.architecture = architecture;
}
public String getEnvironment() {
return environment;
}
public void setEnvironment(String environment) {
checkName(Constants.ENVIRONMENT, environment);
if (environment != null) {
if (!(Constants.DEVELOPMENT_ENVIRONMENT.equals(environment)
|| Constants.TEST_ENVIRONMENT.equals(environment)
|| Constants.PRODUCTION_ENVIRONMENT.equals(environment))) {
throw new IllegalStateException(String.format("Unsupported environment: %s, only support %s/%s/%s, default is %s.",
environment,
Constants.DEVELOPMENT_ENVIRONMENT,
Constants.TEST_ENVIRONMENT,
Constants.PRODUCTION_ENVIRONMENT,
Constants.PRODUCTION_ENVIRONMENT));
}
}
this.environment = environment;
}
public RegistryConfig getRegistry() {
return CollectionUtils.isEmpty(registries) ? null : registries.get(0);
}
public void setRegistry(RegistryConfig registry) {
List<RegistryConfig> registries = new ArrayList<RegistryConfig>(1);
registries.add(registry);
this.registries = registries;
}
public List<RegistryConfig> getRegistries() {
return registries;
}
@SuppressWarnings({"unchecked"})
public void setRegistries(List<? extends RegistryConfig> registries) {
this.registries = (List<RegistryConfig>) registries;
}
@Parameter(excluded = true)
public String getRegistryIds() {
return registryIds;
}
public void setRegistryIds(String registryIds) {
this.registryIds = registryIds;
}
public MonitorConfig getMonitor() {
return monitor;
}
public void setMonitor(String monitor) {
this.monitor = new MonitorConfig(monitor);
}
public void setMonitor(MonitorConfig monitor) {
this.monitor = monitor;
}
public String getCompiler() {
return compiler;
}
public void setCompiler(String compiler) {
this.compiler = compiler;
AdaptiveCompiler.setDefaultCompiler(compiler);
}
public String getLogger() {
return logger;
}
public void setLogger(String logger) {
this.logger = logger;
LoggerFactory.setLoggerAdapter(logger);
}
public Boolean isDefault() {
return isDefault;
}
public void setDefault(Boolean isDefault) {
this.isDefault = isDefault;
}
@Parameter(key = Constants.DUMP_DIRECTORY)
public String getDumpDirectory() {
return dumpDirectory;
}
public void setDumpDirectory(String dumpDirectory) {
this.dumpDirectory = dumpDirectory;
}
@Parameter(key = Constants.QOS_ENABLE)
public Boolean getQosEnable() {
return qosEnable;
}
public void setQosEnable(Boolean qosEnable) {
this.qosEnable = qosEnable;
}
@Parameter(key = Constants.QOS_PORT)
public Integer getQosPort() {
return qosPort;
}
public void setQosPort(Integer qosPort) {
this.qosPort = qosPort;
}
@Parameter(key = Constants.ACCEPT_FOREIGN_IP)
public Boolean getQosAcceptForeignIp() {
return qosAcceptForeignIp;
}
public void setQosAcceptForeignIp(Boolean qosAcceptForeignIp) {
this.qosAcceptForeignIp = qosAcceptForeignIp;
}
public Map<String, String> getParameters() {
return parameters;
}
public void setParameters(Map<String, String> parameters) {
checkParameterName(parameters);
this.parameters = parameters;
}
public String getShutwait() {
return shutwait;
}
public void setShutwait(String shutwait) {
System.setProperty(Constants.SHUTDOWN_WAIT_KEY, shutwait);
this.shutwait = shutwait;
}
@Override
@Parameter(excluded = true)
public boolean isValid() {
return !StringUtils.isEmpty(name);
}
public Boolean getPreferPublicIp() {
return preferPublicIp;
}
public void setPreferPublicIp(Boolean preferPublicIp) {
this.preferPublicIp = preferPublicIp;
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.partition;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.source.extractor.extract.ExtractType;
import org.apache.gobblin.source.extractor.utils.Utils;
import org.apache.gobblin.source.extractor.watermark.DateWatermark;
import org.apache.gobblin.source.extractor.watermark.HourWatermark;
import org.apache.gobblin.source.extractor.watermark.SimpleWatermark;
import org.apache.gobblin.source.extractor.watermark.TimestampWatermark;
import org.apache.gobblin.source.extractor.watermark.WatermarkPredicate;
import org.apache.gobblin.source.extractor.watermark.WatermarkType;
/**
* An implementation of default partitioner for all types of sources
*/
public class Partitioner {
  private static final Logger LOG = LoggerFactory.getLogger(Partitioner.class);

  public static final String WATERMARKTIMEFORMAT = "yyyyMMddHHmmss";
  public static final String HAS_USER_SPECIFIED_PARTITIONS = "partitioner.hasUserSpecifiedPartitions";
  public static final String USER_SPECIFIED_PARTITIONS = "partitioner.userSpecifiedPartitions";

  /**
   * Orders partitions by ascending low watermark; null partitions sort first.
   */
  public static final Comparator<Partition> ascendingComparator = new Comparator<Partition>() {
    @Override
    public int compare(Partition p1, Partition p2) {
      if (p1 == null && p2 == null) {
        return 0;
      }
      if (p1 == null) {
        return -1;
      }
      if (p2 == null) {
        return 1;
      }
      return Long.compare(p1.getLowWatermark(), p2.getLowWatermark());
    }
  };

  private SourceState state;

  /**
   * Indicate if the user specifies a high watermark for the current run
   */
  @VisibleForTesting
  protected boolean hasUserSpecifiedHighWatermark;

  public Partitioner(SourceState state) {
    super();
    this.state = state;
    hasUserSpecifiedHighWatermark = false;
  }

  /**
   * Get the global partition of the whole data set, which has the global low and high watermarks
   *
   * @param previousWatermark previous watermark for computing the low watermark of current run
   * @return a Partition instance
   */
  public Partition getGlobalPartition(long previousWatermark) {
    ExtractType extractType =
        ExtractType.valueOf(state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase());
    WatermarkType watermarkType = WatermarkType.valueOf(
        state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE)
            .toUpperCase());

    WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType);
    int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark();

    // NOTE: getHighWatermark may set hasUserSpecifiedHighWatermark as a side effect,
    // so it must run before the Partition is constructed.
    long lowWatermark = getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark);
    long highWatermark = getHighWatermark(extractType, watermarkType);
    return new Partition(lowWatermark, highWatermark, true, hasUserSpecifiedHighWatermark);
  }

  /**
   * Get partitions with low and high water marks
   *
   * @param previousWatermark previous water mark from metadata
   * @return map of partition intervals
   */
  @Deprecated
  public HashMap<Long, Long> getPartitions(long previousWatermark) {
    HashMap<Long, Long> defaultPartition = Maps.newHashMap();
    if (!isWatermarkExists()) {
      defaultPartition.put(ConfigurationKeys.DEFAULT_WATERMARK_VALUE, ConfigurationKeys.DEFAULT_WATERMARK_VALUE);
      LOG.info("Watermark column or type not found - Default partition with low watermark and high watermark as "
          + ConfigurationKeys.DEFAULT_WATERMARK_VALUE);
      return defaultPartition;
    }

    ExtractType extractType =
        ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase());
    WatermarkType watermarkType = WatermarkType.valueOf(
        this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE)
            .toUpperCase());
    int interval =
        getUpdatedInterval(this.state.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_PARTITION_INTERVAL, 0),
            extractType, watermarkType);
    int sourceMaxAllowedPartitions = this.state.getPropAsInt(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, 0);
    int maxPartitions = (sourceMaxAllowedPartitions != 0 ? sourceMaxAllowedPartitions
        : ConfigurationKeys.DEFAULT_MAX_NUMBER_OF_PARTITIONS);

    WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType);
    int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark();

    LOG.info("is watermark override: " + this.isWatermarkOverride());
    LOG.info("is full extract: " + this.isFullDump());
    long lowWatermark = this.getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark);
    long highWatermark = this.getHighWatermark(extractType, watermarkType);

    if (lowWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE
        || highWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) {
      LOG.info(
          "Low watermark or high water mark is not found. Hence cannot generate partitions - Default partition with low watermark: "
              + lowWatermark + " and high watermark: " + highWatermark);
      defaultPartition.put(lowWatermark, highWatermark);
      return defaultPartition;
    }
    LOG.info("Generate partitions with low watermark: " + lowWatermark + "; high watermark: " + highWatermark
        + "; partition interval in hours: " + interval + "; Maximum number of allowed partitions: " + maxPartitions);
    return watermark.getPartitions(lowWatermark, highWatermark, interval, maxPartitions);
  }

  /**
   * Get an unordered list of partition with lowWatermark, highWatermark, and hasUserSpecifiedHighWatermark.
   *
   * @param previousWatermark previous water mark from metadata
   * @return an unordered list of partition
   */
  public List<Partition> getPartitionList(long previousWatermark) {
    if (state.getPropAsBoolean(HAS_USER_SPECIFIED_PARTITIONS)) {
      return createUserSpecifiedPartitions();
    }

    List<Partition> partitions = new ArrayList<>();

    /*
     * Use the deprecated getPartitions(long) as a helper function, avoid duplicating logic. When it can be removed, its
     * logic will be put here.
     */
    HashMap<Long, Long> partitionMap = getPartitions(previousWatermark);

    /*
     * Can't use highWatermark directly, as the partitionMap may have different precision. For example, highWatermark
     * may be specified to seconds, but partitionMap could be specified to hour or date.
     */
    Long highestWatermark = Collections.max(partitionMap.values());

    for (Map.Entry<Long, Long> entry : partitionMap.entrySet()) {
      Long partitionHighWatermark = entry.getValue();
      // Apply hasUserSpecifiedHighWatermark to the last partition, which has highestWatermark
      if (partitionHighWatermark.equals(highestWatermark)) {
        partitions.add(new Partition(entry.getKey(), partitionHighWatermark, true, hasUserSpecifiedHighWatermark));
      } else {
        // The partitionHighWatermark was computed on the fly not what user specifies
        partitions.add(new Partition(entry.getKey(), partitionHighWatermark, false));
      }
    }
    return partitions;
  }

  /**
   * Generate the partitions based on the lists specified by the user in job config
   */
  private List<Partition> createUserSpecifiedPartitions() {
    List<Partition> partitions = new ArrayList<>();

    List<String> watermarkPoints = state.getPropAsList(USER_SPECIFIED_PARTITIONS);
    if (watermarkPoints == null || watermarkPoints.isEmpty()) {
      LOG.info("There should be some partition points");
      long defaultWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
      partitions.add(new Partition(defaultWatermark, defaultWatermark, true, true));
      return partitions;
    }

    WatermarkType watermarkType = WatermarkType.valueOf(
        state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE)
            .toUpperCase());
    long lowWatermark = adjustWatermark(watermarkPoints.get(0), watermarkType);
    long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;

    // Only one partition point specified: the high watermark defaults to
    // "now" for time-based watermarks, and stays unbounded for SIMPLE ones.
    if (watermarkPoints.size() == 1) {
      if (watermarkType != WatermarkType.SIMPLE) {
        String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE);
        String currentTime = Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone);
        highWatermark = adjustWatermark(currentTime, watermarkType);
      }
      partitions.add(new Partition(lowWatermark, highWatermark, true, false));
      return partitions;
    }

    int i;
    for (i = 1; i < watermarkPoints.size() - 1; i++) {
      highWatermark = adjustWatermark(watermarkPoints.get(i), watermarkType);
      partitions.add(new Partition(lowWatermark, highWatermark, true));
      lowWatermark = highWatermark;
    }

    // Last partition
    highWatermark = adjustWatermark(watermarkPoints.get(i), watermarkType);
    ExtractType extractType =
        ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase());
    if (isFullDump() || isSnapshot(extractType)) {
      // The upper bounds can be removed for last work unit
      partitions.add(new Partition(lowWatermark, highWatermark, true, false));
    } else {
      // The upper bounds can not be removed for last work unit
      partitions.add(new Partition(lowWatermark, highWatermark, true, true));
    }

    return partitions;
  }

  /**
   * Adjust a watermark based on watermark type
   *
   * @param baseWatermark the original watermark
   * @param watermarkType Watermark Type
   * @return the adjusted watermark value
   */
  private static long adjustWatermark(String baseWatermark, WatermarkType watermarkType) {
    long result = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
    switch (watermarkType) {
      case SIMPLE:
        result = SimpleWatermark.adjustWatermark(baseWatermark, 0);
        break;
      case DATE:
        result = DateWatermark.adjustWatermark(baseWatermark, 0);
        break;
      case HOUR:
        result = HourWatermark.adjustWatermark(baseWatermark, 0);
        break;
      case TIMESTAMP:
        result = TimestampWatermark.adjustWatermark(baseWatermark, 0);
        break;
    }
    return result;
  }

  /**
   * Calculate interval in hours with the given interval
   *
   * @param inputInterval input interval
   * @param extractType Extract type
   * @param watermarkType Watermark type
   * @return interval in range
   */
  private static int getUpdatedInterval(int inputInterval, ExtractType extractType, WatermarkType watermarkType) {
    LOG.debug("Getting updated interval");
    if ((extractType == ExtractType.SNAPSHOT && watermarkType == WatermarkType.DATE)) {
      // DATE watermarks are day-granular; convert the day interval to hours.
      return inputInterval * 24;
    } else if (extractType == ExtractType.APPEND_DAILY) {
      // Daily appends need at least one day per partition.
      return (inputInterval < 1 ? 1 : inputInterval) * 24;
    } else {
      return inputInterval;
    }
  }

  /**
   * Get low water mark
   *
   * @param extractType Extract type
   * @param watermarkType Watermark type
   * @param previousWatermark Previous water mark
   * @param deltaForNextWatermark delta number for next water mark
   * @return low water mark
   */
  @VisibleForTesting
  protected long getLowWatermark(ExtractType extractType, WatermarkType watermarkType, long previousWatermark,
      int deltaForNextWatermark) {
    long lowWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
    if (this.isFullDump() || this.isWatermarkOverride()) {
      String timeZone =
          this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE);
      /*
       * SOURCE_QUERYBASED_START_VALUE could be:
       * - a simple string, e.g. "12345"
       * - a timestamp string, e.g. "20140101000000"
       * - a string with a time directive, e.g. "CURRENTDAY-X", "CURRENTHOUR-X", (X is a number)
       */
      lowWatermark =
          Utils.getLongWithCurrentDate(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE), timeZone);
      LOG.info("Overriding low water mark with the given start value: " + lowWatermark);
    } else {
      if (isSnapshot(extractType)) {
        lowWatermark = this.getSnapshotLowWatermark(watermarkType, previousWatermark, deltaForNextWatermark);
      } else {
        lowWatermark = this.getAppendLowWatermark(watermarkType, previousWatermark, deltaForNextWatermark);
      }
    }
    return (lowWatermark == 0 ? ConfigurationKeys.DEFAULT_WATERMARK_VALUE : lowWatermark);
  }

  /**
   * Get snapshot low water mark
   *
   * @param watermarkType Watermark type
   * @param previousWatermark Previous water mark
   * @param deltaForNextWatermark delta number for next water mark
   * @return snapshot low water mark
   */
  private long getSnapshotLowWatermark(WatermarkType watermarkType, long previousWatermark, int deltaForNextWatermark) {
    LOG.debug("Getting snapshot low water mark");
    String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE);
    if (isPreviousWatermarkExists(previousWatermark)) {
      if (isSimpleWatermark(watermarkType)) {
        return previousWatermark + deltaForNextWatermark - this.state
            .getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, 0);
      }
      DateTime wm = Utils.toDateTime(previousWatermark, WATERMARKTIMEFORMAT, timeZone).plusSeconds(
          (deltaForNextWatermark - this.state
              .getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, 0)));
      return Long.parseLong(Utils.dateTimeToString(wm, WATERMARKTIMEFORMAT, timeZone));
    }

    // If previous watermark is not found, override with the start value
    // (irrespective of source.is.watermark.override flag)
    long startValue =
        Utils.getLongWithCurrentDate(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE), timeZone);
    LOG.info("Overriding low water mark with the given start value: " + startValue);
    return startValue;
  }

  /**
   * Get append low water mark
   *
   * @param watermarkType Watermark type
   * @param previousWatermark Previous water mark
   * @param deltaForNextWatermark delta number for next water mark
   * @return append low water mark
   */
  private long getAppendLowWatermark(WatermarkType watermarkType, long previousWatermark, int deltaForNextWatermark) {
    LOG.debug("Getting append low water mark");
    String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE);
    if (isPreviousWatermarkExists(previousWatermark)) {
      if (isSimpleWatermark(watermarkType)) {
        return previousWatermark + deltaForNextWatermark;
      }
      DateTime wm =
          Utils.toDateTime(previousWatermark, WATERMARKTIMEFORMAT, timeZone).plusSeconds(deltaForNextWatermark);
      return Long.parseLong(Utils.dateTimeToString(wm, WATERMARKTIMEFORMAT, timeZone));
    }
    LOG.info("Overriding low water mark with start value: " + ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE);
    return Utils.getLongWithCurrentDate(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE), timeZone);
  }

  /**
   * Get high water mark
   *
   * @param extractType Extract type
   * @param watermarkType Watermark type
   * @return high water mark
   */
  @VisibleForTesting
  protected long getHighWatermark(ExtractType extractType, WatermarkType watermarkType) {
    LOG.debug("Getting high watermark");
    String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE);
    long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
    if (this.isWatermarkOverride()) {
      highWatermark = this.state.getPropAsLong(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, 0);
      if (highWatermark == 0) {
        highWatermark = Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone));
      } else {
        // User specifies SOURCE_QUERYBASED_END_VALUE
        hasUserSpecifiedHighWatermark = true;
      }
      LOG.info("Overriding high water mark with the given end value:" + highWatermark);
    } else {
      if (isSnapshot(extractType)) {
        highWatermark = this.getSnapshotHighWatermark(watermarkType);
      } else {
        highWatermark = this.getAppendHighWatermark(extractType);
      }
    }
    return (highWatermark == 0 ? ConfigurationKeys.DEFAULT_WATERMARK_VALUE : highWatermark);
  }

  /**
   * Get snapshot high water mark
   *
   * @param watermarkType Watermark type
   * @return snapshot high water mark
   */
  private long getSnapshotHighWatermark(WatermarkType watermarkType) {
    LOG.debug("Getting snapshot high water mark");
    if (isSimpleWatermark(watermarkType)) {
      return ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
    }
    String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE);
    return Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone));
  }

  /**
   * Get append high water mark
   *
   * @param extractType Extract type
   * @return append high water mark
   */
  private long getAppendHighWatermark(ExtractType extractType) {
    LOG.debug("Getting append high water mark");
    if (this.isFullDump()) {
      LOG.info("Overriding high water mark with end value:" + ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE);
      long highWatermark = this.state.getPropAsLong(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, 0);
      if (highWatermark != 0) {
        // User specifies SOURCE_QUERYBASED_END_VALUE
        hasUserSpecifiedHighWatermark = true;
      }
      return highWatermark;
    }
    return this.getAppendWatermarkCutoff(extractType);
  }

  /**
   * Get cutoff for high water mark
   *
   * @param extractType Extract type
   * @return cutoff
   */
  private long getAppendWatermarkCutoff(ExtractType extractType) {
    LOG.debug("Getting append water mark cutoff");
    long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
    String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE);
    AppendMaxLimitType limitType = getAppendLimitType(extractType,
        this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT));
    if (limitType == null) {
      LOG.debug("Limit type is not found");
      return highWatermark;
    }
    int limitDelta =
        getAppendLimitDelta(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT));
    // if it is CURRENTDATE or CURRENTHOUR then high water mark is current time
    if (limitDelta == 0) {
      highWatermark = Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone));
    }
    // if CURRENTDATE or CURRENTHOUR has offset then high water mark is end of day of the given offset
    else {
      int seconds = 3599; // x:59:59
      String format = null;
      switch (limitType) {
        case CURRENTDATE:
          format = "yyyyMMdd";
          limitDelta = limitDelta * 24 * 60 * 60;
          seconds = 86399; // 23:59:59
          break;
        case CURRENTHOUR:
          format = "yyyyMMddHH";
          limitDelta = limitDelta * 60 * 60;
          seconds = 3599; // x:59:59
          break;
        case CURRENTMINUTE:
          format = "yyyyMMddHHmm";
          limitDelta = limitDelta * 60;
          seconds = 59;
          break;
        case CURRENTSECOND:
          format = "yyyyMMddHHmmss";
          seconds = 0;
          break;
        default:
          break;
      }
      // Truncate "now - delta" to the unit's precision, then push it forward to
      // the last second of that unit (e.g. 23:59:59 for CURRENTDATE).
      DateTime deltaTime = getCurrentTime(timeZone).minusSeconds(limitDelta);
      DateTime previousTime =
          Utils.toDateTime(Utils.dateTimeToString(deltaTime, format, timeZone), format, timeZone).plusSeconds(seconds);
      highWatermark = Long.parseLong(Utils.dateTimeToString(previousTime, WATERMARKTIMEFORMAT, timeZone));
      // User specifies SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT
      hasUserSpecifiedHighWatermark = true;
    }
    return highWatermark;
  }

  /**
   * Get append max limit type from the input
   *
   * @param extractType Extract type
   * @param maxLimit user-configured max watermark limit, e.g. "CURRENTDATE-1"
   * @return Max limit type
   */
  private static AppendMaxLimitType getAppendLimitType(ExtractType extractType, String maxLimit) {
    LOG.debug("Getting append limit type");
    AppendMaxLimitType limitType;
    switch (extractType) {
      case APPEND_DAILY:
        limitType = AppendMaxLimitType.CURRENTDATE;
        break;
      case APPEND_HOURLY:
        limitType = AppendMaxLimitType.CURRENTHOUR;
        break;
      default:
        limitType = null;
        break;
    }
    // An explicit config value overrides the extract-type default.
    if (!Strings.isNullOrEmpty(maxLimit)) {
      LOG.debug("Getting append limit type from the config");
      String[] limitParams = maxLimit.split("-");
      if (limitParams.length >= 1) {
        limitType = AppendMaxLimitType.valueOf(limitParams[0]);
      }
    }
    return limitType;
  }

  /**
   * Get append max limit delta num
   *
   * @param maxLimit user-configured max watermark limit, e.g. "CURRENTDATE-1"
   * @return Max limit delta number (0 when no delta is present)
   */
  private static int getAppendLimitDelta(String maxLimit) {
    LOG.debug("Getting append limit delta");
    int limitDelta = 0;
    if (!Strings.isNullOrEmpty(maxLimit)) {
      String[] limitParams = maxLimit.split("-");
      if (limitParams.length >= 2) {
        limitDelta = Integer.parseInt(limitParams[1]);
      }
    }
    return limitDelta;
  }

  /**
   * true if previous water mark differs from the default water mark
   *
   * @param previousWatermark previous water mark
   * @return true if previous water mark exists
   */
  private static boolean isPreviousWatermarkExists(long previousWatermark) {
    return previousWatermark != ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
  }

  /**
   * true if water mark columns and water mark type provided
   *
   * @return true if water mark exists
   */
  private boolean isWatermarkExists() {
    return !Strings.isNullOrEmpty(this.state.getProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY))
        && !Strings.isNullOrEmpty(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE));
  }

  private static boolean isSnapshot(ExtractType extractType) {
    return extractType == ExtractType.SNAPSHOT;
  }

  private static boolean isSimpleWatermark(WatermarkType watermarkType) {
    return watermarkType == WatermarkType.SIMPLE;
  }

  /**
   * @return full dump or not
   */
  public boolean isFullDump() {
    return Boolean.parseBoolean(this.state.getProp(ConfigurationKeys.EXTRACT_IS_FULL_KEY));
  }

  /**
   * @return watermark override or not
   */
  public boolean isWatermarkOverride() {
    return Boolean.parseBoolean(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_WATERMARK_OVERRIDE));
  }

  /**
   * This thin function is introduced to facilitate testing, a way to mock current time
   *
   * @return current time in the given timeZone
   */
  @VisibleForTesting
  public DateTime getCurrentTime(String timeZone) {
    return Utils.getCurrentTime(timeZone);
  }
}
|
|
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.tools.devsvr.manager.api;
import io.apiman.manager.api.beans.apps.ApplicationBean;
import io.apiman.manager.api.beans.apps.ApplicationStatus;
import io.apiman.manager.api.beans.apps.ApplicationVersionBean;
import io.apiman.manager.api.beans.gateways.GatewayBean;
import io.apiman.manager.api.beans.gateways.GatewayType;
import io.apiman.manager.api.beans.idm.PermissionType;
import io.apiman.manager.api.beans.idm.RoleBean;
import io.apiman.manager.api.beans.idm.RoleMembershipBean;
import io.apiman.manager.api.beans.orgs.OrganizationBean;
import io.apiman.manager.api.beans.plans.PlanBean;
import io.apiman.manager.api.beans.plans.PlanStatus;
import io.apiman.manager.api.beans.plans.PlanVersionBean;
import io.apiman.manager.api.beans.policies.PolicyDefinitionBean;
import io.apiman.manager.api.beans.policies.PolicyDefinitionTemplateBean;
import io.apiman.manager.api.beans.services.EndpointType;
import io.apiman.manager.api.beans.services.ServiceBean;
import io.apiman.manager.api.beans.services.ServiceGatewayBean;
import io.apiman.manager.api.beans.services.ServicePlanBean;
import io.apiman.manager.api.beans.services.ServiceStatus;
import io.apiman.manager.api.beans.services.ServiceVersionBean;
import io.apiman.manager.api.core.IIdmStorage;
import io.apiman.manager.api.core.IStorage;
import io.apiman.manager.api.core.exceptions.StorageException;
import io.apiman.manager.test.server.DefaultTestDataSeeder;
import java.util.Date;
import java.util.HashSet;
/**
* Data seeder used for the dtgov dt api dev server.
*
* @author eric.wittmann@redhat.com
*/
@SuppressWarnings("nls")
public class ManagerApiDataSeeder extends DefaultTestDataSeeder {
    /**
     * Constructor. No state to initialize; all data seeding happens in seed().
     */
    public ManagerApiDataSeeder() {
    }
/**
* @see io.apiman.manager.test.server.DefaultTestDataSeeder#seed(io.apiman.manager.api.core.IIdmStorage, io.apiman.manager.api.core.IStorage)
*/
@Override
public void seed(IIdmStorage idmStorage, IStorage storage) throws StorageException {
super.seed(idmStorage, storage);
GatewayBean gateway = new GatewayBean();
gateway.setId("TheGateway");
gateway.setName("The Gateway");
gateway.setDescription("The only gateway needed for testing.");
gateway.setConfiguration("{ \"endpoint\" : \"http://localhost:6666/api/\", \"username\" : \"admin\", \"password\" : \"admin\" }");
gateway.setType(GatewayType.REST);
gateway.setCreatedBy("admin");
gateway.setCreatedOn(new Date());
gateway.setModifiedBy("admin");
gateway.setModifiedOn(new Date());
storage.beginTx();
storage.createGateway(gateway);
storage.commitTx();
// Create Organization Owner role
RoleBean role = new RoleBean();
role.setId("OrganizationOwner");
role.setName("Organization Owner");
role.setAutoGrant(true);
role.setDescription("This role is automatically given to users when they create an organization. It grants all permissions.");
role.setCreatedBy("admin");
role.setCreatedOn(new Date());
role.setPermissions(new HashSet<PermissionType>());
role.getPermissions().add(PermissionType.orgView);
role.getPermissions().add(PermissionType.orgEdit);
role.getPermissions().add(PermissionType.orgAdmin);
role.getPermissions().add(PermissionType.appView);
role.getPermissions().add(PermissionType.appEdit);
role.getPermissions().add(PermissionType.appAdmin);
role.getPermissions().add(PermissionType.planView);
role.getPermissions().add(PermissionType.planEdit);
role.getPermissions().add(PermissionType.planAdmin);
role.getPermissions().add(PermissionType.svcView);
role.getPermissions().add(PermissionType.svcEdit);
role.getPermissions().add(PermissionType.svcAdmin);
idmStorage.createRole(role);
// Create Application Developer role
role = new RoleBean();
role.setId("ApplicationDeveloper");
role.setName("Application Developer");
role.setDescription("This role allows users to perform standard application development tasks (manage applications but not services or plans).");
role.setCreatedBy("admin");
role.setCreatedOn(new Date());
role.setPermissions(new HashSet<PermissionType>());
role.getPermissions().add(PermissionType.orgView);
role.getPermissions().add(PermissionType.appView);
role.getPermissions().add(PermissionType.appEdit);
role.getPermissions().add(PermissionType.appAdmin);
idmStorage.createRole(role);
// Create Service Developer role
role = new RoleBean();
role.setId("ServiceDeveloper");
role.setName("Service Developer");
role.setDescription("This role allows users to perform standard service development tasks such as managing services and plans.");
role.setCreatedBy("admin");
role.setCreatedOn(new Date());
role.setPermissions(new HashSet<PermissionType>());
role.getPermissions().add(PermissionType.orgView);
role.getPermissions().add(PermissionType.svcView);
role.getPermissions().add(PermissionType.svcEdit);
role.getPermissions().add(PermissionType.svcAdmin);
role.getPermissions().add(PermissionType.planView);
role.getPermissions().add(PermissionType.planEdit);
role.getPermissions().add(PermissionType.planAdmin);
idmStorage.createRole(role);
storage.beginTx();
// Create JBoss Overlord org
OrganizationBean org = new OrganizationBean();
org.setId("JBossOverlord");
org.setName("JBoss Overlord");
org.setDescription("Overlord is the umbrella project that will bring governance to the JBoss SOA Platform and eventually beyond.");
org.setCreatedOn(new Date());
org.setCreatedBy("admin");
org.setModifiedOn(new Date());
org.setModifiedBy("admin");
storage.createOrganization(org);
// Create Apereo Bedework org
org = new OrganizationBean();
org.setId("ApereoBedework");
org.setName("Apereo Bedework");
org.setDescription("Bedework is an open-source enterprise calendar system that supports public, personal, and group calendaring.");
org.setCreatedOn(new Date());
org.setCreatedBy("admin");
org.setModifiedOn(new Date());
org.setModifiedBy("admin");
storage.createOrganization(org);
storage.commitTx();
// Make admin the owner of both orgs
RoleMembershipBean membership = RoleMembershipBean.create("admin", "OrganizationOwner", "JBossOverlord");
membership.setCreatedOn(new Date());
idmStorage.createMembership(membership);
membership = RoleMembershipBean.create("admin", "OrganizationOwner", "ApereoBedework");
membership.setCreatedOn(new Date());
idmStorage.createMembership(membership);
storage.beginTx();
// Create some plans
PlanBean plan = new PlanBean();
plan.setId("Platinum");
plan.setName("Platinum");
plan.setDescription("Provides subscribing applications with full access to the Services in this Organization.");
plan.setOrganization(storage.getOrganization("JBossOverlord"));
plan.setCreatedBy("admin");
plan.setCreatedOn(new Date());
storage.createPlan(plan);
PlanVersionBean pvb = new PlanVersionBean();
pvb.setVersion("1.0");
pvb.setStatus(PlanStatus.Created);
pvb.setPlan(plan);
pvb.setCreatedBy("admin");
pvb.setCreatedOn(new Date());
pvb.setModifiedBy("admin");
pvb.setModifiedOn(new Date());
storage.createPlanVersion(pvb);
plan = new PlanBean();
plan.setId("Gold");
plan.setName("Gold");
plan.setDescription("Provides subscribing applications with full access to a subset of Services. Also allows partial (rate limited) access to the rest.");
plan.setOrganization(storage.getOrganization("JBossOverlord"));
plan.setCreatedBy("admin");
plan.setCreatedOn(new Date());
storage.createPlan(plan);
pvb = new PlanVersionBean();
pvb.setVersion("1.0");
pvb.setStatus(PlanStatus.Created);
pvb.setPlan(plan);
pvb.setCreatedBy("admin");
pvb.setCreatedOn(new Date());
pvb.setModifiedBy("admin");
pvb.setModifiedOn(new Date());
storage.createPlanVersion(pvb);
pvb = new PlanVersionBean();
pvb.setVersion("1.2");
pvb.setStatus(PlanStatus.Created);
pvb.setPlan(plan);
pvb.setCreatedBy("bwayne");
pvb.setCreatedOn(new Date());
pvb.setModifiedBy("bwayne");
pvb.setModifiedOn(new Date());
storage.createPlanVersion(pvb);
storage.commitTx();
storage.beginTx();
// Create some applications
ApplicationBean app = new ApplicationBean();
app.setId("dtgov");
app.setName("dtgov");
app.setDescription("This is the official Git repository for the Governance DTGov project, which is intended to be a part of the JBoss Overlord.");
app.setOrganization(storage.getOrganization("JBossOverlord"));
app.setCreatedBy("admin");
app.setCreatedOn(new Date());
storage.createApplication(app);
ApplicationVersionBean avb = new ApplicationVersionBean();
avb.setVersion("1.0");
avb.setStatus(ApplicationStatus.Created);
avb.setApplication(app);
avb.setCreatedBy("admin");
avb.setCreatedOn(new Date());
avb.setModifiedBy("admin");
avb.setModifiedOn(new Date());
storage.createApplicationVersion(avb);
app = new ApplicationBean();
app.setId("rtgov");
app.setName("rtgov");
app.setDescription("This component provides the infrastructure to capture service activity information and then correlate...");
app.setOrganization(storage.getOrganization("JBossOverlord"));
app.setCreatedBy("admin");
app.setCreatedOn(new Date());
storage.createApplication(app);
avb = new ApplicationVersionBean();
avb.setVersion("1.0");
avb.setStatus(ApplicationStatus.Created);
avb.setApplication(app);
avb.setCreatedBy("admin");
avb.setCreatedOn(new Date());
avb.setModifiedBy("admin");
avb.setModifiedOn(new Date());
storage.createApplicationVersion(avb);
app = new ApplicationBean();
app.setId("gadget-server");
app.setName("gadget-server");
app.setDescription("This is a project that builds on the Apache Shindig as the open social gadget containers.");
app.setOrganization(storage.getOrganization("JBossOverlord"));
app.setCreatedBy("admin");
app.setCreatedOn(new Date());
storage.createApplication(app);
avb = new ApplicationVersionBean();
avb.setVersion("1.0");
avb.setStatus(ApplicationStatus.Created);
avb.setApplication(app);
avb.setCreatedBy("admin");
avb.setCreatedOn(new Date());
avb.setModifiedBy("admin");
avb.setModifiedOn(new Date());
storage.createApplicationVersion(avb);
storage.commitTx();
storage.beginTx();
// Create some services
ServiceBean service = new ServiceBean();
service.setId("s-ramp-api");
service.setName("s-ramp-api");
service.setDescription("Allows S-RAMP repository users to communicate with the repository via an Atom based API.");
service.setOrganization(storage.getOrganization("JBossOverlord"));
service.setCreatedOn(new Date());
service.setCreatedBy("admin");
storage.createService(service);
ServiceVersionBean svb = new ServiceVersionBean();
svb.setGateways(new HashSet<ServiceGatewayBean>());
svb.setPlans(new HashSet<ServicePlanBean>());
svb.setVersion("1.0");
svb.setStatus(ServiceStatus.Ready);
svb.setService(service);
svb.setCreatedBy("admin");
svb.setCreatedOn(new Date());
svb.setModifiedBy("admin");
svb.setModifiedOn(new Date());
svb.setEndpoint("http://localhost:9001/echo/s-ramp-server/");
svb.setEndpointType(EndpointType.rest);
svb.setGateways(new HashSet<ServiceGatewayBean>());
ServiceGatewayBean sgb = new ServiceGatewayBean();
sgb.setGatewayId("TheGateway");
svb.getGateways().add(sgb);
storage.createServiceVersion(svb);
storage.commitTx();
storage.beginTx();
// Create some policy definitions
PolicyDefinitionBean whitelistPolicyDef = new PolicyDefinitionBean();
whitelistPolicyDef.setId("IPWhitelistPolicy");
whitelistPolicyDef.setName("IP Whitelist Policy");
whitelistPolicyDef.setDescription("Only requests that originate from a specified set of valid IP addresses will be allowed through.");
whitelistPolicyDef.setIcon("thumbs-up");
whitelistPolicyDef.setPolicyImpl("class:io.apiman.gateway.engine.policies.IPWhitelistPolicy");
PolicyDefinitionTemplateBean templateBean = new PolicyDefinitionTemplateBean();
templateBean.setLanguage(null);
templateBean.setTemplate("Only requests that originate from the set of @{ipList.size()} configured IP address(es) will be allowed to invoke the managed service.");
whitelistPolicyDef.getTemplates().add(templateBean);
storage.createPolicyDefinition(whitelistPolicyDef);
PolicyDefinitionBean blacklistPolicyDef = new PolicyDefinitionBean();
blacklistPolicyDef.setId("IPBlacklistPolicy");
blacklistPolicyDef.setName("IP Blacklist Policy");
blacklistPolicyDef.setDescription("Only requests that originate from a specified set of valid IP addresses will be allowed through.");
blacklistPolicyDef.setIcon("thumbs-down");
blacklistPolicyDef.setPolicyImpl("class:io.apiman.gateway.engine.policies.IPBlacklistPolicy");
templateBean = new PolicyDefinitionTemplateBean();
templateBean.setLanguage(null);
templateBean.setTemplate("Requests that originate from the set of @{ipList.size()} configured IP address(es) will be denied access to the managed service.");
blacklistPolicyDef.getTemplates().add(templateBean);
storage.createPolicyDefinition(blacklistPolicyDef);
PolicyDefinitionBean basicAuthPolicyDef = new PolicyDefinitionBean();
basicAuthPolicyDef.setId("BASICAuthenticationPolicy");
basicAuthPolicyDef.setName("BASIC Authentication Policy");
basicAuthPolicyDef.setDescription("Enables HTTP BASIC Authentication on a service. Some configuration required.");
basicAuthPolicyDef.setIcon("lock");
basicAuthPolicyDef.setPolicyImpl("class:io.apiman.gateway.engine.policies.BasicAuthenticationPolicy");
templateBean = new PolicyDefinitionTemplateBean();
templateBean.setLanguage(null);
templateBean.setTemplate("Access to the service is protected by BASIC Authentication through the '@{realm}' authentication realm. @if{forwardIdentityHttpHeader != null}Successfully authenticated requests will forward the authenticated identity to the back end service via the '@{forwardIdentityHttpHeader}' custom HTTP header.@end{}");
basicAuthPolicyDef.getTemplates().add(templateBean);
storage.createPolicyDefinition(basicAuthPolicyDef);
PolicyDefinitionBean rateLimitPolicyDef = new PolicyDefinitionBean();
rateLimitPolicyDef.setId("RateLimitingPolicy");
rateLimitPolicyDef.setName("Rate Limiting Policy");
rateLimitPolicyDef.setDescription("Enforces rate configurable request rate limits on a service. This ensures that consumers can't overload a service with too many requests.");
rateLimitPolicyDef.setIcon("sliders");
rateLimitPolicyDef.setPolicyImpl("class:io.apiman.gateway.engine.policies.RateLimitingPolicy");
templateBean = new PolicyDefinitionTemplateBean();
templateBean.setLanguage(null);
templateBean.setTemplate("Consumers are limited to @{limit} requests per @{granularity} per @{period}.");
rateLimitPolicyDef.getTemplates().add(templateBean);
storage.createPolicyDefinition(rateLimitPolicyDef);
PolicyDefinitionBean ignoredResourcesPolicyDef = new PolicyDefinitionBean();
ignoredResourcesPolicyDef.setId("IgnoredResourcesPolicy");
ignoredResourcesPolicyDef.setName("Ignored Resources Policy");
ignoredResourcesPolicyDef.setDescription("Requests satisfying the provided regular expression will be ignored.");
ignoredResourcesPolicyDef.setIcon("eye-slash");
ignoredResourcesPolicyDef.setPolicyImpl("class:io.apiman.gateway.engine.policies.IgnoredResourcesPolicy");
templateBean = new PolicyDefinitionTemplateBean();
templateBean.setLanguage(null);
templateBean.setTemplate("Requests matching any of the @{pathsToIgnore.size()} regular expressions provided will receive a 404 error code.");
ignoredResourcesPolicyDef.getTemplates().add(templateBean);
storage.createPolicyDefinition(ignoredResourcesPolicyDef);
PolicyDefinitionBean authorizationPolicyDef = new PolicyDefinitionBean();
authorizationPolicyDef.setId("AuthorizationPolicy");
authorizationPolicyDef.setName("Authorization Policy");
authorizationPolicyDef.setDescription("Enables fine grained authorization to API resources based on authenticated user roles.");
authorizationPolicyDef.setIcon("users");
authorizationPolicyDef.setPolicyImpl("class:io.apiman.gateway.engine.policies.AuthorizationPolicy");
templateBean = new PolicyDefinitionTemplateBean();
templateBean.setLanguage(null);
templateBean.setTemplate("Appropriate authorization roles are required. There are @{rules.size()} authorization rules defined.");
authorizationPolicyDef.getTemplates().add(templateBean);
storage.createPolicyDefinition(authorizationPolicyDef);
storage.commitTx();
}
}
|
|
package org.hisp.dhis.program;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import com.google.common.collect.Sets;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.BaseNameableObject;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.MergeMode;
import org.hisp.dhis.common.VersionedObject;
import org.hisp.dhis.dataapproval.DataApprovalWorkflow;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataentryform.DataEntryForm;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.program.notification.ProgramNotificationTemplate;
import org.hisp.dhis.programrule.ProgramRule;
import org.hisp.dhis.programrule.ProgramRuleVariable;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.relationship.RelationshipType;
import org.hisp.dhis.schema.annotation.PropertyRange;
import org.hisp.dhis.trackedentity.TrackedEntity;
import org.hisp.dhis.trackedentity.TrackedEntityAttribute;
import org.hisp.dhis.user.UserAuthorityGroup;
import org.hisp.dhis.validation.ValidationCriteria;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* @author Abyot Asalefew
*/
@JacksonXmlRootElement( localName = "program", namespace = DxfNamespaces.DXF_2_0 )
public class Program
extends BaseNameableObject
implements VersionedObject
{
    // Data model version; incremented via increaseVersion().
    private int version;

    // Custom label for the enrollment date, shown in data entry forms.
    private String enrollmentDateLabel;

    // Custom label for the incident date, shown in data entry forms.
    private String incidentDateLabel;

    // Organisation units this program is assigned to (bidirectional association).
    private Set<OrganisationUnit> organisationUnits = new HashSet<>();

    // The stages making up this program.
    private Set<ProgramStage> programStages = new HashSet<>();

    private Set<ValidationCriteria> validationCriteria = new HashSet<>();

    // Whether this program is with or without registration.
    private ProgramType programType;

    private Boolean displayIncidentDate = true;

    private Boolean ignoreOverdueEvents = false;

    // Ordered list of tracked entity attributes linked to this program.
    private List<ProgramTrackedEntityAttribute> programAttributes = new ArrayList<>();

    // User roles granted access to this program.
    private Set<UserAuthorityGroup> userRoles = new HashSet<>();

    private Set<ProgramIndicator> programIndicators = new HashSet<>();

    private Set<ProgramRule> programRules = new HashSet<>();

    private Set<ProgramRuleVariable> programRuleVariables = new HashSet<>();

    private Boolean onlyEnrollOnce = false;

    private Set<ProgramNotificationTemplate> notificationTemplates = new HashSet<>();

    private Boolean selectEnrollmentDatesInFuture = false;

    private Boolean selectIncidentDatesInFuture = false;

    // Text and type describing the relationship to relatedProgram, if any.
    private String relationshipText;

    private RelationshipType relationshipType;

    private Boolean relationshipFromA = false;

    private Program relatedProgram;

    private Boolean dataEntryMethod = false;

    // The tracked entity type this program applies to.
    private TrackedEntity trackedEntity;

    // Custom data entry form, if any.
    private DataEntryForm dataEntryForm;

    /**
     * The CategoryCombo used for data attributes.
     */
    private DataElementCategoryCombo categoryCombo;

    /**
     * Property indicating whether offline storage is enabled for this program
     * or not
     */
    private boolean skipOffline;

    /**
     * The approval workflow (if any) for this program.
     */
    private DataApprovalWorkflow workflow;

    private Boolean displayFrontPageList = false;

    /**
     * Property indicating whether first stage can appear for data entry on the
     * same page with registration
     */
    private Boolean useFirstStageDuringRegistration = false;

    /**
     * Property indicating whether program allows for capturing of coordinates
     */
    private Boolean captureCoordinates = false;

    /**
     * How many days after period is over will this program block creation and modification of events
     */
    private int expiryDays;

    /**
     * The PeriodType indicating the frequency that this program will use to decide on expiry
     */
    private PeriodType expiryPeriodType;

    /**
     * How many days after an event is completed will this program block modification of the event
     */
    private int completeEventsExpiryDays;
// -------------------------------------------------------------------------
// Constructors
// -------------------------------------------------------------------------
    /**
     * No-arg constructor.
     */
    public Program()
    {
    }
    /**
     * @param name the program name.
     * @param description the program description.
     */
    public Program( String name, String description )
    {
        this.name = name;
        this.description = description;
    }
// -------------------------------------------------------------------------
// Logic methods
// -------------------------------------------------------------------------
public void addOrganisationUnit( OrganisationUnit organisationUnit )
{
organisationUnits.add( organisationUnit );
organisationUnit.getPrograms().add( this );
}
public void removeOrganisationUnit( OrganisationUnit organisationUnit )
{
organisationUnits.remove( organisationUnit );
organisationUnit.getPrograms().remove( this );
}
public void updateOrganisationUnits( Set<OrganisationUnit> updates )
{
Set<OrganisationUnit> toRemove = Sets.difference( organisationUnits, updates );
Set<OrganisationUnit> toAdd = Sets.difference( updates, organisationUnits );
toRemove.stream().forEach( u -> u.getPrograms().remove( this ) );
toAdd.stream().forEach( u -> u.getPrograms().add( this ) );
organisationUnits.clear();
organisationUnits.addAll( updates );
}
/**
* Returns the ProgramTrackedEntityAttribute of this Program which contains
* the given TrackedEntityAttribute.
*/
public ProgramTrackedEntityAttribute getAttribute( TrackedEntityAttribute attribute )
{
for ( ProgramTrackedEntityAttribute programAttribute : programAttributes )
{
if ( programAttribute != null && programAttribute.getAttribute().equals( attribute ) )
{
return programAttribute;
}
}
return null;
}
/**
* Returns all data elements which are part of the stages of this program.
*/
public Set<DataElement> getDataElements()
{
Set<DataElement> elements = new HashSet<>();
for ( ProgramStage stage : programStages )
{
elements.addAll( stage.getAllDataElements() );
}
return elements;
}
/**
* Returns data elements which are part of the stages of this program which
* have a legend set and is of numeric value type.
*/
public Set<DataElement> getDataElementsWithLegendSet()
{
return getDataElements().stream().filter( e -> e.hasLegendSet() && e.isNumericType() ).collect( Collectors.toSet() );
}
/**
* Returns TrackedEntityAttributes from ProgramTrackedEntityAttributes. Use
* getAttributes() to access the persisted attribute list.
*/
public List<TrackedEntityAttribute> getTrackedEntityAttributes()
{
List<TrackedEntityAttribute> attributes = new ArrayList<>();
for ( ProgramTrackedEntityAttribute attribute : programAttributes )
{
attributes.add( attribute.getAttribute() );
}
return attributes;
}
/**
* Returns non-confidential TrackedEntityAttributes from ProgramTrackedEntityAttributes. Use
* getAttributes() to access the persisted attribute list.
*/
public List<TrackedEntityAttribute> getNonConfidentialTrackedEntityAttributes()
{
return getTrackedEntityAttributes().stream().filter( a -> !a.isConfidentialBool() ).collect( Collectors.toList() );
}
/**
* Returns TrackedEntityAttributes from ProgramTrackedEntityAttributes which
* have a legend set and is of numeric value type.
*/
public List<TrackedEntityAttribute> getNonConfidentialTrackedEntityAttributesWithLegendSet()
{
return getTrackedEntityAttributes().stream().filter( a -> !a.isConfidentialBool() && a.hasLegendSet() && a.isNumericType() ).collect( Collectors.toList() );
}
public ProgramStage getProgramStageByStage( int stage )
{
int count = 1;
for ( ProgramStage programStage : programStages )
{
if ( count == stage )
{
return programStage;
}
count++;
}
return null;
}
public boolean isSingleProgramStage()
{
return programStages != null && programStages.size() == 1;
}
public boolean hasOrganisationUnit( OrganisationUnit unit )
{
return organisationUnits.contains( unit );
}
@Override
public int increaseVersion()
{
return ++version;
}
// -------------------------------------------------------------------------
// Getters and setters
// -------------------------------------------------------------------------
    // Data model version of this program; bumped via increaseVersion().
    @Override
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getVersion()
    {
        return version;
    }

    @Override
    public void setVersion( int version )
    {
        this.version = version;
    }
    // Organisation units this program is assigned to. Prefer
    // addOrganisationUnit/removeOrganisationUnit to keep both association
    // sides in sync.
    @JsonProperty( "organisationUnits" )
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "organisationUnits", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "organisationUnit", namespace = DxfNamespaces.DXF_2_0 )
    public Set<OrganisationUnit> getOrganisationUnits()
    {
        return organisationUnits;
    }

    public void setOrganisationUnits( Set<OrganisationUnit> organisationUnits )
    {
        this.organisationUnits = organisationUnits;
    }

    // The stages making up this program.
    @JsonProperty( "programStages" )
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "programStages", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "programStage", namespace = DxfNamespaces.DXF_2_0 )
    public Set<ProgramStage> getProgramStages()
    {
        return programStages;
    }

    public void setProgramStages( Set<ProgramStage> programStages )
    {
        this.programStages = programStages;
    }
    // Custom label for the enrollment date in data entry forms.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    @PropertyRange( min = 2 )
    public String getEnrollmentDateLabel()
    {
        return enrollmentDateLabel;
    }

    public void setEnrollmentDateLabel( String enrollmentDateLabel )
    {
        this.enrollmentDateLabel = enrollmentDateLabel;
    }

    // Custom label for the incident date in data entry forms.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    @PropertyRange( min = 2 )
    public String getIncidentDateLabel()
    {
        return incidentDateLabel;
    }

    public void setIncidentDateLabel( String incidentDateLabel )
    {
        this.incidentDateLabel = incidentDateLabel;
    }

    // Whether this program is with or without registration; see
    // isRegistration()/isWithoutRegistration().
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public ProgramType getProgramType()
    {
        return programType;
    }

    public void setProgramType( ProgramType programType )
    {
        this.programType = programType;
    }
    @JsonProperty( "validationCriterias" )
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "validationCriterias", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "validationCriteria", namespace = DxfNamespaces.DXF_2_0 )
    public Set<ValidationCriteria> getValidationCriteria()
    {
        return validationCriteria;
    }

    public void setValidationCriteria( Set<ValidationCriteria> validationCriteria )
    {
        this.validationCriteria = validationCriteria;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getDisplayIncidentDate()
    {
        return displayIncidentDate;
    }

    public void setDisplayIncidentDate( Boolean displayIncidentDate )
    {
        this.displayIncidentDate = displayIncidentDate;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getIgnoreOverdueEvents()
    {
        return ignoreOverdueEvents;
    }

    public void setIgnoreOverdueEvents( Boolean ignoreOverdueEvents )
    {
        this.ignoreOverdueEvents = ignoreOverdueEvents;
    }

    // Derived read-only property: true when programType is WITH_REGISTRATION.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isRegistration()
    {
        return programType == ProgramType.WITH_REGISTRATION;
    }

    // Derived read-only property: true when programType is WITHOUT_REGISTRATION.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isWithoutRegistration()
    {
        return programType == ProgramType.WITHOUT_REGISTRATION;
    }
    // User roles granted access to this program.
    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "userRoles", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "userRole", namespace = DxfNamespaces.DXF_2_0 )
    public Set<UserAuthorityGroup> getUserRoles()
    {
        return userRoles;
    }

    public void setUserRoles( Set<UserAuthorityGroup> userRoles )
    {
        this.userRoles = userRoles;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "programIndicators", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "programIndicator", namespace = DxfNamespaces.DXF_2_0 )
    public Set<ProgramIndicator> getProgramIndicators()
    {
        return programIndicators;
    }

    public void setProgramIndicators( Set<ProgramIndicator> programIndicators )
    {
        this.programIndicators = programIndicators;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "programRules", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "programRule", namespace = DxfNamespaces.DXF_2_0 )
    public Set<ProgramRule> getProgramRules()
    {
        return programRules;
    }

    public void setProgramRules( Set<ProgramRule> programRules )
    {
        this.programRules = programRules;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "programRuleVariables", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "programRuleVariable", namespace = DxfNamespaces.DXF_2_0 )
    public Set<ProgramRuleVariable> getProgramRuleVariables()
    {
        return programRuleVariables;
    }

    public void setProgramRuleVariables( Set<ProgramRuleVariable> programRuleVariables )
    {
        this.programRuleVariables = programRuleVariables;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getOnlyEnrollOnce()
    {
        return onlyEnrollOnce;
    }

    public void setOnlyEnrollOnce( Boolean onlyEnrollOnce )
    {
        this.onlyEnrollOnce = onlyEnrollOnce;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlProperty( localName = "notificationTemplate", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlElementWrapper( localName = "notificationTemplates", namespace = DxfNamespaces.DXF_2_0 )
    public Set<ProgramNotificationTemplate> getNotificationTemplates()
    {
        return notificationTemplates;
    }

    public void setNotificationTemplates( Set<ProgramNotificationTemplate> notificationTemplates )
    {
        this.notificationTemplates = notificationTemplates;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getSelectEnrollmentDatesInFuture()
    {
        return selectEnrollmentDatesInFuture;
    }

    public void setSelectEnrollmentDatesInFuture( Boolean selectEnrollmentDatesInFuture )
    {
        this.selectEnrollmentDatesInFuture = selectEnrollmentDatesInFuture;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getSelectIncidentDatesInFuture()
    {
        return selectIncidentDatesInFuture;
    }

    public void setSelectIncidentDatesInFuture( Boolean selectIncidentDatesInFuture )
    {
        this.selectIncidentDatesInFuture = selectIncidentDatesInFuture;
    }
    // Describes the relationship (if any) between this program and
    // relatedProgram; see relationshipType and relationshipFromA.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    @PropertyRange( min = 2 )
    public String getRelationshipText()
    {
        return relationshipText;
    }

    public void setRelationshipText( String relationshipText )
    {
        this.relationshipText = relationshipText;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public RelationshipType getRelationshipType()
    {
        return relationshipType;
    }

    public void setRelationshipType( RelationshipType relationshipType )
    {
        this.relationshipType = relationshipType;
    }

    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Program getRelatedProgram()
    {
        return relatedProgram;
    }

    public void setRelatedProgram( Program relatedProgram )
    {
        this.relatedProgram = relatedProgram;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getRelationshipFromA()
    {
        return relationshipFromA;
    }

    public void setRelationshipFromA( Boolean relationshipFromA )
    {
        this.relationshipFromA = relationshipFromA;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getDataEntryMethod()
    {
        return dataEntryMethod;
    }

    public void setDataEntryMethod( Boolean dataEntryMethod )
    {
        this.dataEntryMethod = dataEntryMethod;
    }
    // Persisted, ordered list of program tracked entity attributes. See
    // getTrackedEntityAttributes() for the unwrapped attribute list.
    @JsonProperty( "programTrackedEntityAttributes" )
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "programTrackedEntityAttributes", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "programTrackedEntityAttribute", namespace = DxfNamespaces.DXF_2_0 )
    public List<ProgramTrackedEntityAttribute> getProgramAttributes()
    {
        return programAttributes;
    }

    public void setProgramAttributes( List<ProgramTrackedEntityAttribute> programAttributes )
    {
        this.programAttributes = programAttributes;
    }

    @JsonProperty
    @JacksonXmlElementWrapper( localName = "trackedEntity", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "trackedEntity", namespace = DxfNamespaces.DXF_2_0 )
    public TrackedEntity getTrackedEntity()
    {
        return trackedEntity;
    }

    public void setTrackedEntity( TrackedEntity trackedEntity )
    {
        this.trackedEntity = trackedEntity;
    }

    @JsonProperty
    @JacksonXmlProperty( localName = "dataEntryForm", namespace = DxfNamespaces.DXF_2_0 )
    public DataEntryForm getDataEntryForm()
    {
        return dataEntryForm;
    }

    public void setDataEntryForm( DataEntryForm dataEntryForm )
    {
        this.dataEntryForm = dataEntryForm;
    }

    // The category combination used for data attributes; see hasCategoryCombo().
    @JsonProperty
    @JsonSerialize( as = BaseIdentifiableObject.class )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataElementCategoryCombo getCategoryCombo()
    {
        return categoryCombo;
    }

    public void setCategoryCombo( DataElementCategoryCombo categoryCombo )
    {
        this.categoryCombo = categoryCombo;
    }

    // The data approval workflow (if any) for this program.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataApprovalWorkflow getWorkflow()
    {
        return workflow;
    }

    public void setWorkflow( DataApprovalWorkflow workflow )
    {
        this.workflow = workflow;
    }
/**
* Indicates whether this program has a category combination which is different
* from the default category combination.
*/
public boolean hasCategoryCombo()
{
return categoryCombo != null && !DataElementCategoryCombo.DEFAULT_CATEGORY_COMBO_NAME.equals( categoryCombo.getName() );
}
    // Whether offline storage is disabled for this program.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isSkipOffline()
    {
        return skipOffline;
    }

    public void setSkipOffline( boolean skipOffline )
    {
        this.skipOffline = skipOffline;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getDisplayFrontPageList()
    {
        return displayFrontPageList;
    }

    public void setDisplayFrontPageList( Boolean displayFrontPageList )
    {
        this.displayFrontPageList = displayFrontPageList;
    }

    // Whether the first stage can appear for data entry on the same page as
    // registration.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getUseFirstStageDuringRegistration()
    {
        return useFirstStageDuringRegistration;
    }

    public void setUseFirstStageDuringRegistration( Boolean useFirstStageDuringRegistration )
    {
        this.useFirstStageDuringRegistration = useFirstStageDuringRegistration;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public Boolean getCaptureCoordinates()
    {
        return captureCoordinates;
    }

    public void setCaptureCoordinates( Boolean captureCoordinates )
    {
        this.captureCoordinates = captureCoordinates;
    }

    // Days after the period ends during which event creation/modification is
    // still allowed; see expiryPeriodType.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getExpiryDays() {
        return expiryDays;
    }

    public void setExpiryDays(int expiryDays) {
        this.expiryDays = expiryDays;
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public PeriodType getExpiryPeriodType() {
        return expiryPeriodType;
    }

    public void setExpiryPeriodType(PeriodType expiryPeriodType) {
        this.expiryPeriodType = expiryPeriodType;
    }

    // Days after event completion during which modification is still allowed.
    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public int getCompleteEventsExpiryDays() {
        return completeEventsExpiryDays;
    }

    public void setCompleteEventsExpiryDays(int completeEventsExpiryDays) {
        this.completeEventsExpiryDays = completeEventsExpiryDays;
    }
/**
 * Merges the state of the given object into this program.
 *
 * Fix: the original guard {@code other.getClass().isInstance( this )} was
 * inverted — it passed when {@code other} was an instance of a supertype,
 * making the {@code (Program) other} cast below throw a
 * ClassCastException, and it silently skipped merging when {@code other}
 * was a Program subclass. {@code other instanceof Program} is the correct
 * precondition for the cast.
 *
 * @param other the object to merge from.
 * @param mergeMode REPLACE copies all values, MERGE only non-null ones.
 */
@Override
public void mergeWith( IdentifiableObject other, MergeMode mergeMode )
{
    super.mergeWith( other, mergeMode );

    if ( other instanceof Program )
    {
        Program program = (Program) other;

        // Primitive-valued properties are copied unconditionally.
        version = program.getVersion();
        expiryDays = program.getExpiryDays();
        completeEventsExpiryDays = program.getCompleteEventsExpiryDays();

        if ( mergeMode.isReplace() )
        {
            enrollmentDateLabel = program.getEnrollmentDateLabel();
            incidentDateLabel = program.getIncidentDateLabel();
            programType = program.getProgramType();
            displayIncidentDate = program.getDisplayIncidentDate();
            ignoreOverdueEvents = program.getIgnoreOverdueEvents();
            onlyEnrollOnce = program.getOnlyEnrollOnce();
            selectEnrollmentDatesInFuture = program.getSelectEnrollmentDatesInFuture();
            selectIncidentDatesInFuture = program.getSelectIncidentDatesInFuture();
            relationshipText = program.getRelationshipText();
            relationshipType = program.getRelationshipType();
            relationshipFromA = program.getRelationshipFromA();
            relatedProgram = program.getRelatedProgram();
            dataEntryMethod = program.getDataEntryMethod();
            trackedEntity = program.getTrackedEntity();
            useFirstStageDuringRegistration = program.getUseFirstStageDuringRegistration();
            categoryCombo = program.getCategoryCombo();
            captureCoordinates = program.getCaptureCoordinates();
            expiryPeriodType = program.getExpiryPeriodType();
        }
        else if ( mergeMode.isMerge() )
        {
            // Merge mode keeps the existing value when the source is null.
            enrollmentDateLabel = program.getEnrollmentDateLabel() == null ? enrollmentDateLabel : program.getEnrollmentDateLabel();
            incidentDateLabel = program.getIncidentDateLabel() == null ? incidentDateLabel : program.getIncidentDateLabel();
            programType = program.getProgramType() == null ? programType : program.getProgramType();
            displayIncidentDate = program.getDisplayIncidentDate() == null ? displayIncidentDate : program.getDisplayIncidentDate();
            ignoreOverdueEvents = program.getIgnoreOverdueEvents() == null ? ignoreOverdueEvents : program.getIgnoreOverdueEvents();
            onlyEnrollOnce = program.getOnlyEnrollOnce() == null ? onlyEnrollOnce : program.getOnlyEnrollOnce();
            selectEnrollmentDatesInFuture = program.getSelectEnrollmentDatesInFuture() == null ? selectEnrollmentDatesInFuture : program.getSelectEnrollmentDatesInFuture();
            selectIncidentDatesInFuture = program.getSelectIncidentDatesInFuture() == null ? selectIncidentDatesInFuture : program.getSelectIncidentDatesInFuture();
            relationshipText = program.getRelationshipText() == null ? relationshipText : program.getRelationshipText();
            relationshipType = program.getRelationshipType() == null ? relationshipType : program.getRelationshipType();
            relationshipFromA = program.getRelationshipFromA() == null ? relationshipFromA : program.getRelationshipFromA();
            relatedProgram = program.getRelatedProgram() == null ? relatedProgram : program.getRelatedProgram();
            dataEntryMethod = program.getDataEntryMethod() == null ? dataEntryMethod : program.getDataEntryMethod();
            trackedEntity = program.getTrackedEntity() == null ? trackedEntity : program.getTrackedEntity();
            useFirstStageDuringRegistration = program.getUseFirstStageDuringRegistration() == null ? useFirstStageDuringRegistration : program.getUseFirstStageDuringRegistration();
            categoryCombo = program.getCategoryCombo() == null ? categoryCombo : program.getCategoryCombo();
            captureCoordinates = program.getCaptureCoordinates() == null ? captureCoordinates : program.getCaptureCoordinates();
            expiryPeriodType = program.getExpiryPeriodType() == null ? expiryPeriodType : program.getExpiryPeriodType();
        }

        // Collections are replaced wholesale in both merge modes.
        organisationUnits.clear();
        organisationUnits.addAll( program.getOrganisationUnits() );

        programStages.clear();

        for ( ProgramStage programStage : program.getProgramStages() )
        {
            programStages.add( programStage );
            // Re-point the adopted stages at this program.
            programStage.setProgram( this );
        }

        validationCriteria.clear();
        validationCriteria.addAll( program.getValidationCriteria() );

        programAttributes.clear();
        programAttributes.addAll( program.getProgramAttributes() );

        userRoles.clear();
        userRoles.addAll( program.getUserRoles() );

        notificationTemplates.clear();
        notificationTemplates.addAll( program.getNotificationTemplates() );
    }
}
}
|
|
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.modelcompiler.builder.generator.expression;
import java.util.Collection;
import java.util.Optional;
import com.github.javaparser.ast.body.Parameter;
import com.github.javaparser.ast.expr.BinaryExpr;
import com.github.javaparser.ast.expr.ClassExpr;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.FieldAccessExpr;
import com.github.javaparser.ast.expr.LambdaExpr;
import com.github.javaparser.ast.expr.LiteralExpr;
import com.github.javaparser.ast.expr.MethodCallExpr;
import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.stmt.ExpressionStmt;
import org.drools.modelcompiler.builder.generator.RuleContext;
import org.drools.modelcompiler.builder.generator.TypedExpression;
import org.drools.modelcompiler.builder.generator.drlxparse.DrlxParseSuccess;
import org.drools.modelcompiler.builder.generator.drlxparse.MultipleDrlxParseSuccess;
import org.drools.modelcompiler.builder.generator.drlxparse.SingleDrlxParseSuccess;
import static java.util.Optional.of;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.THIS_PLACEHOLDER;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.findLastMethodInChain;
import static org.drools.modelcompiler.builder.generator.DrlxParseUtil.toClassOrInterfaceType;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.ALPHA_INDEXED_BY_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.BETA_INDEXED_BY_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.BIND_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.EXPR_AND_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.EXPR_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.EXPR_END_AND_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.EXPR_END_OR_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.EXPR_OR_CALL;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.NO_OP_EXPR;
import static org.drools.modelcompiler.builder.generator.DslMethodNames.REACT_ON_CALL;
import static org.drools.mvelcompiler.util.TypeUtils.toJPType;
/**
 * Builds the executable-model DSL calls (expr / bind, decorated with
 * indexedBy and reactOn) for constraints that appear inside a pattern.
 */
public class PatternExpressionBuilder extends AbstractExpressionBuilder {
    public PatternExpressionBuilder(RuleContext context) {
        super(context);
    }
    /**
     * Adds the generated DSL expressions to the context: a unification or
     * predicate expression first, then an optional binding expression.
     */
    @Override
    public void processExpression(SingleDrlxParseSuccess drlxParseResult) {
        if (drlxParseResult.hasUnificationVariable()) {
            Expression dslExpr = buildUnificationExpression(drlxParseResult);
            context.addExpression(dslExpr);
        } else if (drlxParseResult.isPredicate()) {
            Expression dslExpr = buildExpressionWithIndexing(drlxParseResult);
            context.addExpression(dslExpr);
        }
        if (drlxParseResult.getExprBinding() != null) {
            Expression dslExpr = buildBinding(drlxParseResult);
            context.addExpression(dslExpr);
        }
    }
    /**
     * Builds the expr DSL for the parse result. A multiple (AND/OR) result
     * chains the child calls through their scope and terminates the chain
     * with the matching endAnd/endOr call; a single result delegates to
     * {@link #buildSingleExpressionWithIndexing}.
     */
    @Override
    public MethodCallExpr buildExpressionWithIndexing(DrlxParseSuccess drlxParseResult) {
        if (drlxParseResult instanceof MultipleDrlxParseSuccess) {
            MultipleDrlxParseSuccess multi = ( MultipleDrlxParseSuccess ) drlxParseResult;
            MethodCallExpr exprDSL = new MethodCallExpr(null, multi.getOperator() == BinaryExpr.Operator.OR ? EXPR_OR_CALL : EXPR_AND_CALL );
            for (DrlxParseSuccess child : multi.getResults()) {
                // Each child uses the previous call as its scope, producing
                // and(...).expr(...).expr(...) style chains.
                MethodCallExpr childExpr = buildExpressionWithIndexing(child);
                childExpr.setScope( exprDSL );
                exprDSL = childExpr;
            }
            return new MethodCallExpr(exprDSL, multi.getOperator() == BinaryExpr.Operator.OR ? EXPR_END_OR_CALL : EXPR_END_AND_CALL );
        }
        return buildSingleExpressionWithIndexing((SingleDrlxParseSuccess ) drlxParseResult);
    }
    // Builds expr([exprId,] ...) and appends the optional indexedBy and
    // reactOn arguments.
    private MethodCallExpr buildSingleExpressionWithIndexing(SingleDrlxParseSuccess drlxParseResult) {
        String exprId = createExprId(drlxParseResult);
        MethodCallExpr exprDSL = new MethodCallExpr(null, EXPR_CALL);
        if (exprId != null && !"".equals(exprId)) {
            exprDSL.addArgument(new StringLiteralExpr(exprId));
        }
        exprDSL = buildExpression(drlxParseResult, exprDSL);
        // Effectively-final copy required by the lambdas below.
        MethodCallExpr finalExprDSL = exprDSL;
        buildIndexedBy(drlxParseResult).ifPresent(finalExprDSL::addArgument);
        buildReactOn(drlxParseResult).ifPresent(finalExprDSL::addArgument);
        return exprDSL;
    }
    // Adds the used declarations, the optional right literal and the
    // constraint lambda as arguments of the expr call. Temporal constraints
    // are delegated to buildTemporalExpression.
    private MethodCallExpr buildExpression(SingleDrlxParseSuccess drlxParseResult, MethodCallExpr exprDSL) {
        if (drlxParseResult.isTemporal()) {
            return buildTemporalExpression(drlxParseResult, exprDSL);
        }
        MethodCallExpr ooPathPatternExpr = null;
        for (String usedDeclarattion : drlxParseResult.getUsedDeclarations()) {
            if ( !(drlxParseResult.isSkipThisAsParam() && usedDeclarattion.equals( drlxParseResult.getPatternBinding() ) ) ) {
                MethodCallExpr ooPathExpr = context.getOOPathPatternExpr(usedDeclarattion);
                if (ooPathExpr == null) {
                    exprDSL.addArgument(context.getVarExpr(usedDeclarattion));
                } else {
                    // Declaration bound by an oopath pattern; remember it so
                    // the constraint can be re-homed below.
                    ooPathPatternExpr = ooPathExpr;
                }
            }
        }
        if (drlxParseResult.getRightLiteral() != null) {
            exprDSL.addArgument( "" + drlxParseResult.getRightLiteral() );
        }
        if (ooPathPatternExpr != null) {
            // This constraint belongs to an external oopath pattern, so transfer its evaluation there.
            exprDSL.setScope(ooPathPatternExpr.clone());
            // Flag the old oopath pattern as a no_op in order to remove it from the generated dsl.
            findLastMethodInChain(ooPathPatternExpr).setName(NO_OP_EXPR);
            drlxParseResult.setSkipThisAsParam(true);
        }
        exprDSL.addArgument(buildConstraintExpression(drlxParseResult, drlxParseResult.getExpr()));
        return exprDSL;
    }
    // Builds reactOn("prop", ...) listing the properties the constraint
    // reacts to, when a reactOn clause should be generated.
    private Optional<MethodCallExpr> buildReactOn(SingleDrlxParseSuccess drlxParseResult) {
        if (shouldBuildReactOn(drlxParseResult)) {
            MethodCallExpr reactOnDSL = new MethodCallExpr(null, REACT_ON_CALL);
            drlxParseResult.getReactOnProperties().stream()
                    .map(StringLiteralExpr::new)
                    .forEach(reactOnDSL::addArgument);
            return of(reactOnDSL);
        }
        return Optional.empty();
    }
    /**
     * Builds bind(var, [left declarations...,] extractor [, reactOn]) for
     * the expression binding and registers it in the context under the
     * bound variable name.
     */
    @Override
    public MethodCallExpr buildBinding(SingleDrlxParseSuccess drlxParseResult) {
        sortUsedDeclarations(drlxParseResult);
        MethodCallExpr bindDSL = new MethodCallExpr(null, BIND_CALL);
        String boundVar = drlxParseResult.hasUnificationVariable() ?
                drlxParseResult.getUnificationVariable() :
                drlxParseResult.getExprBinding();
        bindDSL.addArgument(context.getVarExpr(boundVar));
        final Expression constraintExpression = getConstraintExpression(drlxParseResult);
        drlxParseResult.getUsedDeclarationsOnLeft().forEach(d -> bindDSL.addArgument(context.getVar(d)));
        bindDSL.addArgument(constraintExpression);
        final Optional<MethodCallExpr> methodCallExpr = buildReactOn(drlxParseResult);
        methodCallExpr.ifPresent(bindDSL::addArgument);
        context.registerBindingExpression(boundVar, bindDSL);
        return bindDSL;
    }
    /**
     * Builds the alphaIndexedBy/betaIndexedBy DSL call when the constraint
     * is indexable. Unification constraints are always indexed (EQUAL,
     * index id -1, null right operand); otherwise an index is built only
     * when shouldCreateIndex allows it and, for alpha constraints, the
     * right operand is a literal (or a String-to-date conversion).
     */
    private Optional<MethodCallExpr> buildIndexedBy(SingleDrlxParseSuccess drlxParseResult) {
        if (drlxParseResult.isUnification()) {
            TypedExpression left = drlxParseResult.getLeft();
            TypedExpression right = drlxParseResult.getRight();
            LambdaExpr indexedByLeftOperandExtractor = new LambdaExpr();
            indexedByLeftOperandExtractor.setEnclosingParameters(true);
            // The extractor lambda takes whichever side mentions _this.
            boolean leftContainsThis = left.getExpression().toString().contains(THIS_PLACEHOLDER);
            TypedExpression typedExpression = leftContainsThis ? left : right;
            indexedByLeftOperandExtractor.addParameter(new Parameter(drlxParseResult.getPatternJPType(), THIS_PLACEHOLDER));
            indexedByLeftOperandExtractor.setBody(new ExpressionStmt(typedExpression.getExpression()));
            MethodCallExpr indexedByDSL = new MethodCallExpr(null, drlxParseResult.isBetaConstraint() ? BETA_INDEXED_BY_CALL : ALPHA_INDEXED_BY_CALL);
            indexedByDSL.addArgument(new ClassExpr(toJPType(left.getRawClass())));
            indexedByDSL.addArgument(org.drools.model.Index.ConstraintType.class.getCanonicalName() + ".EQUAL");
            indexedByDSL.addArgument("-1");
            indexedByDSL.addArgument(indexedByLeftOperandExtractor);
            indexedByDSL.addArgument("" + null);
            return Optional.of(indexedByDSL);
        }
        if ( !shouldCreateIndex( drlxParseResult ) ) {
            return Optional.empty();
        }
        TypedExpression left = drlxParseResult.getLeft();
        TypedExpression right = drlxParseResult.getRight();
        boolean isBeta = drlxParseResult.isBetaConstraint();
        Expression rightExpression = right.getExpression();
        // Alpha indexes require a constant right operand.
        if (!isBeta && !(rightExpression instanceof LiteralExpr || isStringToDateExpression(rightExpression))) {
            return Optional.empty();
        }
        FieldAccessExpr indexedBy_constraintType = new FieldAccessExpr(new NameExpr(org.drools.model.Index.ConstraintType.class.getCanonicalName()), drlxParseResult.getDecodeConstraintType().toString()); // not 100% accurate as the type in "nameExpr" is actually parsed if it was JavaParsers as a big chain of FieldAccessExpr
        LambdaExpr indexedBy_leftOperandExtractor = new LambdaExpr();
        indexedBy_leftOperandExtractor.setEnclosingParameters(true);
        indexedBy_leftOperandExtractor.addParameter(new Parameter(toClassOrInterfaceType(drlxParseResult.getPatternType()), THIS_PLACEHOLDER));
        // Extract from the side that mentions _this; compare against the other.
        boolean leftContainsThis = left.getExpression().toString().contains(THIS_PLACEHOLDER);
        indexedBy_leftOperandExtractor.setBody(new ExpressionStmt(leftContainsThis ? left.getExpression() : right.getExpression()));
        MethodCallExpr indexedByDSL = new MethodCallExpr(null, isBeta ? BETA_INDEXED_BY_CALL : ALPHA_INDEXED_BY_CALL);
        indexedByDSL.addArgument(new ClassExpr(toJPType(left.getRawClass())));
        indexedByDSL.addArgument( indexedBy_constraintType );
        indexedByDSL.addArgument( getIndexIdArgument( drlxParseResult, left ) );
        indexedByDSL.addArgument(indexedBy_leftOperandExtractor );
        Collection<String> usedDeclarations = drlxParseResult.getUsedDeclarations();
        java.lang.reflect.Type leftType = left.getType();
        if ( drlxParseResult.isBetaConstraint() ) {
            // Beta index: the right operand is extracted from the other fact(s).
            addIndexedByDeclaration(left, right, leftContainsThis, indexedByDSL, usedDeclarations);
        } else {
            indexedByDSL.addArgument( narrowExpressionToType(right, leftType));
        }
        return Optional.of(indexedByDSL);
    }
}
|
|
/*
* Copyright 2015 NEC Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.o3project.odenos.component.linklayerizer;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.o3project.odenos.core.component.ConversionTable;
import org.o3project.odenos.core.component.Logic;
import org.o3project.odenos.core.component.NetworkInterface;
import org.o3project.odenos.core.component.network.flow.Flow;
import org.o3project.odenos.core.component.network.flow.FlowObject;
import org.o3project.odenos.core.component.network.flow.FlowSet;
import org.o3project.odenos.core.component.network.flow.basic.BasicFlow;
import org.o3project.odenos.core.component.network.flow.basic.BasicFlowMatch;
import org.o3project.odenos.core.component.network.flow.basic.FlowAction;
import org.o3project.odenos.core.component.network.flow.basic.FlowActionOutput;
import org.o3project.odenos.core.component.network.packet.InPacket;
import org.o3project.odenos.core.component.network.packet.InPacketAdded;
import org.o3project.odenos.core.component.network.topology.Link;
import org.o3project.odenos.core.component.network.topology.Node;
import org.o3project.odenos.core.component.network.topology.Port;
import org.o3project.odenos.core.component.network.topology.Topology;
import org.o3project.odenos.core.manager.system.ComponentConnection;
import org.o3project.odenos.core.manager.system.ComponentConnectionLogicAndNetwork;
import org.o3project.odenos.core.manager.system.event.ComponentConnectionChanged;
import org.o3project.odenos.remoteobject.RequestParser;
import org.o3project.odenos.remoteobject.message.Request;
import org.o3project.odenos.remoteobject.message.Request.Method;
import org.o3project.odenos.remoteobject.message.Response;
import org.o3project.odenos.remoteobject.messagingclient.MessageDispatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* LinkLayerizer integrates networks of different layers, to create a multi-layer network.
*
*/
public class LinkLayerizer extends Logic {
/** logger. */
private static final Logger logger = LoggerFactory.getLogger(LinkLayerizer.class);
/** Connection type of the layerized (merged, multi-layer) network. */
public static final String LAYERIZED_NETWORK = "layerized";
/** Connection type of the upper network. */
public static final String UPPER_NETWORK = "upper";
/** Connection type of the lower network. */
public static final String LOWER_NETWORK = "lower";
/** name separator for ConversionTable. */
public static final String SEPARATOR = "::";
/** RequestParser instance (routes REST-style requests to callbacks). */
protected final RequestParser<IActionCallback> parser;
/** Description of Component. */
private static final String DESCRIPTION = "LinkLayerizer Component";
/** Flag that is link synchronization in upper network. */
protected boolean upperLinkSync = true; /* default true */
/** Boundary Table. */
protected LinkLayerizerBoundaryTable linkLayerizerBoundaryTable;
/** LinkLayerizerOnFlow instance. */
protected LinkLayerizerOnFlow linkLayerizerOnFlow;
/**
 * Constructor.
 *
 * @param objectId ID for object.
 * @param dispatcher Message dispatcher instance.
 * @throws Exception if parameter is wrong.
 */
public LinkLayerizer(String objectId, MessageDispatcher dispatcher)
    throws Exception {
    super(objectId, dispatcher);
    // Request routing table for requests dispatched to this component.
    parser = createParser();
    linkLayerizerBoundaryTable = new LinkLayerizerBoundaryTable();
    // Flow-event handler shares this component's conversion table,
    // network interfaces and boundary table.
    linkLayerizerOnFlow = new LinkLayerizerOnFlow(
        conversionTable(),
        networkInterfaces(),
        linkLayerizerBoundaryTable);
}
/*
* (non-Javadoc)
* @see org.o3project.odenos.component.Component#getSuperType()
*/
@Override
protected String getSuperType() {
    // Reported as this component's super type in the ODENOS object hierarchy.
    return LinkLayerizer.class.getSimpleName();
}
/*
* (non-Javadoc)
* @see org.o3project.odenos.component.Component#getDescription()
*/
@Override
protected String getDescription() {
    // Human-readable component description.
    return DESCRIPTION;
}
/**
* Get Connection Types of Component.
*
* @return Connection Types
*/
@Override
protected String getConnectionTypes() {
    // Each entry has the form "<connection type>:<connection number>",
    // comma-separated; exactly one connection of each type is allowed.
    StringBuilder types = new StringBuilder();
    types.append(LAYERIZED_NETWORK).append(":1,");
    types.append(UPPER_NETWORK).append(":1,");
    types.append(LOWER_NETWORK).append(":1");
    return types.toString();
}
/**
 * Returns the status of the upper-link synchronization flag.
 * (The method name keeps the historical spelling for caller compatibility.)
 *
 * @return true if links are synchronized to the upper network.
 */
public boolean isUpperLinkisync() {
    return upperLinkSync;
}
/**
 * Sets the upper-link synchronization flag.
 *
 * @param upperLinkisync new status of upper-link synchronization.
 */
public void setUpperLinkisync(boolean upperLinkisync) {
    this.upperLinkSync = upperLinkisync;
}
/* (non-Javadoc)
* @see org.o3project.odenos.component.Logic#onConnectionChangedAddedPre(org.o3project.odenos.manager.system.event.ComponentConnectionChanged)
*/
@Override
protected boolean onConnectionChangedAddedPre(
        ComponentConnectionChanged message) {
    logger.debug("");
    ComponentConnection curr = message.curr();
    // Only accept logic-and-network connections addressed to this object.
    if (!ComponentConnectionLogicAndNetwork.TYPE.equals(
            curr.getObjectType())) {
        return false;
    }
    String ownerLogicId = curr
        .getProperty(ComponentConnectionLogicAndNetwork.LOGIC_ID);
    if (!getObjectId().equals(ownerLogicId)) {
        return false;
    }
    boolean alreadyConnected;
    String connectionType = curr.getConnectionType();
    switch (connectionType) {
    case LOWER_NETWORK:
    case UPPER_NETWORK:
    case LAYERIZED_NETWORK:
        // Reject a second connection of an already-connected type.
        alreadyConnected = isConnectionType(connectionType);
        break;
    default:
        /* unknown type is rejected like a duplicate */
        alreadyConnected = true;
        break;
    }
    if (alreadyConnected) {
        curr.setConnectionState(ComponentConnection.State.ERROR);
        systemMngInterface().putConnection(curr);
        return false;
    }
    return true;
}
/* (non-Javadoc)
* @see org.o3project.odenos.component.Logic#onConnectionChangedUpdatePre(org.o3project.odenos.manager.system.event.ComponentConnectionChanged)
*/
@Override
protected boolean onConnectionChangedUpdatePre(
        ComponentConnectionChanged message) {
    logger.debug("");
    ComponentConnection curr = message.curr();
    // Accept only logic-and-network connection updates owned by this object.
    boolean isLogicAndNetwork = ComponentConnectionLogicAndNetwork.TYPE
        .equals(curr.getObjectType());
    if (!isLogicAndNetwork) {
        return false;
    }
    String ownerLogicId = curr
        .getProperty(ComponentConnectionLogicAndNetwork.LOGIC_ID);
    return getObjectId().equals(ownerLogicId);
}
/* (non-Javadoc)
* @see org.o3project.odenos.component.Logic#onConnectionChangedDeletePre(org.o3project.odenos.manager.system.event.ComponentConnectionChanged)
*/
@Override
protected boolean onConnectionChangedDeletePre(
        ComponentConnectionChanged message) {
    logger.debug("");
    ComponentConnection curr = message.curr();
    // Only accept logic-and-network connections addressed to this object.
    if (!ComponentConnectionLogicAndNetwork.TYPE.equals(
            curr.getObjectType())) {
        return false;
    }
    String ownerLogicId = curr
        .getProperty(ComponentConnectionLogicAndNetwork.LOGIC_ID);
    if (!getObjectId().equals(ownerLogicId)) {
        return false;
    }
    boolean connected;
    String connectionType = curr.getConnectionType();
    switch (connectionType) {
    case LOWER_NETWORK:
    case UPPER_NETWORK:
    case LAYERIZED_NETWORK:
        // Deletion is only valid for a type that is currently connected.
        connected = isConnectionType(connectionType);
        break;
    default:
        /* unknown type is treated as not connected */
        connected = false;
        break;
    }
    if (!connected) {
        curr.setConnectionState(ComponentConnection.State.ERROR);
        systemMngInterface().putConnection(curr);
        return false;
    }
    return true;
}
/* (non-Javadoc)
* @see org.o3project.odenos.component.Logic#onConnectionChangedAdded(org.o3project.odenos.manager.system.event.ComponentConnectionChanged)
*/
@Override
protected void onConnectionChangedAdded(ComponentConnectionChanged message) {
    logger.debug("");
    ComponentConnection curr = message.curr();
    String networkId = curr
        .getProperty(ComponentConnectionLogicAndNetwork.NETWORK_ID);
    String connectionType = curr.getConnectionType();
    conversionTable().addEntryConnectionType(networkId, connectionType);
    curr.setConnectionState(ComponentConnection.State.RUNNING);
    /*
     * not register lower_nw to conversionTable
     */
    switch (connectionType) {
    case LOWER_NETWORK:
        subscribeLower(networkId);
        doOnConnectionChangedAddedLower(networkId);
        break;
    case UPPER_NETWORK:
        subscribeUpper(networkId);
        // The upper<->layerized network mapping is registered only once
        // the layerized side is already connected.
        ArrayList<String> layerizeds =
            conversionTable().getConnectionList(LAYERIZED_NETWORK);
        if (layerizeds.size() == 1) {
            // Update conversionTable.
            conversionTable().addEntryNetwork(layerizeds.get(0), networkId);
            doOnConnectionChangedAddedUpper(networkId);
        }
        break;
    case LAYERIZED_NETWORK:
        subscribeLayerized(networkId);
        // Mirror case: map to the upper network if it is already connected.
        ArrayList<String> uppers =
            conversionTable().getConnectionList(UPPER_NETWORK);
        if (uppers.size() == 1) {
            // Update conversionTable.
            conversionTable().addEntryNetwork(uppers.get(0), networkId);
            doOnConnectionChangedAddedLayerized(networkId);
        }
        break;
    default:
        String errorMessage = "unknown type: " + connectionType;
        logger.error(errorMessage);
        throw new IllegalArgumentException(errorMessage);
    }
    // Publish the updated connection state (RUNNING).
    systemMngInterface().putConnection(curr);
}
/* (non-Javadoc)
* @see org.o3project.odenos.component.Logic#onConnectionChangedUpdate(org.o3project.odenos.manager.system.event.ComponentConnectionChanged)
*/
@Override
protected void onConnectionChangedUpdate(ComponentConnectionChanged message) {
    logger.debug("");
    /*
     * do nothing — connection updates are intentionally a no-op here.
     */
}
/* (non-Javadoc)
* @see org.o3project.odenos.component.Logic#onConnectionChangedDelete(org.o3project.odenos.manager.system.event.ComponentConnectionChanged)
*/
@Override
protected void onConnectionChangedDelete(ComponentConnectionChanged message) {
    logger.debug("");
    ComponentConnection curr = message.curr();
    // Publish FINALIZING before running the per-type teardown.
    curr.setConnectionState(ComponentConnection.State.FINALIZING);
    systemMngInterface().putConnection(curr);
    String networkId = curr
        .getProperty(ComponentConnectionLogicAndNetwork.NETWORK_ID);
    String connectionType = curr.getConnectionType();
    switch (connectionType) {
    case LOWER_NETWORK:
        doOnConnectionChangedDeleteLower(networkId);
        break;
    case UPPER_NETWORK:
        doOnConnectionChangedDeleteUpper(networkId);
        break;
    case LAYERIZED_NETWORK:
        doOnConnectionChangedDeleteLayerized(networkId);
        break;
    default:
        logger.error("unknown type: " + connectionType);
        return;
    }
    // reset conversionTable.
    conversionTable().delEntryConnectionType(networkId);
    conversionTable().delEntryNetwork(networkId);
    // Publish the final connection state (NONE).
    curr.setConnectionState(ComponentConnection.State.NONE);
    systemMngInterface().putConnection(curr);
}
/**
 * Handles a newly added lower network: drops the boundary links the
 * layerized network derived from stale lower flows, then re-derives them
 * from the lower network's current flows.
 *
 * Fix: the original iterated {@code lowerFlows.keySet()} (a live view)
 * while calling {@code lowerFlows.remove(linkId)} in the loop body, which
 * throws ConcurrentModificationException on a non-empty map. Iterate over
 * a snapshot of the keys instead.
 *
 * @param lowerId ID of the added lower network.
 */
protected void doOnConnectionChangedAddedLower(String lowerId) {
    logger.debug("");
    List<String> layerizedIds = getLayerizedNetworkIds();
    if (CollectionUtils.isNotEmpty(layerizedIds)) {
        NetworkInterface layerizedIf = getNetworkIf(layerizedIds);
        Map<String, List<String>> lowerFlows =
            linkLayerizerOnFlow.getLowerFlows();
        // Snapshot the keys: removing from the map while iterating its
        // keySet() view would throw ConcurrentModificationException.
        for (String linkId : new ArrayList<String>(lowerFlows.keySet())) {
            layerizedIf.delLink(linkId);
            lowerFlows.remove(linkId);
        }
        setBoundaryPortAttr();
    }
    List<String> upperIds = getUpperNetworkIds();
    if (CollectionUtils.isNotEmpty(upperIds)) {
        NetworkInterface upperIf = getNetworkIf(upperIds);
        FlowSet flowSet = upperIf.getFlowSet(); // TODO null check
        for (Flow flow : flowSet.getFlows().values()) {
            if (!(flow instanceof BasicFlow)) {
                continue;
            }
            BasicFlow basicFlow = (BasicFlow) flow;
            linkLayerizerOnFlow.flowAddedLowerNw(
                lowerId, basicFlow);
        }
    }
}
/**
 * Handles a newly added upper network: clears the layerized topology and
 * replays the lower network's flows so boundary links are re-created.
 *
 * @param upperId ID of the added upper network (currently unused here).
 */
protected void doOnConnectionChangedAddedUpper(String upperId) {
    logger.debug("");
    List<String> layerizedIds = getLayerizedNetworkIds();
    if (CollectionUtils.isNotEmpty(layerizedIds)) {
        NetworkInterface layerizedNwif = getNetworkIf(layerizedIds);
        // Drop the stale layerized topology; it will be rebuilt from events.
        layerizedNwif.deleteTopology();
        setBoundaryPortAttr();
    }
    List<String> lowerIds = getLowerNetworkIds();
    if (CollectionUtils.isNotEmpty(lowerIds)) {
        NetworkInterface lowerNwif = getNetworkIf(lowerIds);
        FlowSet flowSet = lowerNwif.getFlowSet();
        Map<String, Flow> flowMap = flowSet.getFlows();
        Collection<Flow> flows = flowMap.values();
        for (Flow flow : flows) {
            // Only BasicFlow instances can be translated to boundary links.
            if (!(flow instanceof BasicFlow)) {
                continue;
            }
            BasicFlow basicFlow = (BasicFlow) flow;
            linkLayerizerOnFlow.flowAddedLowerNw(
                lowerNwif.getNetworkId(), basicFlow);
        }
    }
}
/**
 * Handles a newly added layerized network: copies the upper network's
 * topology (nodes, ports, links) into it, then replays the lower
 * network's flows to derive boundary links.
 *
 * @param layeriedId ID of the added layerized network.
 */
protected void doOnConnectionChangedAddedLayerized(String layeriedId) {
    logger.debug("");
    List<String> upperIds = getUpperNetworkIds();
    if (CollectionUtils.isNotEmpty(upperIds)) {
        NetworkInterface upperNwif = getNetworkIf(upperIds);
        // Start from a clean flow/link bookkeeping state.
        linkLayerizerOnFlow.getLowerFlows().clear();
        linkLayerizerOnFlow.getLayerizedLinks().clear();
        Topology topology = upperNwif.getTopology();
        NetworkInterface layerizedNwif = networkInterfaces()
            .get(layeriedId);
        // Mirror nodes and their ports before links, so link endpoints exist.
        Map<String, Node> nodes = topology.getNodeMap();
        for (Node node : nodes.values()) {
            layerizedNwif.putNode(node);
            Map<String, Port> ports = node.getPortMap();
            for (Port port : ports.values()) {
                layerizedNwif.putPort(port);
            }
        }
        Map<String, Link> links = topology.getLinkMap();
        for (Link link : links.values()) {
            layerizedNwif.putLink(link);
        }
    }
    List<String> lowerIds = getLowerNetworkIds();
    if (CollectionUtils.isNotEmpty(lowerIds)) {
        NetworkInterface lowerNwif = getNetworkIf(lowerIds);
        FlowSet flowSet = lowerNwif.getFlowSet();
        Map<String, Flow> flowMap = flowSet.getFlows();
        Collection<Flow> flows = flowMap.values();
        for (Flow flow : flows) {
            // Only BasicFlow instances can be translated to boundary links.
            if (!(flow instanceof BasicFlow)) {
                continue;
            }
            BasicFlow basicFlow = (BasicFlow) flow;
            linkLayerizerOnFlow.flowAddedLowerNw(
                lowerNwif.getNetworkId(), basicFlow);
        }
    }
}
/**
 * Handles deletion of the lower network: unsubscribes from its events and
 * removes the boundary links it contributed to the layerized network.
 *
 * @param lowerId ID of the deleted lower network.
 */
protected void doOnConnectionChangedDeleteLower(String lowerId) {
    logger.debug("");
    unsubscribeLower(lowerId);
    NetworkInterface layerizedIf = getLayerizedNetworkIf();
    if (layerizedIf == null) {
        // No layerized network connected; nothing to clean up.
        return;
    }
    // delete layerized's boundary link.
    for (String linkId : linkLayerizerOnFlow.getLowerFlows().keySet()) {
        layerizedIf.delLink(linkId);
    }
    // reset layerizedLinks & lowerFlows.
    linkLayerizerOnFlow.getLayerizedLinks().clear();
    linkLayerizerOnFlow.getLowerFlows().clear();
}
/**
 * Handles deletion of the upper network: removes the upper network's
 * boundary links and flows, fails the layerized flows, tears down the
 * layerized topology and resets all conversion/bookkeeping tables.
 *
 * Fix: {@code getUpperNetworkIf()} was dereferenced without a null check,
 * although the sibling {@code doOnConnectionChangedDeleteLayerized} guards
 * the same lookup against null. Guard here too so teardown of the
 * layerized side still runs when the upper interface is already gone.
 *
 * @param upperId ID of the deleted upper network.
 */
protected void doOnConnectionChangedDeleteUpper(String upperId) {
    logger.debug("");
    unsubscribeUpper(upperId);
    NetworkInterface upperIf = getUpperNetworkIf();
    if (upperIf != null) {
        // delete upper's boundary link.
        for (String linkId : linkLayerizerOnFlow.getLowerFlows().keySet()) {
            upperIf.delLink(linkId);
        }
        // delete upper flows.
        upperIf.deleteAllFlow();
    }
    NetworkInterface layerizedIf = getLayerizedNetworkIf();
    if (layerizedIf == null) {
        return;
    }
    // update layerized flow's status.
    layerizedIf.putStatusFaildAllFlow();
    // delete layerized's topology: ports first, then their node.
    Map<String, Node> nodes = layerizedIf.getNodes();
    for (Node node : nodes.values()) {
        String nodeId = node.getId();
        Map<String, Port> ports = node.getPortMap();
        for (Port port : ports.values()) {
            String portId = port.getId();
            layerizedIf.delPort(nodeId, portId);
        }
        layerizedIf.delNode(nodeId);
    }
    Map<String, Link> links = layerizedIf.getLinks();
    for (Link link : links.values()) {
        String linkId = link.getId();
        layerizedIf.delLink(linkId);
    }
    // reset conversionTable.
    conversionTable().getLink().clear();
    conversionTable().getPort().clear();
    conversionTable().getNode().clear();
    conversionTable().getFlow().clear();
    // reset layerizedLinks & lowerFlows.
    linkLayerizerOnFlow.getLayerizedLinks().clear();
    linkLayerizerOnFlow.getLowerFlows().clear();
}
/**
 * Handles deletion of the layerized network: tears down its topology,
 * fails its flows, removes the upper network's boundary links and flows,
 * and resets all conversion/bookkeeping tables.
 *
 * Fix: {@code getLayerizedNetworkIf()} was dereferenced without a null
 * check, although the sibling {@code doOnConnectionChangedDeleteLower}
 * guards the same lookup against null. Guard here too so teardown of the
 * upper side still runs when the layerized interface is already gone.
 *
 * @param layerizedId ID of the deleted layerized network.
 */
protected void doOnConnectionChangedDeleteLayerized(String layerizedId) {
    logger.debug("");
    unsubscribeLayerized(layerizedId);
    NetworkInterface layerizedIf = getLayerizedNetworkIf();
    if (layerizedIf != null) {
        // delete layerized's topology: ports first, then their node.
        Map<String, Node> nodes = layerizedIf.getNodes();
        for (Node node : nodes.values()) {
            String nodeId = node.getId();
            Map<String, Port> ports = node.getPortMap();
            for (Port port : ports.values()) {
                String portId = port.getId();
                layerizedIf.delPort(nodeId, portId);
            }
            layerizedIf.delNode(nodeId);
        }
        Map<String, Link> links = layerizedIf.getLinks();
        for (Link link : links.values()) {
            String linkId = link.getId();
            layerizedIf.delLink(linkId);
        }
        // update layerized flow's status.
        layerizedIf.putStatusFaildAllFlow();
    }
    NetworkInterface upperIf = getUpperNetworkIf();
    if (upperIf == null) {
        return;
    }
    // delete upper's all flows.
    upperIf.deleteAllFlow();
    // delete upper's boundary link.
    for (String linkId : linkLayerizerOnFlow.getLowerFlows().keySet()) {
        upperIf.delLink(linkId);
    }
    // reset conversionTable.
    conversionTable().getLink().clear();
    conversionTable().getPort().clear();
    conversionTable().getNode().clear();
    conversionTable().getFlow().clear();
    // reset layerizedLinks & lowerFlows.
    linkLayerizerOnFlow.getLayerizedLinks().clear();
    linkLayerizerOnFlow.getLowerFlows().clear();
}
/**
 * Subscribes to the lower network's port and flow events; flow events are
 * requested with an empty attribute filter.
 *
 * @param lowerId ID of the lower network.
 */
protected void subscribeLower(final String lowerId) {
    logger.debug("");
    try {
        addEntryEventSubscription(PORT_CHANGED, lowerId);
        addEntryEventSubscription(FLOW_CHANGED, lowerId);
        // No attribute filtering for lower-network flow events.
        ArrayList<String> emptyFlowAttributes = new ArrayList<String>();
        updateEntryEventSubscription(FLOW_CHANGED, lowerId, emptyFlowAttributes);
        applyEventSubscription();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    }
}
/**
 * Subscribes to events of an upper network.
 * Registers NODE/PORT/LINK/FLOW_CHANGED and IN_PACKET_ADDED, then narrows
 * each *_CHANGED subscription to the attribute keys listed below.
 *
 * @param upperId ID of the upper network to subscribe to.
 */
protected void subscribeUpper(final String upperId) {
logger.debug("");
try {
addEntryEventSubscription(NODE_CHANGED, upperId);
addEntryEventSubscription(PORT_CHANGED, upperId);
addEntryEventSubscription(LINK_CHANGED, upperId);
addEntryEventSubscription(FLOW_CHANGED, upperId);
addEntryEventSubscription(IN_PACKET_ADDED, upperId);
// attribute keys are built as ATTRIBUTES + SEPARATOR + <name>
String attrBase = AttrElements.ATTRIBUTES + SEPARATOR + "%s";
ArrayList<String> nodeAttributes = new ArrayList<String>(
Arrays.asList(
String.format(attrBase, AttrElements.OPER_STATUS),
String.format(attrBase, AttrElements.PHYSICAL_ID),
String.format(attrBase, AttrElements.VENDOR)));
updateEntryEventSubscription(NODE_CHANGED, upperId, nodeAttributes);
ArrayList<String> portAttributes = new ArrayList<String>(
Arrays.asList(
String.format(attrBase, AttrElements.OPER_STATUS),
String.format(attrBase, AttrElements.MAX_BANDWIDTH),
String.format(attrBase, AttrElements.PHYSICAL_ID),
String.format(attrBase, AttrElements.VENDOR)));
updateEntryEventSubscription(PORT_CHANGED, upperId, portAttributes);
ArrayList<String> linkAttributes = new ArrayList<String>(
Arrays.asList(
String.format(attrBase, AttrElements.OPER_STATUS),
String.format(attrBase, AttrElements.LATENCY),
String.format(attrBase, AttrElements.MAX_BANDWIDTH)));
updateEntryEventSubscription(LINK_CHANGED, upperId, linkAttributes);
// flow subscription also tracks the top-level STATUS element
ArrayList<String> flowAttributes = new ArrayList<String>(
Arrays.asList(
NetworkElements.STATUS,
String.format(attrBase, AttrElements.REQ_BANDWIDTH),
String.format(attrBase, AttrElements.REQ_LATENCY)));
updateEntryEventSubscription(FLOW_CHANGED, upperId, flowAttributes);
applyEventSubscription();
} catch (Exception ex) {
logger.error(ex.getMessage(), ex);
}
}
/**
 * Subscribes to events of a layerized network.
 * Registers LINK_CHANGED, FLOW_CHANGED and OUT_PACKET_ADDED, then narrows
 * the *_CHANGED subscriptions to the attribute keys listed below.
 *
 * @param layerizedId ID of the layerized network to subscribe to.
 */
protected void subscribeLayerized(final String layerizedId) {
logger.debug("");
try {
addEntryEventSubscription(LINK_CHANGED, layerizedId);
addEntryEventSubscription(FLOW_CHANGED, layerizedId);
addEntryEventSubscription(OUT_PACKET_ADDED, layerizedId);
// attribute keys are built as ATTRIBUTES + SEPARATOR + <name>
String attrBase = AttrElements.ATTRIBUTES + SEPARATOR + "%s";
// NOTE(review): NODE_CHANGED and PORT_CHANGED are narrowed below but
// never added above (unlike subscribeUpper) — confirm intentional.
ArrayList<String> nodeAttributes = new ArrayList<String>(
Arrays.asList(
String.format(attrBase, AttrElements.ADMIN_STATUS)));
updateEntryEventSubscription(NODE_CHANGED, layerizedId,
nodeAttributes);
ArrayList<String> portAttributes = new ArrayList<String>(
Arrays.asList(
String.format(attrBase,
AttrElements.UNRESERVED_BANDWIDTH),
String.format(attrBase, AttrElements.IS_BOUNDARY)));
updateEntryEventSubscription(PORT_CHANGED, layerizedId,
portAttributes);
ArrayList<String> linkAttributes = new ArrayList<String>(
Arrays.asList(
String.format(attrBase, AttrElements.COST),
String.format(attrBase, AttrElements.REQ_LATENCY),
String.format(attrBase,
AttrElements.UNRESERVED_BANDWIDTH),
String.format(attrBase, AttrElements.REQ_BANDWIDTH)));
updateEntryEventSubscription(LINK_CHANGED, layerizedId,
linkAttributes);
// flow subscription also tracks top-level OWNER/ENABLED/PRIORITY
ArrayList<String> flowAttributes = new ArrayList<String>(
Arrays.asList(
NetworkElements.OWNER,
NetworkElements.ENABLED,
NetworkElements.PRIORITY,
String.format(attrBase, AttrElements.BANDWIDTH),
String.format(attrBase, AttrElements.LATENCY)));
updateEntryEventSubscription(FLOW_CHANGED, layerizedId,
flowAttributes);
applyEventSubscription();
} catch (Exception ex) {
logger.error(ex.getMessage(), ex);
}
}
/**
 * Cancels event subscriptions for a lower network.
 *
 * @param lowerid ID for lower network.
 */
protected void unsubscribeLower(final String lowerid) {
    logger.debug("");
    try {
        // NOTE(review): only FLOW_CHANGED is removed; the PORT_CHANGED
        // subscription added in subscribeLower is left — confirm intentional.
        removeEntryEventSubscription(FLOW_CHANGED, lowerid);
        applyEventSubscription();
    } catch (Exception ex) {
        logger.warn(ex.getMessage(), ex);
    }
}
/**
 * Cancels all event subscriptions for an upper network.
 *
 * @param upperId ID for upper network.
 */
protected void unsubscribeUpper(final String upperId) {
    logger.debug("");
    try {
        // drop every event type registered in subscribeUpper
        String[] eventTypes = {
            NODE_CHANGED, PORT_CHANGED, LINK_CHANGED,
            FLOW_CHANGED, IN_PACKET_ADDED
        };
        for (String eventType : eventTypes) {
            removeEntryEventSubscription(eventType, upperId);
        }
        applyEventSubscription();
    } catch (Exception ex) {
        logger.warn(ex.getMessage(), ex);
    }
}
/**
 * Cancels all event subscriptions for a layerized network.
 *
 * @param layerizedId ID for layerized network.
 */
protected void unsubscribeLayerized(final String layerizedId) {
    logger.debug("");
    try {
        // drop every event type this component may have registered
        String[] eventTypes = {
            NODE_CHANGED, PORT_CHANGED, LINK_CHANGED,
            FLOW_CHANGED, OUT_PACKET_ADDED
        };
        for (String eventType : eventTypes) {
            removeEntryEventSubscription(eventType, layerizedId);
        }
        applyEventSubscription();
    } catch (Exception ex) {
        logger.warn(ex.getMessage(), ex);
    }
}
/* //////////////////////////////////////////////////
*
* Request Event
*
* //////////////////////////////////////////////////
*/
/**
 * Builds the REST request parser for LinkLayerizer.
 * Routes settings/upper_link_sync, settings/boundaries, lower_flows and
 * layerized_links requests to the corresponding handler methods.
 *
 * BUG FIX: the GET rules for "lower_flows/&lt;link_id&gt;" and
 * "layerized_links/&lt;flow_id&gt;" previously read the ID from the request
 * body even though it is declared as a path parameter; they now use
 * {@code parsed.getParam(...)}, consistent with the boundary rules.
 *
 * @return RequestParser for LinkLayerizer.
 */
private RequestParser<IActionCallback> createParser() {
    logger.debug("");
    return new RequestParser<IActionCallback>() {
        {
            addRule(Method.PUT,
                    "settings/upper_link_sync",
                    new IActionCallback() {
                        @SuppressWarnings("unchecked")
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            // body is {"sync": true|false}
                            HashMap<String, Boolean> syncMap =
                                    (HashMap<String, Boolean>) parsed
                                    .getRequest().getBodyAsMap(
                                            Boolean.class);
                            return putUpperLinkSync(syncMap.get("sync"));
                        }
                    });
            addRule(Method.POST,
                    "settings/boundaries",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            LinkLayerizerBoundary boundary = parsed
                                    .getRequest().getBody(
                                            LinkLayerizerBoundary.class);
                            return postBoundary(boundary);
                        }
                    });
            addRule(Method.GET,
                    "settings/boundaries",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            return getBoundaries();
                        }
                    });
            addRule(Method.GET,
                    "settings/boundaries/<boundary_id>",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            String boundaryId = parsed
                                    .getParam("boundary_id");
                            return getBoundary(boundaryId);
                        }
                    });
            addRule(Method.PUT,
                    "settings/boundaries/<boundary_id>",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            LinkLayerizerBoundary boundary = parsed
                                    .getRequest().getBody(
                                            LinkLayerizerBoundary.class);
                            String boundaryId = parsed
                                    .getParam("boundary_id");
                            return putBoundary(boundaryId, boundary);
                        }
                    });
            addRule(Method.DELETE,
                    "settings/boundaries/<boundary_id>",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            String boundaryId = parsed
                                    .getParam("boundary_id");
                            return deleteBoundary(boundaryId);
                        }
                    });
            addRule(Method.GET,
                    "lower_flows",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            return getLowerFlows();
                        }
                    });
            addRule(Method.GET,
                    "lower_flows/<link_id>",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            // read the link ID from the path, not the body
                            String linkId = parsed.getParam("link_id");
                            return getLowerFlows(linkId);
                        }
                    });
            addRule(Method.GET,
                    "layerized_links",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            return getLayerizedlinks();
                        }
                    });
            addRule(Method.GET,
                    "layerized_links/<flow_id>",
                    new IActionCallback() {
                        @Override
                        public Response process(
                                final RequestParser<IActionCallback>
                                .ParsedRequest parsed) throws Exception {
                            // read the flow ID from the path, not the body
                            String flowId = parsed.getParam("flow_id");
                            return getLayerizedLink(flowId);
                        }
                    });
        }
    };
}
/**
 * Dispatches an incoming request through the parser to its handler.
 * Any unroutable or failing request yields BAD_REQUEST.
 *
 * @param request the incoming request.
 * @return the handler's response, or BAD_REQUEST on failure.
 */
@Override
protected Response onRequest(Request request) {
    logger.debug("received {}", request.path);
    try {
        RequestParser<IActionCallback>.ParsedRequest parsed =
                parser.parse(request);
        // either a failed parse or a missing handler means an unknown request
        IActionCallback callback =
                (parsed == null) ? null : parsed.getResult();
        if (callback == null) {
            return new Response(Response.BAD_REQUEST,
                    "Error unknown request ");
        }
        // Get response.
        return callback.process(parsed);
    } catch (Exception ex) {
        logger.error("Error unknown request", ex);
        return new Response(Response.BAD_REQUEST, "Error unknown request ");
    }
}
/*
* //////////////////////////////////////////////////
*
* Event method override
*
* //////////////////////////////////////////////////
*/
/**
 * Pre-processing of a port addition.
 * Only lower-network boundary ports are handled here: they get the
 * IS_BOUNDARY attribute and default processing is suppressed.
 *
 * @param networkId ID of the network the port was added to.
 * @param port the added port.
 * @return true to continue with default processing, false to suppress it.
 */
@Override
protected boolean onPortAddedPre(String networkId, Port port) {
logger.debug("");
// non-lower networks take the default processing path
if (!isLowerNetwork(networkId)) {
return true;
}
// lower-network ports that are not boundary ports are ignored entirely
if (!linkLayerizerBoundaryTable.isBoudaryPort(
networkId, port.getNode(), port.getId())) {
return false;
}
NetworkInterface nwIf = networkInterfaces().get(networkId);
Port boundaryPort = nwIf.getPort(port.getNode(), port.getId());
if (boundaryPort != null) {
// mark the port as a boundary port on the lower network
boundaryPort.putAttribute(AttrElements.IS_BOUNDARY, "true");
nwIf.putPort(boundaryPort);
}
return false;
}
/**
 * Post-processing of a port addition.
 * For upper-network boundary ports: marks the port with IS_BOUNDARY and
 * re-reflects boundary links for the new port.
 *
 * @param networkId ID of the network the port was added to.
 * @param port the added port.
 * @param respList responses of the default processing.
 */
@Override
protected void onPortAddedPost(
String networkId, Port port, HashMap<String, Response> respList) {
logger.debug("");
// only upper-network boundary ports are of interest here
if (!isUpperNetwork(networkId)) {
return;
}
if (!linkLayerizerBoundaryTable.isBoudaryPort(
networkId, port.getNode(), port.getId())) {
return;
}
NetworkInterface nwIf = networkInterfaces().get(networkId);
Port boundaryPort = nwIf.getPort(port.getNode(), port.getId());
if (boundaryPort == null) {
return;
}
// update upper's port attribute.
boundaryPort.putAttribute(AttrElements.IS_BOUNDARY, "true");
nwIf.putPort(boundaryPort);
// update boundary links.
reflectBoundaryLinkOnUpperPortAdded(networkId, boundaryPort);
}
/**
 * Post-processing of a port deletion.
 * Only upper-network boundary ports are handled: the deletion is
 * reflected onto the boundary links.
 *
 * @param networkId ID of the network the port was deleted from.
 * @param port the deleted port.
 * @param respList responses of the default processing.
 */
@Override
protected void onPortDeletePost(String networkId, Port port, HashMap<String, Response> respList) {
    logger.debug("");
    boolean isUpperBoundaryPort = isUpperNetwork(networkId)
            && linkLayerizerBoundaryTable.isBoudaryPort(
                    networkId, port.getNode(), port.getId());
    if (isUpperBoundaryPort) {
        // update boundary links.
        reflectBoundaryLinkOnUpperPortDelete(networkId, port);
    }
}
/**
 * Pre-processing of a link addition.
 * For an upper-network link, tries to match it by endpoints against an
 * existing layerized link; on a match, registers the link conversion and
 * syncs any layerized flows whose path uses the matched link, then
 * suppresses default processing. For the layerized network, the link is
 * propagated only when upper-link sync is enabled.
 *
 * @param networkId ID of the network the link was added to.
 * @param link the added link.
 * @return true to continue with default processing, false to suppress it.
 * @throws IllegalArgumentException if networkId is blank.
 */
@Override
protected boolean onLinkAddedPre(String networkId, Link link) {
logger.debug("");
if (StringUtils.isBlank(networkId)) {
String message = "invalid networkID: " + networkId;
logger.error(message);
throw new IllegalArgumentException(message);
}
if (isUpperNetwork(networkId)) {
NetworkInterface layNwIf = getLayerizedNetworkIf();
if (layNwIf == null) {
// no layerized network attached: suppress default processing
return false;
}
Map<String, Link> layerizedLinks = layNwIf.getLinks();
if (layerizedLinks == null) {
return true;
}
// look for a layerized link with exactly the same endpoints
String convLinkId = null;
for (String layLinkId : layerizedLinks.keySet()) {
Link layLink = layerizedLinks.get(layLinkId);
// clone + setPorts so the comparison differs only in endpoints
Link compLink = layLink.clone();
compLink.setPorts(
link.getSrcNode(), link.getSrcPort(),
link.getDstNode(), link.getDstPort());
// register conversion link.
if (compLink.equals(layLink)) {
conversionTable().addEntryLink(
networkId, link.getId(), // upper's link
layNwIf.getNetworkId(), layLinkId); // layerized's link
convLinkId = layLinkId;
break;
}
}
if (convLinkId == null) {
return true; // default on_link_add.
}
// sync layerized's flow to upper.
FlowSet layerizedFlows = layNwIf.getFlowSet();
for (String layFlowId : layerizedFlows.getFlows().keySet()) {
BasicFlow layFlow = (BasicFlow) layerizedFlows.getFlow(layFlowId);
if (layFlow.getPath().contains(convLinkId)) {
linkLayerizerOnFlow.flowAddedLayerizedNwExistPath(
layNwIf.getNetworkId(), layFlow);
}
}
return false;
}
if (isLayerizedNetwork(networkId)) {
// reflect layerized-side link additions only when sync is enabled
return upperLinkSync;
}
return true;
}
/**
 * Pre-processing of a link update.
 * Layerized-network updates are propagated only when upper-link sync is
 * enabled; all other networks take default processing.
 *
 * @param networkId ID of the network the link belongs to.
 * @param prev the link before the update.
 * @param curr the link after the update.
 * @param attributesList the changed attribute keys.
 * @return true to continue with default processing, false to suppress it.
 * @throws IllegalArgumentException if networkId is blank.
 */
@Override
protected boolean onLinkUpdatePre(String networkId, Link prev, Link curr,
        ArrayList<String> attributesList) {
    logger.debug("");
    if (StringUtils.isBlank(networkId)) {
        String message = "invalid networkID: " + networkId;
        logger.error(message);
        throw new IllegalArgumentException(message);
    }
    return isLayerizedNetwork(networkId) ? upperLinkSync : true;
}
/**
 * Pre-processing of a link deletion.
 * Layerized-network deletions are propagated only when upper-link sync is
 * enabled; a blank network ID suppresses processing with a warning.
 *
 * @param networkId ID of the network the link belongs to.
 * @param link the link being deleted.
 * @return true to continue with default processing, false to suppress it.
 */
@Override
protected boolean onLinkDeletePre(String networkId, Link link) {
    logger.debug("");
    if (StringUtils.isBlank(networkId)) {
        logger.warn("invalid networkID: " + networkId);
        return false;
    }
    return isLayerizedNetwork(networkId) ? upperLinkSync : true;
}
/**
 * Pre-processing of a flow addition.
 * Lower-network flows are handed to linkLayerizerOnFlow; layerized flows
 * with a non-empty path are synced, while path-less layerized flows take
 * default processing.
 *
 * @param networkId ID of the network the flow was added to.
 * @param flow the added flow.
 * @return true to continue with default processing, false to suppress it.
 * @throws IllegalArgumentException if networkId is blank or flow is null.
 */
@Override
protected boolean onFlowAddedPre(String networkId, Flow flow) {
logger.debug("");
if ((StringUtils.isBlank(networkId)) || (flow == null)) {
logger.error("invalid parameter");
throw new IllegalArgumentException("invalid parameter");
}
BasicFlow basicFlow = getFlow(networkId, flow);
if (basicFlow == null) {
return false;
}
if (isLowerNetwork(networkId)) {
linkLayerizerOnFlow.flowAddedLowerNw(networkId, basicFlow);
return false;
}
if (isLayerizedNetwork(networkId)) {
// only flows that already carry a path are synced here
if (basicFlow.getPath() != null
&& basicFlow.getPath().size() > 0) {
linkLayerizerOnFlow.flowAddedLayerizedNwExistPath(
networkId, basicFlow);
} else {
return true;
}
}
return false;
}
/**
 * Pre-processing of a flow update.
 * Lower-network updates always go to linkLayerizerOnFlow; upper-network
 * updates are handled only when the flow carries a non-empty path.
 *
 * @param networkId ID of the network the flow belongs to.
 * @param prev the flow before the update.
 * @param curr the flow after the update.
 * @param attributesList the changed attribute keys.
 * @return true to continue with default processing, false to suppress it.
 */
@Override
protected boolean onFlowUpdatePre(String networkId, Flow prev, Flow curr,
ArrayList<String> attributesList) {
logger.debug("");
BasicFlow basicFlow = getFlow(networkId, curr);
if (basicFlow == null) {
return false;
}
if (isLowerNetwork(networkId)) {
linkLayerizerOnFlow.flowUpdateLowerNw(networkId, basicFlow, attributesList);
return false;
}
// upper-network updates matter only once the flow has a path
if (basicFlow.getPath() != null
&& basicFlow.getPath().size() > 0) {
if (isUpperNetwork(networkId)) {
linkLayerizerOnFlow.flowUpdateUpperNwExistPath(
networkId, basicFlow, attributesList);
return false;
}
}
return true;
}
/**
 * Pre-processing of a flow deletion.
 * Lower-network deletions are handed to linkLayerizerOnFlow and
 * suppressed; layerized-network deletions take default processing; all
 * other networks are suppressed.
 *
 * Fix: removed a dead, empty {@code if (!upperLinkSync) { }} block that
 * had no effect.
 *
 * @param networkId ID of the network the flow belongs to.
 * @param flow the flow being deleted.
 * @return true to continue with default processing, false to suppress it.
 */
@Override
protected boolean onFlowDeletePre(String networkId, Flow flow) {
    logger.debug("");
    if (flow == null || !(flow instanceof BasicFlow)) {
        logger.warn("invalid flow.");
        return false;
    }
    BasicFlow basicFlow = (BasicFlow) flow;
    if (isLowerNetwork(networkId)) {
        linkLayerizerOnFlow.flowDeleteLowerNw(networkId, basicFlow);
        return false;
    }
    if (isLayerizedNetwork(networkId)) {
        return true;
    }
    return false;
}
/**
 * Pre-processing of an in-packet event on the upper network.
 * Suppresses the packet if its in-port is an endpoint of any layerized
 * link (those packets belong to the layer mapping, not normal traffic).
 *
 * @param networkId ID of the network that received the packet.
 * @param msg the in-packet-added event.
 * @return true to continue with default processing, false to suppress it.
 */
@Override
protected boolean onInPacketAddedPre(String networkId, InPacketAdded msg) {
logger.debug("");
NetworkInterface upperNwIf = networkInterfaces().get(networkId);
NetworkInterface layerizedNwIf = getNetworkIf(LAYERIZED_NETWORK);
if (upperNwIf == null || layerizedNwIf == null) {
return false;
}
InPacket inPacket = getInPacket(upperNwIf, msg.getId());
if (inPacket == null) {
return false;
}
String nodeId = inPacket.getNodeId();
String portId = inPacket.getPortId();
// suppress packets arriving on either endpoint of a layerized link
Map<String, Link> layLinks = layerizedNwIf.getLinks();
for (String linkId : layLinks.keySet()) {
Link link = layLinks.get(linkId);
if (link.getSrcNode().equals(nodeId)
&& link.getSrcPort().equals(portId)) {
return false;
}
if (link.getDstNode().equals(nodeId)
&& link.getDstPort().equals(portId)) {
return false;
}
}
return true;
}
/**
 * reflect boundary link on upper's port added event.
 * For every boundary whose upper side matches the added port, replays the
 * lower flows that traverse the boundary's lower port so that layerized
 * links are (re)created for them.
 *
 * @param upperNwId upper's network id.
 * @param upperPort upper's port.
 */
protected void reflectBoundaryLinkOnUpperPortAdded(String upperNwId, Port upperPort) {
logger.debug("");
// get lower flows.
NetworkInterface lowerNwIf = getLowerNetworkIf();
if (lowerNwIf == null) {
logger.warn("not exist lower network interface.");
return;
}
FlowSet lowerFlows = lowerNwIf.getFlowSet();
if (lowerFlows == null) {
logger.debug("not exist lower flows.");
return;
}
logger.debug(String.format("lower flows : %s", lowerFlows));
// reflect boundary links by lower flow.
Map<String, LinkLayerizerBoundary> layerizerBoundaries =
linkLayerizerBoundaryTable.getBoundaries();
for (LinkLayerizerBoundary boundary : layerizerBoundaries.values()) {
// only boundaries whose upper side is exactly this port
if (String.valueOf(boundary.getUpperNw()).equals(upperNwId)
&& String.valueOf(boundary.getUpperNwNode()).equals(upperPort.getNode())
&& String.valueOf(boundary.getUpperNwPort()).equals(upperPort.getId())) {
for (Flow lowFlow : lowerFlows.getFlows().values()) {
if (!(lowFlow instanceof BasicFlow)) {
logger.error("lower flow is not BasicFlow's instance.");
continue;
}
Port lowPort = lowerNwIf.getPort(
String.valueOf(boundary.getLowerNwNode()),
String.valueOf(boundary.getLowerNwPort()));
if (lowPort != null && isContainPortInFlow(lowPort, (BasicFlow)lowFlow)) {
if (String.valueOf(lowFlow.getStatus())
.equals(FlowObject.FlowStatus.ESTABLISHED.toString())) {
// replay add+update as ESTABLISHING -> ESTABLISHED so the
// layerized link is created and then confirmed
lowFlow.setStatus(FlowObject.FlowStatus.ESTABLISHING.toString());
// Add & Update Layerized link.
linkLayerizerOnFlow.flowAddedLowerNw(
boundary.getLowerNw(), (BasicFlow)lowFlow);
lowFlow.setStatus(FlowObject.FlowStatus.ESTABLISHED.toString());
linkLayerizerOnFlow.flowUpdateLowerNw(
boundary.getLowerNw(), (BasicFlow)lowFlow, new ArrayList<String>());
} else {
// Add Layerized link.
linkLayerizerOnFlow.flowAddedLowerNw(
boundary.getLowerNw(), (BasicFlow)lowFlow);
}
}
}
}
}
}
/**
 * reflect boundary link on upper's port delete event.
 * For every boundary whose upper side matches the deleted port, deletes
 * the layerized state of the lower flows traversing the boundary's lower
 * port.
 *
 * @param upperNwId upper's network id.
 * @param upperPort upper's port.
 */
protected void reflectBoundaryLinkOnUpperPortDelete(String upperNwId, Port upperPort) {
logger.debug("");
// get lower flows.
NetworkInterface lowerNwIf = getLowerNetworkIf();
if (lowerNwIf == null) {
logger.warn("not exist lower network interface.");
return;
}
FlowSet lowerFlows = lowerNwIf.getFlowSet();
if (lowerFlows == null) {
logger.debug("not exist lower flows.");
return;
}
// reflect boundary links by lower flow.
Map<String, LinkLayerizerBoundary> layerizerBoundaries =
linkLayerizerBoundaryTable.getBoundaries();
for (LinkLayerizerBoundary boundary : layerizerBoundaries.values()) {
// only boundaries whose upper side is exactly this port
if (String.valueOf(boundary.getUpperNw()).equals(upperNwId)
&& String.valueOf(boundary.getUpperNwNode()).equals(upperPort.getNode())
&& String.valueOf(boundary.getUpperNwPort()).equals(upperPort.getId())) {
for (Flow lowFlow : lowerFlows.getFlows().values()) {
if (!(lowFlow instanceof BasicFlow)) {
logger.error("lower flow is not BasicFlow's instance.");
continue;
}
Port lowPort = lowerNwIf.getPort(
String.valueOf(boundary.getLowerNwNode()),
String.valueOf(boundary.getLowerNwPort()));
if (lowPort != null && isContainPortInFlow(lowPort, (BasicFlow)lowFlow)) {
linkLayerizerOnFlow.flowDeleteLowerNw(
boundary.getLowerNw(), (BasicFlow)lowFlow);
}
}
}
}
}
/**
 * check contain port in flow's match or action.
 * Fixes: removed redundant {@code (BasicFlow)} casts (the parameter is
 * already a BasicFlow) and guarded against an empty match list, which
 * previously threw IndexOutOfBoundsException from {@code get(0)}.
 *
 * @param port Port
 * @param basicFlow BasicFlow
 * @return true: contain port in flow's match or action. false: not contain.
 */
protected final boolean isContainPortInFlow(Port port, BasicFlow basicFlow) {
    logger.debug("");
    String nodeId = String.valueOf(port.getNode());
    String portId = String.valueOf(port.getId());
    // check match's in_node, in_port.
    List<BasicFlowMatch> matches = basicFlow.getMatches();
    if (matches == null || matches.isEmpty() || matches.get(0) == null) {
        logger.error("not exist lower flow's match.");
        return false;
    }
    BasicFlowMatch match = matches.get(0);
    if (nodeId.equals(String.valueOf(match.getInNode()))
            && portId.equals(String.valueOf(match.getInPort()))) {
        return true;
    }
    // check action's edge_node, out_port.
    List<FlowAction> actions = basicFlow.getEdgeActions(nodeId);
    if (actions == null) {
        return false;
    }
    for (FlowAction act : actions) {
        if (act instanceof FlowActionOutput) {
            String outPort = String.valueOf(((FlowActionOutput) act).getOutput());
            if (portId.equals(outPort)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Sets whether lower-network links are reflected into the upper network.
 *
 * @param sync true: the link is reflected in the upper network. false: isn't reflected.
 * @return Response of the update.
 */
protected Response putUpperLinkSync(Boolean sync) {
    /*
     * PUT <base_uri>/settings/upper_link_sync
     */
    logger.debug("");
    if (sync == null) {
        logger.error("sync is null");
        return new Response(Response.BAD_REQUEST, "sync is null");
    }
    setUpperLinkisync(sync);
    return new Response(Response.OK, String.format("sync %s", sync));
}
/**
 * Registers a new boundary and refreshes boundary-port attributes.
 *
 * @param boundary Registered boundary
 * @return Response of the boundary registration.
 */
protected Response postBoundary(LinkLayerizerBoundary boundary) {
    /*
     * POST <base_uri>/settings/boundaries
     */
    logger.debug("");
    try {
        LinkLayerizerBoundary registered =
                linkLayerizerBoundaryTable.addEntry(boundary);
        setBoundaryPortAttr();
        return new Response(Response.OK, registered);
    } catch (LinkLayerizerBoundaryException ex) {
        return new Response(Response.CONFLICT, "boundary already exist");
    }
}
/**
 * Returns all registered boundaries.
 *
 * @return Response of the boundary list.
 */
protected Response getBoundaries() {
    /*
     * GET <base_uri>/settings/boundaries
     */
    logger.debug("");
    return new Response(Response.OK,
            linkLayerizerBoundaryTable.getBoundaries());
}
/**
 * Returns a single boundary by its ID.
 *
 * @param boundaryId ID for boundary.
 * @return Response of the boundary.
 */
protected Response getBoundary(String boundaryId) {
    /*
     * GET <base_uri>/settings/boundaries/<boundary_id>
     */
    logger.debug("");
    if (StringUtils.isBlank(boundaryId)) {
        logger.error("Boundary-ID is empty");
        return new Response(Response.BAD_REQUEST, "Boundary-ID is empty");
    }
    // the ID is not validated for existence; a missing entry returns null
    return new Response(Response.OK,
            linkLayerizerBoundaryTable.getEntry(boundaryId));
}
/**
 * Replaces an existing boundary and refreshes boundary-port attributes.
 *
 * @param boundaryId ID for boundary.
 * @param boundary Replacement boundary.
 * @return Response of the boundary replacement.
 */
protected Response putBoundary(String boundaryId,
        LinkLayerizerBoundary boundary) {
    /*
     * PUT <base_uri>/settings/boundaries/<boundary_id>
     */
    if (logger.isDebugEnabled()) {
        logger.debug("boundaryId: {}", boundaryId);
    }
    try {
        LinkLayerizerBoundary updated = linkLayerizerBoundaryTable
                .updateEntry(boundaryId, boundary);
        setBoundaryPortAttr();
        return new Response(Response.OK, updated);
    } catch (LinkLayerizerBoundaryException ex) {
        return new Response(Response.CONFLICT, "boundary already exist");
    }
}
/**
 * Deletes a boundary, clearing its ports' boundary attribute first.
 *
 * @param boundaryId Deleted boundary ID.
 * @return Response of the boundary delete.
 */
protected Response deleteBoundary(String boundaryId) {
    /*
     * DELETE <base_uri>/settings/boundaries/<boundary_id>
     */
    logger.debug("");
    if (StringUtils.isBlank(boundaryId)) {
        logger.error("Boundary-ID is empty");
        return new Response(Response.BAD_REQUEST, "Boundary-ID is empty");
    }
    // the ID is not validated for existence before deletion
    unsetBoundaryPortAttr(boundaryId);
    linkLayerizerBoundaryTable.deleteEntry(boundaryId);
    return new Response(Response.OK, null);
}
/**
 * Returns the lower-flow map (link ID to flow IDs).
 *
 * @return Response of the flows in a lower network.
 */
protected Response getLowerFlows() {
    /*
     * GET <base_uri>/lower_flows
     */
    logger.debug("");
    return new Response(Response.OK, linkLayerizerOnFlow.getLowerFlows());
}
/**
 * Returns the lower-flow IDs associated with one layerized link.
 *
 * @param linkId ID for link.
 * @return Response of the flow in a lower network.
 */
protected Response getLowerFlows(String linkId) {
    /*
     * GET <base_uri>/lower_flows/<link_id>
     */
    logger.debug("");
    if (StringUtils.isBlank(linkId)) {
        logger.error("Link-ID is empty");
        return new Response(Response.BAD_REQUEST, "Link-ID is empty");
    }
    return new Response(Response.OK,
            linkLayerizerOnFlow.getLowerFlows().get(linkId));
}
/**
 * Returns the layerized-link map (flow ID to link ID).
 *
 * @return Response of the links in a layerized network.
 */
protected Response getLayerizedlinks() {
    /*
     * GET <base_uri>/layerized_links
     */
    logger.debug("");
    return new Response(Response.OK, linkLayerizerOnFlow.getLayerizedLinks());
}
/**
 * Returns the layerized link ID associated with one lower flow.
 *
 * @param flowId ID for flow.
 * @return Response of the link in a layerized network.
 */
protected Response getLayerizedLink(String flowId) {
    /*
     * GET <base_uri>/layerized_links/<flow_id>
     */
    logger.debug("");
    if (StringUtils.isBlank(flowId)) {
        logger.error("Flow-ID is empty");
        return new Response(Response.BAD_REQUEST, "Flow-ID is empty");
    }
    return new Response(Response.OK,
            linkLayerizerOnFlow.getLayerizedLinks().get(flowId));
}
//////////////////////////////////////////////////
// common method
//////////////////////////////////////////////////
/**
 * Returns flow.
 * Resolves the network interface for the given ID and delegates to
 * {@link #getFlow(NetworkInterface, String)}.
 *
 * @param networkId ID for network.
 * @param flow Flow.
 * @return got the flow
 * @throws IllegalArgumentException if networkId is blank or flow is null.
 */
protected BasicFlow getFlow(String networkId, Flow flow) {
    logger.debug("");
    if (StringUtils.isBlank(networkId)) {
        logger.error("Network ID is empty");
        throw new IllegalArgumentException("Network ID is empty");
    }
    if (flow == null) {
        logger.error("flow is null");
        throw new IllegalArgumentException("flow is null");
    }
    return getFlow(networkInterfaces().get(networkId), flow.getFlowId());
}
/**
 * Fetches a flow from the network interface and validates it is a
 * well-formed BasicFlow (non-null, correct type, non-empty matches).
 *
 * Fix: removed a dead {@code basicFlow == null} re-check — the value
 * cannot be null after the instanceof guard and cast.
 *
 * @param nwIf network interface to query.
 * @param flowId ID of the flow to fetch.
 * @return the BasicFlow, or null if the network has no such flow.
 * @throws IllegalArgumentException if nwIf is null or flowId is blank.
 * @throws IllegalStateException if the flow is not a valid BasicFlow.
 */
@Override
protected BasicFlow getFlow(final NetworkInterface nwIf, final String flowId) {
    logger.debug("");
    if ((nwIf == null) || StringUtils.isBlank(flowId)) {
        logger.error("parameter is null");
        throw new IllegalArgumentException("parameter is null");
    }
    Flow flow = nwIf.getFlow(flowId);
    if (flow == null) {
        return null;
    }
    if (!(flow instanceof BasicFlow)) {
        throw new IllegalStateException("flow is not BasicFlow");
    }
    BasicFlow basicFlow = (BasicFlow) flow;
    if (CollectionUtils.isEmpty(basicFlow.getMatches())) {
        throw new IllegalStateException("flow is invalid BasicFlow");
    }
    return basicFlow;
}
/**
 * Marks every boundary's lower and upper ports with the IS_BOUNDARY
 * attribute. Failures on one boundary are logged and do not stop the
 * others (the per-boundary try/catch of the original is preserved).
 *
 * Fix: extracted the duplicated lower/upper logic into a private helper.
 */
protected void setBoundaryPortAttr() {
    logger.debug("");
    Map<String, LinkLayerizerBoundary> boundaryMap =
            linkLayerizerBoundaryTable.getBoundaries();
    for (LinkLayerizerBoundary boundary : boundaryMap.values()) {
        try {
            // lower side of the boundary
            markBoundaryPort(boundary.getLowerNw(),
                    boundary.getLowerNwNode(), boundary.getLowerNwPort());
            // upper side of the boundary
            markBoundaryPort(boundary.getUpperNw(),
                    boundary.getUpperNwNode(), boundary.getUpperNwPort());
        } catch (Exception ex) {
            logger.error("Receive Exception.", ex);
        }
    }
}

/**
 * Sets IS_BOUNDARY="true" on a single port, if it exists.
 *
 * @param networkId ID of the network owning the port.
 * @param nodeId ID of the node owning the port.
 * @param portId ID of the port to mark.
 */
private void markBoundaryPort(String networkId, String nodeId, String portId) {
    NetworkInterface netIf = networkInterfaces().get(networkId);
    Port port = netIf.getPort(nodeId, portId);
    if (port != null) {
        port.putAttribute(AttrElements.IS_BOUNDARY, "true");
        netIf.putPort(port);
    }
}
/**
 * Removes the IS_BOUNDARY attribute from both ports of a boundary.
 *
 * Fix: guards against null network interfaces and null ports — the
 * original dereferenced {@code getPort(...)} unconditionally and threw
 * a NullPointerException when a port had already been removed (the
 * sibling {@link #setBoundaryPortAttr()} already null-checks).
 *
 * @param boundaryId ID for boundary.
 */
protected void unsetBoundaryPortAttr(String boundaryId) {
    logger.debug("");
    if (boundaryId == null) {
        return;
    }
    Map<String, LinkLayerizerBoundary> boundaryMap =
            linkLayerizerBoundaryTable.getBoundaries();
    LinkLayerizerBoundary boundary = boundaryMap.get(boundaryId);
    if (boundary == null) {
        return;
    }
    // for lower nw
    NetworkInterface lowerNetif = networkInterfaces()
            .get(boundary.getLowerNw());
    if (lowerNetif != null) {
        Port lowerPort = lowerNetif.getPort(
                boundary.getLowerNwNode(), boundary.getLowerNwPort());
        if (lowerPort != null) {
            lowerPort.deleteAttribute(AttrElements.IS_BOUNDARY);
            lowerNetif.putPort(lowerPort);
        }
    }
    // for upper nw
    NetworkInterface upperNetif = networkInterfaces()
            .get(boundary.getUpperNw());
    if (upperNetif != null) {
        Port upperPort = upperNetif.getPort(
                boundary.getUpperNwNode(), boundary.getUpperNwPort());
        if (upperPort != null) {
            upperPort.deleteAttribute(AttrElements.IS_BOUNDARY);
            upperNetif.putPort(upperPort);
        }
    }
}
/**
 * Returns the IDs of all connected lower networks.
 *
 * @return List of ID for lower network.
 */
protected final List<String> getLowerNetworkIds() {
return getNetworkIds(LOWER_NETWORK);
}
/**
 * Returns the IDs of all connected upper networks.
 *
 * @return List of ID for upper network.
 */
protected final List<String> getUpperNetworkIds() {
return getNetworkIds(UPPER_NETWORK);
}
/**
 * Returns the IDs of all connected layerized networks.
 *
 * @return List of ID for layerized network.
 */
protected final List<String> getLayerizedNetworkIds() {
return getNetworkIds(LAYERIZED_NETWORK);
}
/**
 * Returns the IDs of all networks connected with the given type.
 *
 * @param type Type of the network.
 * @return List of ID for the network.
 */
protected final List<String> getNetworkIds(String type) {
    logger.debug("");
    return conversionTable().getConnectionList(type);
}
/**
 * Returns the interface of the first connected upper network, or null.
 *
 * @return NetworkInterface of upper network.
 */
protected final NetworkInterface getUpperNetworkIf() {
return getNetworkIf(UPPER_NETWORK);
}
/**
 * Returns the interface of the first connected layerized network, or null.
 *
 * @return NetworkInterface of layerized network.
 */
protected final NetworkInterface getLayerizedNetworkIf() {
return getNetworkIf(LAYERIZED_NETWORK);
}
/**
 * Returns the interface of the first connected lower network, or null.
 *
 * @return NetworkInterface of lower network.
 */
protected final NetworkInterface getLowerNetworkIf() {
return getNetworkIf(LOWER_NETWORK);
}
/**
 * Returns the interface of the first network connected with the given
 * type, or null when no such connection exists.
 *
 * @param type Type of the network.
 * @return NetworkInterface for the network.
 */
protected final NetworkInterface getNetworkIf(String type) {
    logger.debug("");
    ArrayList<String> ids = conversionTable().getConnectionList(type);
    if (CollectionUtils.isEmpty(ids)) {
        return null;
    }
    // first connection of this type wins
    return networkInterfaces().get(ids.get(0));
}
/**
 * Returns the interface of the first network in the given ID list.
 *
 * @param ids List of ID for the network.
 * @return NetworkInterface for the network.
 * @throws IllegalArgumentException if ids is null or empty.
 */
protected final NetworkInterface getNetworkIf(List<String> ids) {
    logger.debug("");
    if (CollectionUtils.isEmpty(ids)) {
        logger.error("ids is empty");
        throw new IllegalArgumentException("ids is empty");
    }
    return networkInterfaces().get(ids.get(0));
}
/**
 * Tells whether a connection of the given type exists.
 *
 * @param type Type of the network.
 * @return true: connected to the network. false: not connected.
 */
protected final boolean isConnectionType(String type) {
    logger.debug("");
    return conversionTable().isConnectionType(type);
}
/**
 * Tells whether the given network is connected as the layerized network.
 *
 * @param networkId ID for layerized network.
 * @return true: connected to layerized network. false: not connected.
 */
protected final boolean isLayerizedNetwork(String networkId) {
    logger.debug("");
    return LAYERIZED_NETWORK.equals(getConnectionType(networkId));
}
/**
 * Tells whether the given network is connected as a lower network.
 *
 * @param networkId ID for lower network.
 * @return true: connected to lower network. false: not connected.
 */
protected final boolean isLowerNetwork(String networkId) {
    logger.debug("");
    return LOWER_NETWORK.equals(getConnectionType(networkId));
}
/**
 * Tells whether the given network is connected as an upper network.
 *
 * @param networkId ID for upper network.
 * @return true: connected to upper network. false: not connected.
 */
protected final boolean isUpperNetwork(String networkId) {
    logger.debug("");
    return UPPER_NETWORK.equals(getConnectionType(networkId));
}
/**
 * Returns the connection type of the given network.
 *
 * @param networkId ID for the network.
 * @return Type of the network.
 */
protected final String getConnectionType(String networkId) {
    logger.debug("");
    return conversionTable().getConnectionType(networkId);
}
}
|
|
package eu.bryants.anthony.plinth.ast.member;
import eu.bryants.anthony.plinth.ast.InterfaceDefinition;
import eu.bryants.anthony.plinth.ast.LexicalPhrase;
import eu.bryants.anthony.plinth.ast.TypeDefinition;
import eu.bryants.anthony.plinth.ast.expression.Expression;
import eu.bryants.anthony.plinth.ast.metadata.GlobalVariable;
import eu.bryants.anthony.plinth.ast.metadata.MemberFunction;
import eu.bryants.anthony.plinth.ast.metadata.MemberVariable;
import eu.bryants.anthony.plinth.ast.metadata.PropertyPseudoVariable;
import eu.bryants.anthony.plinth.ast.misc.Parameter;
import eu.bryants.anthony.plinth.ast.statement.Block;
import eu.bryants.anthony.plinth.ast.terminal.SinceSpecifier;
import eu.bryants.anthony.plinth.ast.type.NamedType;
import eu.bryants.anthony.plinth.ast.type.Type;
/*
* Created on 22 Feb 2013
*/
/**
 * A property member of a type: a field-like declaration whose reads and writes
 * go through getter/setter/constructor functions, optionally backed by a
 * member or global variable.
 * @author Anthony Bryant
 */
public class Property extends Member
{
  // an abstract property does not have implementations for its getter and setter, and is always unbacked
  private boolean isAbstract;
  // a final property can only have its constructor called once, and its setter never (but the backing variable is never final)
  private boolean isFinal;
  // a mutable property has a mutable backing variable, and can be assigned to on an immutable receiver, and accesses to it never result in contextually immutable values
  // basically, the backing variable and the property itself both behave as mutable fields
  private boolean isMutable;
  // a static property is part of a type rather than an object, and its backing variable (if any) is a global variable rather than a member variable
  private boolean isStatic;
  // an unbacked property has no backing variable
  private boolean isUnbacked;

  private SinceSpecifier sinceSpecifier;
  private Type type;
  private String name;
  private Expression initialiserExpression;

  private boolean declaresGetter;
  private boolean getterImmutable;
  private NamedType[] getterUncheckedThrownTypes;
  private Block getterBlock;
  private boolean declaresSetter;
  private boolean setterImmutable;
  private Parameter setterParameter;
  private NamedType[] setterUncheckedThrownTypes;
  private Block setterBlock;
  private boolean declaresConstructor;
  private boolean constructorImmutable;
  private Parameter constructorParameter;
  private NamedType[] constructorUncheckedThrownTypes;
  private Block constructorBlock;

  private PropertyPseudoVariable pseudoVariable;
  private MemberVariable backingMemberVariable;
  private GlobalVariable backingGlobalVariable;
  private MemberFunction getterMemberFunction;
  private MemberFunction setterMemberFunction;
  private MemberFunction constructorMemberFunction;
  private TypeDefinition containingTypeDefinition;

  /**
   * Creates a new Property with the specified properties.
   * @param isAbstract - true if the property should be abstract
   * @param isFinal - true if the property should be final
   * @param isMutable - true if the property should be mutable
   * @param isStatic - true if the property should be static
   * @param isUnbacked - true if the property should not have a backing variable
   * @param sinceSpecifier - the since specifier for the property, or null if there is none
   * @param type - the type of the property
   * @param name - the name of the property
   * @param initialiserExpression - the initialiser expression, or null if there is none
   * @param declaresGetter - whether this property explicitly declares a getter
   * @param getterImmutable - true if the getter should be an immutable function
   * @param getterUncheckedThrownTypes - the list of unchecked types thrown by this property's getter
   * @param getterBlock - the Block containing the getter's implementation, or null if the default implementation should be used
   * @param declaresSetter - whether this property explicitly declares a setter
   * @param setterImmutable - true if the setter should be an immutable function
   * @param setterParameter - the setter's parameter, or null if the setter does not have a block
   * @param setterUncheckedThrownTypes - the list of unchecked types thrown by this property's setter
   * @param setterBlock - the Block containing the setter's implementation, or null if the default implementation should be used
   * @param declaresConstructor - whether this property explicitly declares a constructor
   * @param constructorImmutable - true if the constructor should be an immutable function
   * @param constructorParameter - the constructor's parameter, or null if the constructor does not have a block
   * @param constructorUncheckedThrownTypes - the list of unchecked types thrown by this property's constructor
   * @param constructorBlock - the Block containing the constructor's implementation, or null if the default implementation should be used
   * @param lexicalPhrase - the LexicalPhrase representing the Property's location in the source code
   */
  public Property(boolean isAbstract, boolean isFinal, boolean isMutable, boolean isStatic, boolean isUnbacked, SinceSpecifier sinceSpecifier, Type type, String name, Expression initialiserExpression,
                  boolean declaresGetter, boolean getterImmutable, NamedType[] getterUncheckedThrownTypes, Block getterBlock,
                  boolean declaresSetter, boolean setterImmutable, Parameter setterParameter, NamedType[] setterUncheckedThrownTypes, Block setterBlock,
                  boolean declaresConstructor, boolean constructorImmutable, Parameter constructorParameter, NamedType[] constructorUncheckedThrownTypes, Block constructorBlock,
                  LexicalPhrase lexicalPhrase)
  {
    super(lexicalPhrase);
    this.isAbstract = isAbstract;
    this.isFinal = isFinal;
    this.isMutable = isMutable;
    this.isStatic = isStatic;
    this.isUnbacked = isUnbacked;
    this.sinceSpecifier = sinceSpecifier;
    this.type = type;
    this.name = name;
    this.initialiserExpression = initialiserExpression;
    this.declaresGetter = declaresGetter;
    this.getterImmutable = getterImmutable;
    this.getterUncheckedThrownTypes = getterUncheckedThrownTypes;
    this.getterBlock = getterBlock;
    this.declaresSetter = declaresSetter;
    this.setterImmutable = setterImmutable;
    this.setterParameter = setterParameter;
    this.setterUncheckedThrownTypes = setterUncheckedThrownTypes;
    this.setterBlock = setterBlock;
    this.declaresConstructor = declaresConstructor;
    // NOTE(review): when no constructor is declared, the constructor's immutability
    // defaults to the setter's - presumably because the default constructor shares
    // the setter's behaviour; confirm against the code generator.
    this.constructorImmutable = declaresConstructor ? constructorImmutable : setterImmutable;
    this.constructorParameter = constructorParameter;
    this.constructorUncheckedThrownTypes = constructorUncheckedThrownTypes;
    this.constructorBlock = constructorBlock;
    pseudoVariable = new PropertyPseudoVariable(this);
  }

  /**
   * Computes whether this property has a constructor, based on the following rules:
   * <ul>
   * <li>all final properties have constructors</li>
   * <li>static properties do not have constructors unless they are final</li>
   * <li>if a non-static, non-final property declares a constructor, it always gets one</li>
   * <li>if the property has a backing variable which doesn't have a default value, it needs a constructor (note: static properties must always have default values)</li>
   * <li>properties do not need a constructor unless one of the previous rules applies</li>
   * </ul>
   * @return true if this Property has a constructor
   */
  public boolean hasConstructor()
  {
    if (isFinal)
    {
      // all final properties have constructors
      return true;
    }
    if (isStatic)
    {
      // static properties do not have constructors unless they are final
      return false;
    }
    if (declaresConstructor)
    {
      // if a non-static, non-final property declares a constructor, it always gets one
      return true;
    }
    if ((isAbstract || !isUnbacked) && !type.hasDefaultValue())
    {
      // if the property has a backing variable which doesn't have a default value, it needs a constructor
      return true;
    }
    // properties do not need a constructor unless one of the previous rules applies
    return false;
  }

  /**
   * This should only be used when adding a Property to an interface.
   * @param isAbstract - true if this Property should be abstract
   */
  public void setAbstract(boolean isAbstract)
  {
    this.isAbstract = isAbstract;
  }

  /**
   * @return the isAbstract
   */
  public boolean isAbstract()
  {
    return isAbstract;
  }

  /**
   * @return the isFinal
   */
  public boolean isFinal()
  {
    return isFinal;
  }

  /**
   * @return the isMutable
   */
  public boolean isMutable()
  {
    return isMutable;
  }

  /**
   * @return the isStatic
   */
  public boolean isStatic()
  {
    return isStatic;
  }

  /**
   * This should only be called when adding a Property to a type.
   * For example, if it is abstract, it should probably be set to unbacked.
   * @param isUnbacked - the isUnbacked to set
   */
  public void setUnbacked(boolean isUnbacked)
  {
    this.isUnbacked = isUnbacked;
  }

  /**
   * @return the isUnbacked
   */
  public boolean isUnbacked()
  {
    return isUnbacked;
  }

  /**
   * @return the sinceSpecifier
   */
  public SinceSpecifier getSinceSpecifier()
  {
    return sinceSpecifier;
  }

  /**
   * @return the type
   */
  public Type getType()
  {
    return type;
  }

  /**
   * @return the name
   */
  public String getName()
  {
    return name;
  }

  /**
   * @return the initialiserExpression
   */
  public Expression getInitialiserExpression()
  {
    return initialiserExpression;
  }

  /**
   * @return the declaresGetter
   */
  public boolean getDeclaresGetter()
  {
    return declaresGetter;
  }

  /**
   * @return the getterImmutable
   */
  public boolean isGetterImmutable()
  {
    return getterImmutable;
  }

  /**
   * @return the getterUncheckedThrownTypes
   */
  public NamedType[] getGetterUncheckedThrownTypes()
  {
    return getterUncheckedThrownTypes;
  }

  /**
   * @return the getterBlock
   */
  public Block getGetterBlock()
  {
    return getterBlock;
  }

  /**
   * @return the declaresSetter
   */
  public boolean getDeclaresSetter()
  {
    return declaresSetter;
  }

  /**
   * @return the setterImmutable
   */
  public boolean isSetterImmutable()
  {
    return setterImmutable;
  }

  /**
   * @return the setterParameter
   */
  public Parameter getSetterParameter()
  {
    return setterParameter;
  }

  /**
   * @return the setterUncheckedThrownTypes
   */
  public NamedType[] getSetterUncheckedThrownTypes()
  {
    return setterUncheckedThrownTypes;
  }

  /**
   * @return the setterBlock
   */
  public Block getSetterBlock()
  {
    return setterBlock;
  }

  /**
   * @return the declaresConstructor
   */
  public boolean getDeclaresConstructor()
  {
    return declaresConstructor;
  }

  /**
   * @return the constructorImmutable
   */
  public boolean isConstructorImmutable()
  {
    return constructorImmutable;
  }

  /**
   * @return the constructorParameter
   */
  public Parameter getConstructorParameter()
  {
    return constructorParameter;
  }

  /**
   * @return the constructorUncheckedThrownTypes
   */
  public NamedType[] getConstructorUncheckedThrownTypes()
  {
    return constructorUncheckedThrownTypes;
  }

  /**
   * @return the constructorBlock
   */
  public Block getConstructorBlock()
  {
    return constructorBlock;
  }

  /**
   * @return the pseudoVariable
   */
  public PropertyPseudoVariable getPseudoVariable()
  {
    return pseudoVariable;
  }

  /**
   * @param pseudoVariable - the pseudoVariable to set
   */
  public void setPseudoVariable(PropertyPseudoVariable pseudoVariable)
  {
    this.pseudoVariable = pseudoVariable;
  }

  /**
   * @return the backingMemberVariable
   */
  public MemberVariable getBackingMemberVariable()
  {
    return backingMemberVariable;
  }

  /**
   * @param backingMemberVariable - the backingMemberVariable to set
   */
  public void setBackingMemberVariable(MemberVariable backingMemberVariable)
  {
    this.backingMemberVariable = backingMemberVariable;
  }

  /**
   * @return the backingGlobalVariable
   */
  public GlobalVariable getBackingGlobalVariable()
  {
    return backingGlobalVariable;
  }

  /**
   * @param backingGlobalVariable - the backingGlobalVariable to set
   */
  public void setBackingGlobalVariable(GlobalVariable backingGlobalVariable)
  {
    this.backingGlobalVariable = backingGlobalVariable;
  }

  /**
   * @return the getterMemberFunction
   */
  public MemberFunction getGetterMemberFunction()
  {
    return getterMemberFunction;
  }

  /**
   * @param getterMemberFunction - the getterMemberFunction to set
   */
  public void setGetterMemberFunction(MemberFunction getterMemberFunction)
  {
    this.getterMemberFunction = getterMemberFunction;
  }

  /**
   * @return the setterMemberFunction
   */
  public MemberFunction getSetterMemberFunction()
  {
    return setterMemberFunction;
  }

  /**
   * @param setterMemberFunction - the setterMemberFunction to set
   */
  public void setSetterMemberFunction(MemberFunction setterMemberFunction)
  {
    this.setterMemberFunction = setterMemberFunction;
  }

  /**
   * @return the constructorMemberFunction
   */
  public MemberFunction getConstructorMemberFunction()
  {
    return constructorMemberFunction;
  }

  /**
   * @param constructorMemberFunction - the constructorMemberFunction to set
   */
  public void setConstructorMemberFunction(MemberFunction constructorMemberFunction)
  {
    this.constructorMemberFunction = constructorMemberFunction;
  }

  /**
   * @return the containingTypeDefinition
   */
  public TypeDefinition getContainingTypeDefinition()
  {
    return containingTypeDefinition;
  }

  /**
   * @param containingTypeDefinition - the containingTypeDefinition to set
   */
  public void setContainingTypeDefinition(TypeDefinition containingTypeDefinition)
  {
    this.containingTypeDefinition = containingTypeDefinition;
  }

  /**
   * @param typeString - the mangled type of the part of this property to be represented (e.g. "G" for getter)
   * @return the descriptor string for the specified type of property function, which should be used in the virtual function table descriptor for this property's class
   */
  private String getDescriptorString(String typeString)
  {
    // StringBuilder instead of StringBuffer: the buffer is a local, so the
    // synchronization of StringBuffer is pure overhead.
    StringBuilder buffer = new StringBuilder();
    if (!isStatic && containingTypeDefinition instanceof InterfaceDefinition)
    {
      // non-static interface functions must have a unique disambiguator, since their calling convention depends on which interface they are part of
      buffer.append('I');
      buffer.append(containingTypeDefinition.getQualifiedName().getMangledName());
    }
    buffer.append(isStatic ? "SP" : "P");
    buffer.append(typeString);
    buffer.append('_');
    buffer.append(name);
    return buffer.toString();
  }

  /**
   * @return the descriptor for the getter of this property, which should be used in the virtual function table descriptor for this property's class
   */
  public String getGetterDescriptor()
  {
    return getDescriptorString("G");
  }

  /**
   * @return the descriptor for the setter of this property, which should be used in the virtual function table descriptor for this property's class
   */
  public String getSetterDescriptor()
  {
    return getDescriptorString("S");
  }

  /**
   * @return the descriptor for the constructor of this property, which should be used in the virtual function table descriptor for this property's class
   */
  public String getConstructorDescriptor()
  {
    return getDescriptorString("C");
  }

  /**
   * @param typeString - the mangled type of the part of this property to be represented (e.g. "G" for getter)
   * @return the mangled name of part of this Property
   */
  private String getMangledName(String typeString)
  {
    // StringBuilder instead of StringBuffer: local buffer, no synchronization needed.
    StringBuilder buffer = new StringBuilder();
    if (isStatic)
    {
      buffer.append("_SP");
    }
    else
    {
      buffer.append("_P");
    }
    buffer.append(typeString);
    buffer.append(containingTypeDefinition.getQualifiedName().getMangledName());
    buffer.append('_');
    if (isStatic)
    {
      if (sinceSpecifier != null)
      {
        buffer.append(sinceSpecifier.getMangledName());
      }
      buffer.append('_');
    }
    buffer.append(name);
    return buffer.toString();
  }

  /**
   * @return the mangled name of this Property's getter
   */
  public String getGetterMangledName()
  {
    return getMangledName("G");
  }

  /**
   * @return the mangled name of this Property's setter
   */
  public String getSetterMangledName()
  {
    return getMangledName("S");
  }

  /**
   * @return the mangled name of this Property's constructor
   */
  public String getConstructorMangledName()
  {
    return getMangledName("C");
  }

  /**
   * @return the mangled name of this Property's backing variable
   */
  public String getBackingVariableMangledName()
  {
    return getMangledName("B");
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public String toString()
  {
    // StringBuilder instead of StringBuffer: local buffer, no synchronization needed.
    StringBuilder buffer = new StringBuilder();
    if (isAbstract)
    {
      buffer.append("abstract ");
    }
    if (isFinal)
    {
      buffer.append("final ");
    }
    if (isMutable)
    {
      buffer.append("mutable ");
    }
    if (isStatic)
    {
      buffer.append("static ");
    }
    if (isUnbacked)
    {
      buffer.append("unbacked ");
    }
    if (sinceSpecifier != null)
    {
      buffer.append(sinceSpecifier);
      buffer.append(' ');
    }
    buffer.append("property ");
    buffer.append(type);
    buffer.append(' ');
    buffer.append(name);
    if (initialiserExpression != null)
    {
      buffer.append(" = ");
      buffer.append(initialiserExpression);
    }
    if (declaresConstructor)
    {
      buffer.append('\n');
      if (constructorImmutable)
      {
        buffer.append("immutable ");
      }
      buffer.append("constructor");
      if (constructorBlock != null)
      {
        buffer.append('(');
        buffer.append(constructorParameter);
        buffer.append(')');
        if (constructorUncheckedThrownTypes != null && constructorUncheckedThrownTypes.length > 0)
        {
          buffer.append(" throws ");
          for (int i = 0; i < constructorUncheckedThrownTypes.length; ++i)
          {
            buffer.append("unchecked ");
            buffer.append(constructorUncheckedThrownTypes[i]);
            if (i != constructorUncheckedThrownTypes.length - 1)
            {
              buffer.append(", ");
            }
          }
        }
        buffer.append('\n');
        buffer.append(constructorBlock);
      }
    }
    if (declaresSetter)
    {
      buffer.append('\n');
      if (setterImmutable)
      {
        buffer.append("immutable ");
      }
      buffer.append("setter");
      if (setterBlock != null)
      {
        buffer.append('(');
        buffer.append(setterParameter);
        buffer.append(')');
        if (setterUncheckedThrownTypes != null && setterUncheckedThrownTypes.length > 0)
        {
          buffer.append(" throws ");
          for (int i = 0; i < setterUncheckedThrownTypes.length; ++i)
          {
            buffer.append("unchecked ");
            buffer.append(setterUncheckedThrownTypes[i]);
            if (i != setterUncheckedThrownTypes.length - 1)
            {
              buffer.append(", ");
            }
          }
        }
        buffer.append('\n');
        buffer.append(setterBlock);
      }
    }
    if (declaresGetter)
    {
      buffer.append('\n');
      if (!getterImmutable)
      {
        buffer.append("mutable ");
      }
      buffer.append("getter");
      if (getterUncheckedThrownTypes != null && getterUncheckedThrownTypes.length > 0)
      {
        buffer.append(" throws ");
        for (int i = 0; i < getterUncheckedThrownTypes.length; ++i)
        {
          buffer.append("unchecked ");
          buffer.append(getterUncheckedThrownTypes[i]);
          if (i != getterUncheckedThrownTypes.length - 1)
          {
            buffer.append(", ");
          }
        }
      }
      if (getterBlock != null)
      {
        buffer.append('\n');
        buffer.append(getterBlock);
      }
    }
    buffer.append(';');
    return buffer.toString();
  }
}
|
|
/* Generated By:JavaCC: Do not edit this line. mas2jTokenManager.java */
package jason.mas2j.parser;
import java.util.*;
import java.io.*;
import jason.mas2j.*;
import jason.asSyntax.*;
import jason.asSemantics.*;
import jason.jeditplugin.*;
public class mas2jTokenManager implements mas2jConstants
{
/** Destination for this generated token manager's debug output; defaults to stdout. */
public java.io.PrintStream debugStream = System.out;
/** Redirects the token manager's debug output to {@code ds}. */
public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
/**
 * Generated by JavaCC - do not hand-edit; regenerate from the .jj grammar instead.
 * Given the position at which string-literal matching failed and the bitmask of
 * still-active literal tokens ({@code active0}), records any match found so far
 * in {@code jjmatchedKind}/{@code jjmatchedPos} and returns the NFA state to
 * continue from, or -1 if there is none.
 */
private final int jjStopStringLiteralDfa_0(int pos, long active0)
{
   switch (pos)
   {
      case 0:
         if ((active0 & 0x712ff00L) != 0L)
         {
            jjmatchedKind = 29;
            return 88;
         }
         if ((active0 & 0x40000000000L) != 0L)
            return 45;
         if ((active0 & 0x400000L) != 0L)
         {
            jjmatchedKind = 29;
            return 14;
         }
         if ((active0 & 0x80000L) != 0L)
         {
            jjmatchedKind = 29;
            return 21;
         }
         if ((active0 & 0x80L) != 0L)
         {
            jjmatchedKind = 30;
            return 89;
         }
         if ((active0 & 0x200000L) != 0L)
         {
            jjmatchedKind = 29;
            return 5;
         }
         return -1;
      case 1:
         if ((active0 & 0x200000L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 1;
            return 4;
         }
         if ((active0 & 0x75af700L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 1;
            return 90;
         }
         if ((active0 & 0x80L) != 0L)
         {
            jjmatchedKind = 30;
            jjmatchedPos = 1;
            return 78;
         }
         if ((active0 & 0x800L) != 0L)
            return 90;
         return -1;
      case 2:
         if ((active0 & 0x77af700L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 2;
            return 90;
         }
         if ((active0 & 0x80L) != 0L)
            return 78;
         return -1;
      case 3:
         if ((active0 & 0x77af700L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 3;
            return 90;
         }
         return -1;
      case 4:
         if ((active0 & 0x772f700L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 4;
            return 90;
         }
         if ((active0 & 0x80000L) != 0L)
            return 90;
         return -1;
      case 5:
         if ((active0 & 0x7727600L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 5;
            return 90;
         }
         if ((active0 & 0x8100L) != 0L)
            return 90;
         return -1;
      case 6:
         if ((active0 & 0x120000L) != 0L)
            return 90;
         if ((active0 & 0x7607600L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 6;
            return 90;
         }
         return -1;
      case 7:
         if ((active0 & 0x7607600L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 7;
            return 90;
         }
         return -1;
      case 8:
         if ((active0 & 0x7605600L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 8;
            return 90;
         }
         if ((active0 & 0x2000L) != 0L)
            return 90;
         return -1;
      case 9:
         if ((active0 & 0x1200000L) != 0L)
            return 90;
         if ((active0 & 0x6405600L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 9;
            return 90;
         }
         return -1;
      case 10:
         if ((active0 & 0x6405400L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 10;
            return 90;
         }
         if ((active0 & 0x200L) != 0L)
            return 90;
         return -1;
      case 11:
         if ((active0 & 0x400000L) != 0L)
            return 90;
         if ((active0 & 0x6005400L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 11;
            return 90;
         }
         return -1;
      case 12:
         if ((active0 & 0x4000L) != 0L)
            return 90;
         if ((active0 & 0x6001400L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 12;
            return 90;
         }
         return -1;
      case 13:
         if ((active0 & 0x4000400L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 13;
            return 90;
         }
         if ((active0 & 0x2001000L) != 0L)
            return 90;
         return -1;
      case 14:
         if ((active0 & 0x4000000L) != 0L)
            return 90;
         if ((active0 & 0x400L) != 0L)
         {
            jjmatchedKind = 29;
            jjmatchedPos = 14;
            return 90;
         }
         return -1;
      default :
         return -1;
   }
}
/**
 * Generated helper: hands over from literal matching to the NFA, starting from
 * whatever state jjStopStringLiteralDfa_0 decides on.
 */
private final int jjStartNfa_0(int pos, long active0)
{
   int startState = jjStopStringLiteralDfa_0(pos, active0);
   return jjMoveNfa_0(startState, pos + 1);
}
/**
 * Generated helper: records a definitive token match at {@code pos} and
 * reports the next input position.
 */
private final int jjStopAtPos(int pos, int kind)
{
   jjmatchedPos = pos;
   jjmatchedKind = kind;
   return pos + 1;
}
/**
 * Generated helper: records the match found so far, then continues in the NFA
 * from {@code state}; on end of input the match stands as-is.
 */
private final int jjStartNfaWithStates_0(int pos, int kind, int state)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   try
   {
      curChar = input_stream.readChar();
   }
   catch (java.io.IOException e)
   {
      // No more input: stop here with the match already recorded.
      return pos + 1;
   }
   return jjMoveNfa_0(state, pos + 1);
}
/**
 * Generated by JavaCC - do not hand-edit. First step of the string-literal DFA:
 * dispatches on the current character (case labels are ASCII codes, annotated
 * below) and either finishes a single-character token or continues matching
 * with a bitmask of candidate literals.
 */
private final int jjMoveStringLiteralDfa0_0()
{
   switch(curChar)
   {
      case 35: // '#'
         return jjStopAtPos(0, 40);
      case 40: // '('
         return jjStopAtPos(0, 43);
      case 41: // ')'
         return jjStopAtPos(0, 45);
      case 44: // ','
         return jjStopAtPos(0, 44);
      case 46: // '.'
         return jjStartNfaWithStates_0(0, 42, 45);
      case 58: // ':'
         return jjStopAtPos(0, 39);
      case 59: // ';'
         return jjStopAtPos(0, 41);
      case 61: // '='
         return jjStopAtPos(0, 48);
      case 77: // 'M'
         return jjMoveStringLiteralDfa1_0(0x80L);
      case 91: // '['
         return jjStopAtPos(0, 46);
      case 93: // ']'
         return jjStopAtPos(0, 47);
      case 97: // 'a'
         return jjMoveStringLiteralDfa1_0(0x3004900L);
      case 98: // 'b'
         return jjMoveStringLiteralDfa1_0(0x4000000L);
      case 99: // 'c'
         return jjMoveStringLiteralDfa1_0(0x2000L);
      case 100: // 'd'
         return jjMoveStringLiteralDfa1_0(0x200000L);
      case 101: // 'e'
         return jjMoveStringLiteralDfa1_0(0x8600L);
      case 105: // 'i'
         return jjMoveStringLiteralDfa1_0(0x21000L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa1_0(0x80000L);
      case 115: // 's'
         return jjMoveStringLiteralDfa1_0(0x400000L);
      case 118: // 'v'
         return jjMoveStringLiteralDfa1_0(0x100000L);
      case 123: // '{'
         return jjStopAtPos(0, 37);
      case 125: // '}'
         return jjStopAtPos(0, 38);
      default :
         // Not the start of any literal token: fall through to the NFA.
         return jjMoveNfa_0(6, 0);
   }
}
/**
 * Generated by JavaCC - do not hand-edit. Second character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa1_0(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(0, active0);
      return 1;
   }
   switch(curChar)
   {
      case 65: // 'A'
         return jjMoveStringLiteralDfa2_0(active0, 0x80L);
      case 101: // 'e'
         return jjMoveStringLiteralDfa2_0(active0, 0x4100000L);
      case 103: // 'g'
         return jjMoveStringLiteralDfa2_0(active0, 0x3000100L);
      case 105: // 'i'
         return jjMoveStringLiteralDfa2_0(active0, 0x200000L);
      case 108: // 'l'
         return jjMoveStringLiteralDfa2_0(active0, 0x2000L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa2_0(active0, 0x21200L);
      case 114: // 'r'
         return jjMoveStringLiteralDfa2_0(active0, 0x80000L);
      case 115: // 's'
         return jjMoveStringLiteralDfa2_0(active0, 0x4000L);
      case 116: // 't'
         if ((active0 & 0x800L) != 0L)
            return jjStartNfaWithStates_0(1, 11, 90);
         break;
      case 118: // 'v'
         return jjMoveStringLiteralDfa2_0(active0, 0x8000L);
      case 120: // 'x'
         return jjMoveStringLiteralDfa2_0(active0, 0x400L);
      case 121: // 'y'
         return jjMoveStringLiteralDfa2_0(active0, 0x400000L);
      default :
         break;
   }
   return jjStartNfa_0(0, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Third character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa2_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(0, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(1, active0);
      return 2;
   }
   switch(curChar)
   {
      case 83: // 'S'
         if ((active0 & 0x80L) != 0L)
            return jjStartNfaWithStates_0(2, 7, 78);
         break;
      case 97: // 'a'
         return jjMoveStringLiteralDfa3_0(active0, 0x2000L);
      case 99: // 'c'
         return jjMoveStringLiteralDfa3_0(active0, 0x80000L);
      case 101: // 'e'
         return jjMoveStringLiteralDfa3_0(active0, 0x3008500L);
      case 102: // 'f'
         return jjMoveStringLiteralDfa3_0(active0, 0x1000L);
      case 108: // 'l'
         return jjMoveStringLiteralDfa3_0(active0, 0x4004000L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa3_0(active0, 0x400000L);
      case 114: // 'r'
         return jjMoveStringLiteralDfa3_0(active0, 0x300000L);
      case 116: // 't'
         return jjMoveStringLiteralDfa3_0(active0, 0x20000L);
      case 118: // 'v'
         return jjMoveStringLiteralDfa3_0(active0, 0x200L);
      default :
         break;
   }
   return jjStartNfa_0(1, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Fourth character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa3_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(1, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(2, active0);
      return 3;
   }
   switch(curChar)
   {
      case 66: // 'B'
         return jjMoveStringLiteralDfa4_0(active0, 0x20000L);
      case 83: // 'S'
         return jjMoveStringLiteralDfa4_0(active0, 0x4000L);
      case 98: // 'b'
         return jjMoveStringLiteralDfa4_0(active0, 0x180000L);
      case 99: // 'c'
         return jjMoveStringLiteralDfa4_0(active0, 0x400400L);
      case 101: // 'e'
         return jjMoveStringLiteralDfa4_0(active0, 0x200000L);
      case 105: // 'i'
         return jjMoveStringLiteralDfa4_0(active0, 0x4000200L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa4_0(active0, 0x3008100L);
      case 114: // 'r'
         return jjMoveStringLiteralDfa4_0(active0, 0x1000L);
      case 115: // 's'
         return jjMoveStringLiteralDfa4_0(active0, 0x2000L);
      default :
         break;
   }
   return jjStartNfa_0(2, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Fifth character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa4_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(2, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(3, active0);
      return 4;
   }
   switch(curChar)
   {
      case 97: // 'a'
         return jjMoveStringLiteralDfa5_0(active0, 0x1000L);
      case 99: // 'c'
         return jjMoveStringLiteralDfa5_0(active0, 0x200000L);
      case 101: // 'e'
         return jjMoveStringLiteralDfa5_0(active0, 0x4020000L);
      case 104: // 'h'
         return jjMoveStringLiteralDfa5_0(active0, 0x400000L);
      case 111: // 'o'
         return jjMoveStringLiteralDfa5_0(active0, 0x104000L);
      case 112: // 'p'
         if ((active0 & 0x80000L) != 0L)
            return jjStartNfaWithStates_0(4, 19, 90);
         break;
      case 114: // 'r'
         return jjMoveStringLiteralDfa5_0(active0, 0x200L);
      case 115: // 's'
         return jjMoveStringLiteralDfa5_0(active0, 0x2000L);
      case 116: // 't'
         return jjMoveStringLiteralDfa5_0(active0, 0x3008100L);
      case 117: // 'u'
         return jjMoveStringLiteralDfa5_0(active0, 0x400L);
      default :
         break;
   }
   return jjStartNfa_0(3, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Sixth character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa5_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(3, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(4, active0);
      return 5;
   }
   switch(curChar)
   {
      case 65: // 'A'
         return jjMoveStringLiteralDfa6_0(active0, 0x2000000L);
      case 67: // 'C'
         return jjMoveStringLiteralDfa6_0(active0, 0x1000000L);
      case 102: // 'f'
         return jjMoveStringLiteralDfa6_0(active0, 0x4000000L);
      case 108: // 'l'
         return jjMoveStringLiteralDfa6_0(active0, 0x20000L);
      case 111: // 'o'
         return jjMoveStringLiteralDfa6_0(active0, 0x200L);
      case 112: // 'p'
         return jjMoveStringLiteralDfa6_0(active0, 0x2000L);
      case 114: // 'r'
         return jjMoveStringLiteralDfa6_0(active0, 0x400000L);
      case 115: // 's'
         if ((active0 & 0x100L) != 0L)
            return jjStartNfaWithStates_0(5, 8, 90);
         else if ((active0 & 0x8000L) != 0L)
            return jjStartNfaWithStates_0(5, 15, 90);
         return jjMoveStringLiteralDfa6_0(active0, 0x101000L);
      case 116: // 't'
         return jjMoveStringLiteralDfa6_0(active0, 0x200400L);
      case 117: // 'u'
         return jjMoveStringLiteralDfa6_0(active0, 0x4000L);
      default :
         break;
   }
   return jjStartNfa_0(4, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Seventh character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa6_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(4, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(5, active0);
      return 6;
   }
   switch(curChar)
   {
      case 66: // 'B'
         return jjMoveStringLiteralDfa7_0(active0, 0x4000000L);
      case 97: // 'a'
         return jjMoveStringLiteralDfa7_0(active0, 0x2000L);
      case 101: // 'e'
         if ((active0 & 0x100000L) != 0L)
            return jjStartNfaWithStates_0(6, 20, 90);
         break;
      case 105: // 'i'
         return jjMoveStringLiteralDfa7_0(active0, 0x200400L);
      case 108: // 'l'
         return jjMoveStringLiteralDfa7_0(active0, 0x1000000L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa7_0(active0, 0x200L);
      case 111: // 'o'
         return jjMoveStringLiteralDfa7_0(active0, 0x400000L);
      case 114: // 'r'
         return jjMoveStringLiteralDfa7_0(active0, 0x2004000L);
      case 115: // 's'
         if ((active0 & 0x20000L) != 0L)
            return jjStartNfaWithStates_0(6, 17, 90);
         break;
      case 116: // 't'
         return jjMoveStringLiteralDfa7_0(active0, 0x1000L);
      default :
         break;
   }
   return jjStartNfa_0(5, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Eighth character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa7_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(5, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(6, active0);
      return 7;
   }
   switch(curChar)
   {
      case 97: // 'a'
         return jjMoveStringLiteralDfa8_0(active0, 0x5000000L);
      case 99: // 'c'
         return jjMoveStringLiteralDfa8_0(active0, 0x2004000L);
      case 109: // 'm'
         return jjMoveStringLiteralDfa8_0(active0, 0x200L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa8_0(active0, 0x400000L);
      case 111: // 'o'
         return jjMoveStringLiteralDfa8_0(active0, 0x400L);
      case 114: // 'r'
         return jjMoveStringLiteralDfa8_0(active0, 0x1000L);
      case 116: // 't'
         return jjMoveStringLiteralDfa8_0(active0, 0x2000L);
      case 118: // 'v'
         return jjMoveStringLiteralDfa8_0(active0, 0x200000L);
      default :
         break;
   }
   return jjStartNfa_0(6, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Ninth character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa8_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(6, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(7, active0);
      return 8;
   }
   switch(curChar)
   {
      case 101: // 'e'
         return jjMoveStringLiteralDfa9_0(active0, 0x204200L);
      case 104: // 'h'
         if ((active0 & 0x2000L) != 0L)
            return jjStartNfaWithStates_0(8, 13, 90);
         return jjMoveStringLiteralDfa9_0(active0, 0x2000000L);
      case 105: // 'i'
         return jjMoveStringLiteralDfa9_0(active0, 0x400000L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa9_0(active0, 0x400L);
      case 115: // 's'
         return jjMoveStringLiteralDfa9_0(active0, 0x5000000L);
      case 117: // 'u'
         return jjMoveStringLiteralDfa9_0(active0, 0x1000L);
      default :
         break;
   }
   return jjStartNfa_0(7, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Tenth character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa9_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(7, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(8, active0);
      return 9;
   }
   switch(curChar)
   {
      case 67: // 'C'
         return jjMoveStringLiteralDfa10_0(active0, 0x2000400L);
      case 80: // 'P'
         return jjMoveStringLiteralDfa10_0(active0, 0x4000L);
      case 99: // 'c'
         return jjMoveStringLiteralDfa10_0(active0, 0x1000L);
      case 101: // 'e'
         return jjMoveStringLiteralDfa10_0(active0, 0x4000000L);
      case 110: // 'n'
         return jjMoveStringLiteralDfa10_0(active0, 0x200L);
      case 115: // 's'
         if ((active0 & 0x200000L) != 0L)
            return jjStartNfaWithStates_0(9, 21, 90);
         else if ((active0 & 0x1000000L) != 0L)
            return jjStartNfaWithStates_0(9, 24, 90);
         return jjMoveStringLiteralDfa10_0(active0, 0x400000L);
      default :
         break;
   }
   return jjStartNfa_0(8, active0);
}
/**
 * Generated by JavaCC - do not hand-edit. Eleventh character of the
 * string-literal DFA; case labels are ASCII codes (annotated).
 */
private final int jjMoveStringLiteralDfa10_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(8, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(9, active0);
      return 10;
   }
   switch(curChar)
   {
      case 67: // 'C'
         return jjMoveStringLiteralDfa11_0(active0, 0x4000000L);
      case 97: // 'a'
         return jjMoveStringLiteralDfa11_0(active0, 0x4000L);
      case 101: // 'e'
         return jjMoveStringLiteralDfa11_0(active0, 0x400000L);
      case 108: // 'l'
         return jjMoveStringLiteralDfa11_0(active0, 0x2000000L);
      case 111: // 'o'
         return jjMoveStringLiteralDfa11_0(active0, 0x400L);
      case 116: // 't'
         if ((active0 & 0x200L) != 0L)
            return jjStartNfaWithStates_0(10, 9, 90);
         return jjMoveStringLiteralDfa11_0(active0, 0x1000L);
      default :
         break;
   }
   return jjStartNfa_0(9, active0);
}
/**
 * Matches the 12th character (position 11) of the keyword literals still
 * alive in {@code active0}. Generated by JavaCC — do not edit by hand.
 *
 * @param old0    candidate-kind mask from the previous position
 * @param active0 kinds whose literal matched through position 10
 * @return the number of characters matched so far
 */
private final int jjMoveStringLiteralDfa11_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(9, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
   // EOF: no literal can complete; report best match found so far.
   jjStopStringLiteralDfa_0(10, active0);
      return 11;
   }
   switch(curChar)
   {
      case 97:
         return jjMoveStringLiteralDfa12_0(active0, 0x2000000L);
      case 100:
         // 'd' completes kind 22; state 90 continues NFA matching.
         if ((active0 & 0x400000L) != 0L)
            return jjStartNfaWithStates_0(11, 22, 90);
         break;
      case 108:
         return jjMoveStringLiteralDfa12_0(active0, 0x4000000L);
      case 110:
         return jjMoveStringLiteralDfa12_0(active0, 0x400L);
      case 116:
         return jjMoveStringLiteralDfa12_0(active0, 0x4000L);
      case 117:
         return jjMoveStringLiteralDfa12_0(active0, 0x1000L);
      default :
         break;
   }
   return jjStartNfa_0(10, active0);
}
/**
 * Matches the 13th character (position 12) of the keyword literals still
 * alive in {@code active0}. Generated by JavaCC — do not edit by hand.
 *
 * @param old0    candidate-kind mask from the previous position
 * @param active0 kinds whose literal matched through position 11
 * @return the number of characters matched so far
 */
private final int jjMoveStringLiteralDfa12_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(10, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
   // EOF: no literal can complete; report best match found so far.
   jjStopStringLiteralDfa_0(11, active0);
      return 12;
   }
   switch(curChar)
   {
      case 97:
         return jjMoveStringLiteralDfa13_0(active0, 0x4000000L);
      case 104:
         // 'h' completes kind 14; state 90 continues NFA matching.
         if ((active0 & 0x4000L) != 0L)
            return jjStartNfaWithStates_0(12, 14, 90);
         break;
      case 114:
         return jjMoveStringLiteralDfa13_0(active0, 0x1000L);
      case 115:
         return jjMoveStringLiteralDfa13_0(active0, 0x2000000L);
      case 116:
         return jjMoveStringLiteralDfa13_0(active0, 0x400L);
      default :
         break;
   }
   return jjStartNfa_0(11, active0);
}
/**
 * Matches the 14th character (position 13) of the keyword literals still
 * alive in {@code active0}. Generated by JavaCC — do not edit by hand.
 *
 * @param old0    candidate-kind mask from the previous position
 * @param active0 kinds whose literal matched through position 12
 * @return the number of characters matched so far
 */
private final int jjMoveStringLiteralDfa13_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(11, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
   // EOF: no literal can complete; report best match found so far.
   jjStopStringLiteralDfa_0(12, active0);
      return 13;
   }
   switch(curChar)
   {
      case 101:
         // 'e' completes kind 12; state 90 continues NFA matching.
         if ((active0 & 0x1000L) != 0L)
            return jjStartNfaWithStates_0(13, 12, 90);
         break;
      case 114:
         return jjMoveStringLiteralDfa14_0(active0, 0x400L);
      case 115:
         // 's' completes kind 25; state 90 continues NFA matching.
         if ((active0 & 0x2000000L) != 0L)
            return jjStartNfaWithStates_0(13, 25, 90);
         return jjMoveStringLiteralDfa14_0(active0, 0x4000000L);
      default :
         break;
   }
   return jjStartNfa_0(12, active0);
}
/**
 * Matches the 15th character (position 14) of the keyword literals still
 * alive in {@code active0}. Generated by JavaCC — do not edit by hand.
 *
 * @param old0    candidate-kind mask from the previous position
 * @param active0 kinds whose literal matched through position 13
 * @return the number of characters matched so far
 */
private final int jjMoveStringLiteralDfa14_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(12, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
   // EOF: no literal can complete; report best match found so far.
   jjStopStringLiteralDfa_0(13, active0);
      return 14;
   }
   switch(curChar)
   {
      case 111:
         return jjMoveStringLiteralDfa15_0(active0, 0x400L);
      case 115:
         // 's' completes kind 26; state 90 continues NFA matching.
         if ((active0 & 0x4000000L) != 0L)
            return jjStartNfaWithStates_0(14, 26, 90);
         break;
      default :
         break;
   }
   return jjStartNfa_0(13, active0);
}
/**
 * Matches the 16th (final) character (position 15) of the last keyword
 * literal still alive in {@code active0}. Generated by JavaCC — do not
 * edit by hand.
 *
 * @param old0    candidate-kind mask from the previous position
 * @param active0 kinds whose literal matched through position 14
 * @return the number of characters matched so far
 */
private final int jjMoveStringLiteralDfa15_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(13, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
   // EOF: no literal can complete; report best match found so far.
   jjStopStringLiteralDfa_0(14, active0);
      return 15;
   }
   switch(curChar)
   {
      case 108:
         // 'l' completes kind 10; state 90 continues NFA matching.
         if ((active0 & 0x400L) != 0L)
            return jjStartNfaWithStates_0(15, 10, 90);
         break;
      default :
         break;
   }
   return jjStartNfa_0(14, active0);
}
/**
 * Schedules {@code state} into the active NFA state set for the current
 * round, unless it has already been added this round (round-stamping via
 * {@code jjrounds} makes the add idempotent per round).
 *
 * @param state NFA state number to schedule
 */
private final void jjCheckNAdd(int state)
{
   // Already stamped for this round: nothing to do.
   if (jjrounds[state] == jjround)
      return;
   jjrounds[state] = jjround;
   jjstateSet[jjnewStateCnt++] = state;
}
/**
 * Unconditionally appends the states listed in
 * {@code jjnextStates[start..end]} (inclusive) to the active state set.
 * No round check is performed, so duplicates are possible.
 *
 * @param start first index into {@code jjnextStates}
 * @param end   last index into {@code jjnextStates} (inclusive)
 */
private final void jjAddStates(int start, int end)
{
   int idx = start;
   while (true) {
      jjstateSet[jjnewStateCnt++] = jjnextStates[idx];
      if (idx == end)
         break;
      idx++;
   }
}
/**
 * Schedules two NFA states for the current round, each only if not
 * already scheduled this round.
 *
 * @param state1 first state to schedule
 * @param state2 second state to schedule
 */
private final void jjCheckNAddTwoStates(int state1, int state2)
{
   jjCheckNAdd(state1);
   jjCheckNAdd(state2);
}
/**
 * Schedules every state listed in {@code jjnextStates[start..end]}
 * (inclusive) for the current round, skipping states already scheduled
 * this round.
 *
 * @param start first index into {@code jjnextStates}
 * @param end   last index into {@code jjnextStates} (inclusive)
 */
private final void jjCheckNAddStates(int start, int end)
{
   int idx = start;
   while (true) {
      jjCheckNAdd(jjnextStates[idx]);
      if (idx == end)
         break;
      idx++;
   }
}
/**
 * Schedules the pair of states at {@code jjnextStates[start]} and
 * {@code jjnextStates[start + 1]} for the current round.
 *
 * @param start index into {@code jjnextStates} of the first of two states
 */
private final void jjCheckNAddStates(int start)
{
   jjCheckNAdd(jjnextStates[start]);
   jjCheckNAdd(jjnextStates[start + 1]);
}
// 256-bit character-class bitmaps (4 x 64-bit words) consulted by
// jjCanMove_0 for characters >= 128; generated by JavaCC.
static final long[] jjbitVec0 = {
   0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
};
static final long[] jjbitVec2 = {
   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
/**
 * Runs the NFA simulation for lexical state DEFAULT, seeded with
 * {@code startState}, consuming characters from {@code input_stream}
 * until no NFA state survives.  The lowest-numbered token kind that can
 * end at each position is recorded into {@code jjmatchedKind} /
 * {@code jjmatchedPos}.  Double-buffers the state set inside
 * {@code jjstateSet} (halves swapped via {@code startsAt}).
 * Generated by JavaCC — do not edit by hand.
 *
 * @param startState initial NFA state
 * @param curPos     number of characters already consumed for this token
 * @return total number of characters examined
 */
private final int jjMoveNfa_0(int startState, int curPos)
{
   int[] nextStates;   // declared by the generator; not used in this method
   int startsAt = 0;
   jjnewStateCnt = 88; // total NFA state count for this lexer
   int i = 1;
   jjstateSet[0] = startState;
   int j, kind = 0x7fffffff; // j unused here; kind = best (lowest) match so far
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         // ASCII 0..63: membership tests use a single 64-bit mask of curChar.
         long l = 1L << curChar;
         MatchLoop: do
         {
            switch(jjstateSet[--i])
            {
               case 88:
                  if ((0x3ff000000000000L & l) != 0L) // digits 0-9
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 32)
                        kind = 32;
                  }
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  break;
               case 89:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 32)
                        kind = 32;
                  }
                  if (curChar == 58)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  break;
               case 90:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  break;
               case 45:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 27)
                        kind = 27;
                     jjCheckNAddTwoStates(32, 33);
                  }
                  else if (curChar == 47)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  break;
               case 4:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  break;
               case 14:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 32)
                        kind = 32;
                  }
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  break;
               case 5:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 32)
                        kind = 32;
                  }
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  break;
               case 6:
                  // Initial state: dispatch on digit, '/', '.', '"'.
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 27)
                        kind = 27;
                     jjCheckNAddStates(0, 4);
                  }
                  else if (curChar == 47)
                     jjAddStates(5, 6);
                  else if (curChar == 46)
                     jjstateSet[jjnewStateCnt++] = 45;
                  else if (curChar == 34)
                     jjCheckNAddStates(7, 9);
                  if (curChar == 47)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  else if (curChar == 46)
                     jjCheckNAdd(32);
                  break;
               case 21:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 32)
                        kind = 32;
                  }
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  else if (curChar == 58)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  break;
               case 31:
                  if (curChar == 46)
                     jjCheckNAdd(32);
                  break;
               case 32:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 27)
                     kind = 27;
                  jjCheckNAddTwoStates(32, 33);
                  break;
               case 34:
                  if ((0x280000000000L & l) != 0L) // '+' or '-' (exponent sign)
                     jjCheckNAdd(35);
                  break;
               case 35:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 27)
                     kind = 27;
                  jjCheckNAdd(35);
                  break;
               case 36:
                  if (curChar == 34)
                     jjCheckNAddStates(7, 9);
                  break;
               case 37:
                  // Any string-body char except '"', '\\', CR, LF.
                  if ((0xfffffffbffffdbffL & l) != 0L)
                     jjCheckNAddStates(7, 9);
                  break;
               case 39:
                  if ((0x8400000000L & l) != 0L)
                     jjCheckNAddStates(7, 9);
                  break;
               case 40:
                  if (curChar == 34 && kind > 28)
                     kind = 28;
                  break;
               case 41:
                  if ((0xff000000000000L & l) != 0L) // octal digit
                     jjCheckNAddStates(10, 13);
                  break;
               case 42:
                  if ((0xff000000000000L & l) != 0L)
                     jjCheckNAddStates(7, 9);
                  break;
               case 43:
                  if ((0xf000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 44;
                  break;
               case 44:
                  if ((0xff000000000000L & l) != 0L)
                     jjCheckNAdd(42);
                  break;
               case 46:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddTwoStates(46, 47);
                  break;
               case 47:
                  if (curChar != 47)
                     break;
                  if (kind > 31)
                     kind = 31;
                  jjCheckNAddTwoStates(46, 47);
                  break;
               case 48:
                  if (curChar == 46)
                     jjstateSet[jjnewStateCnt++] = 45;
                  break;
               case 63:
                  if (curChar == 47)
                     jjAddStates(5, 6);
                  break;
               case 64:
                  // "//" begins a single-line comment.
                  if (curChar == 47)
                     jjCheckNAddStates(14, 16);
                  break;
               case 65:
                  if ((0xffffffffffffdbffL & l) != 0L) // anything but CR/LF
                     jjCheckNAddStates(14, 16);
                  break;
               case 66:
                  if ((0x2400L & l) != 0L && kind > 5) // CR or LF ends the comment
                     kind = 5;
                  break;
               case 67:
                  if (curChar == 10 && kind > 5)
                     kind = 5;
                  break;
               case 68:
                  if (curChar == 13)
                     jjstateSet[jjnewStateCnt++] = 67;
                  break;
               case 69:
                  // "/*" begins a block comment.
                  if (curChar == 42)
                     jjCheckNAddTwoStates(70, 71);
                  break;
               case 70:
                  if ((0xfffffbffffffffffL & l) != 0L)
                     jjCheckNAddTwoStates(70, 71);
                  break;
               case 71:
                  if (curChar == 42)
                     jjCheckNAddStates(17, 19);
                  break;
               case 72:
                  if ((0xffff7bffffffffffL & l) != 0L)
                     jjCheckNAddTwoStates(73, 71);
                  break;
               case 73:
                  if ((0xfffffbffffffffffL & l) != 0L)
                     jjCheckNAddTwoStates(73, 71);
                  break;
               case 74:
                  if (curChar == 47 && kind > 6) // "*/" closes the comment
                     kind = 6;
                  break;
               case 76:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 29)
                     kind = 29;
                  jjCheckNAdd(76);
                  break;
               case 78:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 30)
                     kind = 30;
                  jjCheckNAdd(78);
                  break;
               case 79:
                  if (curChar != 58)
                     break;
                  if (kind > 31)
                     kind = 31;
                  jjCheckNAddTwoStates(46, 47);
                  break;
               case 80:
                  if (curChar == 58 && kind > 32)
                     kind = 32;
                  break;
               case 81:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 27)
                     kind = 27;
                  jjCheckNAddStates(0, 4);
                  break;
               case 82:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 27)
                     kind = 27;
                  jjCheckNAdd(82);
                  break;
               case 83:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddTwoStates(83, 31);
                  break;
               case 84:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddTwoStates(84, 85);
                  break;
               case 86:
                  if ((0x280000000000L & l) != 0L)
                     jjCheckNAdd(87);
                  break;
               case 87:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 27)
                     kind = 27;
                  jjCheckNAdd(87);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         // ASCII 64..127: mask over the low six bits of curChar.
         long l = 1L << (curChar & 077);
         MatchLoop: do
         {
            switch(jjstateSet[--i])
            {
               case 88:
                  if ((0x7fffffe87fffffeL & l) != 0L) // letters and '_'
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  break;
               case 89:
               case 78:
                  if ((0x7fffffe87fffffeL & l) == 0L)
                     break;
                  if (kind > 30)
                     kind = 30;
                  jjCheckNAdd(78);
                  break;
               case 90:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  break;
               case 4:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  if (curChar == 115)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               case 14:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  if (curChar == 97)
                     jjstateSet[jjnewStateCnt++] = 13;
                  break;
               case 5:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  if (curChar == 105)
                     jjstateSet[jjnewStateCnt++] = 4;
                  break;
               case 6:
                  // Initial state for letters: identifiers, keywords, '\\'.
                  if ((0x7fffffe07fffffeL & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAddStates(20, 22);
                  }
                  else if (curChar == 92)
                  {
                     if (kind > 31)
                        kind = 31;
                     jjCheckNAddTwoStates(46, 47);
                  }
                  if ((0x7fffffe00000000L & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  else if ((0x7fffffeL & l) != 0L)
                  {
                     if (kind > 35)
                        kind = 35;
                  }
                  if (curChar == 114)
                     jjAddStates(23, 24);
                  else if (curChar == 102)
                     jjstateSet[jjnewStateCnt++] = 29;
                  else if (curChar == 116)
                     jjstateSet[jjnewStateCnt++] = 25;
                  else if (curChar == 110)
                     jjstateSet[jjnewStateCnt++] = 21;
                  else if (curChar == 115)
                     jjstateSet[jjnewStateCnt++] = 14;
                  else if (curChar == 100)
                     jjstateSet[jjnewStateCnt++] = 5;
                  break;
               case 21:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 30)
                        kind = 30;
                     jjCheckNAdd(78);
                  }
                  if ((0x7fffffe87fffffeL & l) != 0L)
                  {
                     if (kind > 29)
                        kind = 29;
                     jjCheckNAdd(76);
                  }
                  if (curChar == 101)
                     jjstateSet[jjnewStateCnt++] = 20;
                  break;
               case 0:
                  if (curChar == 100 && kind > 16)
                     kind = 16;
                  break;
               case 1:
                  if (curChar == 114)
                     jjstateSet[jjnewStateCnt++] = 0;
                  break;
               case 2:
                  if (curChar == 97)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               case 3:
                  if (curChar == 99)
                     jjstateSet[jjnewStateCnt++] = 2;
                  break;
               case 7:
                  if (curChar == 115 && kind > 18)
                     kind = 18;
                  break;
               case 8:
               case 16:
                  if (curChar == 117)
                     jjCheckNAdd(7);
                  break;
               case 9:
                  if (curChar == 99)
                     jjstateSet[jjnewStateCnt++] = 8;
                  break;
               case 10:
                  if (curChar == 111)
                     jjstateSet[jjnewStateCnt++] = 9;
                  break;
               case 11:
                  if (curChar == 70)
                     jjstateSet[jjnewStateCnt++] = 10;
                  break;
               case 12:
                  if (curChar == 101)
                     jjstateSet[jjnewStateCnt++] = 11;
                  break;
               case 13:
                  if (curChar == 109)
                     jjstateSet[jjnewStateCnt++] = 12;
                  break;
               case 15:
                  if (curChar == 115)
                     jjstateSet[jjnewStateCnt++] = 14;
                  break;
               case 17:
                  if (curChar == 99)
                     jjstateSet[jjnewStateCnt++] = 16;
                  break;
               case 18:
                  if (curChar == 111)
                     jjstateSet[jjnewStateCnt++] = 17;
                  break;
               case 19:
                  if (curChar == 70)
                     jjstateSet[jjnewStateCnt++] = 18;
                  break;
               case 20:
                  if (curChar == 119)
                     jjstateSet[jjnewStateCnt++] = 19;
                  break;
               case 22:
                  if (curChar == 110)
                     jjstateSet[jjnewStateCnt++] = 21;
                  break;
               case 23:
                  if (curChar == 101 && kind > 23)
                     kind = 23;
                  break;
               case 24:
                  if (curChar == 117)
                     jjCheckNAdd(23);
                  break;
               case 25:
                  if (curChar == 114)
                     jjstateSet[jjnewStateCnt++] = 24;
                  break;
               case 26:
                  if (curChar == 116)
                     jjstateSet[jjnewStateCnt++] = 25;
                  break;
               case 27:
                  if (curChar == 115)
                     jjCheckNAdd(23);
                  break;
               case 28:
                  if (curChar == 108)
                     jjstateSet[jjnewStateCnt++] = 27;
                  break;
               case 29:
                  if (curChar == 97)
                     jjstateSet[jjnewStateCnt++] = 28;
                  break;
               case 30:
                  if (curChar == 102)
                     jjstateSet[jjnewStateCnt++] = 29;
                  break;
               case 33:
                  if ((0x2000000020L & l) != 0L) // 'e' or 'E' (exponent)
                     jjAddStates(25, 26);
                  break;
               case 37:
                  if ((0xffffffffefffffffL & l) != 0L)
                     jjCheckNAddStates(7, 9);
                  break;
               case 38:
                  if (curChar == 92) // escape sequence inside string
                     jjAddStates(27, 29);
                  break;
               case 39:
                  if ((0x14404410000000L & l) != 0L)
                     jjCheckNAddStates(7, 9);
                  break;
               case 46:
                  if ((0x7fffffe87fffffeL & l) != 0L)
                     jjCheckNAddTwoStates(46, 47);
                  break;
               case 47:
                  if (curChar != 92)
                     break;
                  if (kind > 31)
                     kind = 31;
                  jjCheckNAddTwoStates(46, 47);
                  break;
               case 49:
                  if ((0x7fffffeL & l) != 0L && kind > 35)
                     kind = 35;
                  break;
               case 50:
                  if (curChar == 114)
                     jjAddStates(23, 24);
                  break;
               case 51:
                  if (curChar == 101 && kind > 16)
                     kind = 16;
                  break;
               case 52:
                  if (curChar == 117)
                     jjCheckNAdd(51);
                  break;
               case 53:
                  if (curChar == 101)
                     jjstateSet[jjnewStateCnt++] = 52;
                  break;
               case 54:
                  if (curChar == 117)
                     jjstateSet[jjnewStateCnt++] = 53;
                  break;
               case 55:
                  if (curChar == 113)
                     jjstateSet[jjnewStateCnt++] = 54;
                  break;
               case 56:
                  if (curChar == 101)
                     jjstateSet[jjnewStateCnt++] = 55;
                  break;
               case 57:
                  if (curChar == 118)
                     jjCheckNAdd(51);
                  break;
               case 58:
                  if (curChar == 101)
                     jjstateSet[jjnewStateCnt++] = 57;
                  break;
               case 59:
                  if (curChar == 105)
                     jjstateSet[jjnewStateCnt++] = 58;
                  break;
               case 60:
                  if (curChar == 114)
                     jjstateSet[jjnewStateCnt++] = 59;
                  break;
               case 61:
                  if (curChar == 116)
                     jjstateSet[jjnewStateCnt++] = 60;
                  break;
               case 62:
                  if (curChar == 101)
                     jjstateSet[jjnewStateCnt++] = 61;
                  break;
               case 65:
                  jjAddStates(14, 16);
                  break;
               case 70:
                  jjCheckNAddTwoStates(70, 71);
                  break;
               case 72:
               case 73:
                  jjCheckNAddTwoStates(73, 71);
                  break;
               case 75:
                  if ((0x7fffffe00000000L & l) == 0L)
                     break;
                  if (kind > 29)
                     kind = 29;
                  jjCheckNAdd(76);
                  break;
               case 76:
                  if ((0x7fffffe87fffffeL & l) == 0L)
                     break;
                  if (kind > 29)
                     kind = 29;
                  jjCheckNAdd(76);
                  break;
               case 77:
                  if ((0x7fffffe07fffffeL & l) == 0L)
                     break;
                  if (kind > 30)
                     kind = 30;
                  jjCheckNAddStates(20, 22);
                  break;
               case 85:
                  if ((0x2000000020L & l) != 0L)
                     jjAddStates(30, 31);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         // Characters >= 128: split into page (hiByte) and offset, then
         // look up the generated bit vectors via jjCanMove_0.
         int hiByte = (int)(curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         MatchLoop: do
         {
            switch(jjstateSet[--i])
            {
               case 37:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                     jjAddStates(7, 9);
                  break;
               case 65:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                     jjAddStates(14, 16);
                  break;
               case 70:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                     jjCheckNAddTwoStates(70, 71);
                  break;
               case 72:
               case 73:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                     jjCheckNAddTwoStates(73, 71);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      // Record the best match ending at this position, if any.
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap the two halves of jjstateSet; stop when no states survive.
      if ((i = jjnewStateCnt) == (startsAt = 88 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { return curPos; }
   }
}
// Flattened successor-state lists; (start, end) index pairs into this
// array are passed to jjAddStates / jjCheckNAddStates.
static final int[] jjnextStates = {
   82, 83, 31, 84, 85, 64, 69, 37, 38, 40, 37, 38, 42, 40, 65, 66,
   68, 71, 72, 74, 78, 79, 80, 56, 62, 34, 35, 39, 41, 43, 86, 87,
};
/**
 * Tests whether a character &gt;= 128 belongs to the generated character
 * class: Latin-1 pages consult {@code jjbitVec2}, all other pages
 * consult {@code jjbitVec0}.
 *
 * @param hiByte high byte (page) of the character
 * @param i1     word index into jjbitVec0 for the page
 * @param i2     word index into jjbitVec2 for the low byte
 * @param l1     bit mask selecting the page within its word
 * @param l2     bit mask selecting the character within its word
 * @return true if the character is in the class
 */
private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
{
   if (hiByte == 0)
      return (jjbitVec2[i2] & l2) != 0L;
   return (jjbitVec0[i1] & l1) != 0L;
}
// Literal image of each token kind (octal-escaped), or null where the
// token has no fixed image (e.g. identifiers, numbers, comments).
public static final String[] jjstrLiteralImages = {
"", null, null, null, null, null, null, "\115\101\123",
"\141\147\145\156\164\163", "\145\156\166\151\162\157\156\155\145\156\164",
"\145\170\145\143\165\164\151\157\156\103\157\156\164\162\157\154", "\141\164", "\151\156\146\162\141\163\164\162\165\143\164\165\162\145",
"\143\154\141\163\163\160\141\164\150", "\141\163\154\123\157\165\162\143\145\120\141\164\150",
"\145\166\145\156\164\163", null, "\151\156\164\102\145\154\163", null, "\156\162\143\142\160",
"\166\145\162\142\157\163\145", "\144\151\162\145\143\164\151\166\145\163",
"\163\171\156\143\150\162\157\156\151\163\145\144", null, "\141\147\145\156\164\103\154\141\163\163",
"\141\147\145\156\164\101\162\143\150\103\154\141\163\163", "\142\145\154\151\145\146\102\141\163\145\103\154\141\163\163", null, null,
null, null, null, null, null, null, null, null, "\173", "\175", "\72", "\43", "\73",
"\56", "\50", "\54", "\51", "\133", "\135", "\75", };
// Names of the lexical states; this lexer has only DEFAULT (state 0).
public static final String[] lexStateNames = {
   "DEFAULT",
};
// Bitmask of kinds that produce real tokens (bit k set => kind k is a token).
static final long[] jjtoToken = {
   0x1ffffffffff81L,
};
// Bitmask of kinds that are silently skipped (whitespace, comments).
static final long[] jjtoSkip = {
   0x7eL,
};
// Character source being tokenized.
protected SimpleCharStream input_stream;
// Round stamp per NFA state (see jjCheckNAdd); 88 = number of NFA states.
private final int[] jjrounds = new int[88];
// Double-buffered active NFA state set (two halves of 88 each).
private final int[] jjstateSet = new int[176];
// Most recently read character.
protected char curChar;
/**
 * Creates a token manager reading from {@code stream}.
 *
 * @param stream non-static character stream to tokenize
 * @throws Error if the char stream class was generated in static mode,
 *         which is incompatible with this non-static lexer
 */
public mas2jTokenManager(SimpleCharStream stream){
   if (SimpleCharStream.staticFlag)
      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
   input_stream = stream;
}
/**
 * Creates a token manager reading from {@code stream} and starting in
 * the given lexical state.
 *
 * @param stream   character stream to tokenize
 * @param lexState initial lexical state (only 0 is valid for this lexer)
 */
public mas2jTokenManager(SimpleCharStream stream, int lexState){
   this(stream);
   SwitchTo(lexState);
}
/**
 * Reinitializes the token manager to read from a new stream, resetting
 * match position, state counters, and the per-state round stamps.
 *
 * @param stream the new character stream
 */
public void ReInit(SimpleCharStream stream)
{
   jjmatchedPos = jjnewStateCnt = 0;
   curLexState = defaultLexState;
   input_stream = stream;
   ReInitRounds();
}
/**
 * Resets the round counter and clears every per-state round stamp so no
 * state appears to be scheduled for the (restarted) current round.
 */
private final void ReInitRounds()
{
   jjround = 0x80000001;
   // Stamp every state with a value the round counter can never hold.
   for (int state = 0; state < 88; state++)
      jjrounds[state] = 0x80000000;
}
/**
 * Reinitializes the token manager on a new stream and switches to the
 * given lexical state.
 *
 * @param stream   the new character stream
 * @param lexState lexical state to switch to (only 0 is valid here)
 */
public void ReInit(SimpleCharStream stream, int lexState)
{
   ReInit(stream);
   SwitchTo(lexState);
}
/**
 * Switches to the given lexical state.  This token manager defines only
 * state 0 (DEFAULT), so any other value is rejected.
 *
 * @param lexState target lexical state
 * @throws TokenMgrError if {@code lexState} is not a valid state
 */
public void SwitchTo(int lexState)
{
   if (lexState == 0)
      curLexState = lexState;
   else
      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
}
/**
 * Builds a {@link Token} for the match recorded in {@code jjmatchedKind},
 * using the fixed literal image when one exists, otherwise the raw text
 * consumed from the input stream, and stamps it with the stream's
 * begin/end line and column.
 *
 * @return the freshly populated token
 */
protected Token jjFillToken()
{
   Token t = Token.newToken(jjmatchedKind);
   t.kind = jjmatchedKind;
   String im = jjstrLiteralImages[jjmatchedKind];
   // null image means the kind has no fixed spelling (identifier, number, ...).
   t.image = (im == null) ? input_stream.GetImage() : im;
   t.beginLine = input_stream.getBeginLine();
   t.beginColumn = input_stream.getBeginColumn();
   t.endLine = input_stream.getEndLine();
   t.endColumn = input_stream.getEndColumn();
   return t;
}
// Current and default lexical state (only DEFAULT = 0 exists).
int curLexState = 0;
int defaultLexState = 0;
// Number of states scheduled for the next NFA round.
int jjnewStateCnt;
// Monotonically increasing round counter for jjCheckNAdd stamping.
int jjround;
// Position (offset within the token) and kind of the best match so far.
int jjmatchedPos;
int jjmatchedKind;
/**
 * Returns the next token from the input stream.  Skips whitespace,
 * dispatches to the string-literal DFA (which falls back to the NFA),
 * backs the stream up to the end of the longest match, and either
 * returns a token or loops to skip a non-token match (comments,
 * whitespace kinds).  Generated by JavaCC — do not edit by hand.
 *
 * @return the next token; an EOF token (kind 0) at end of input
 * @throws TokenMgrError if a character sequence matches no token
 */
public Token getNextToken()
{
  int kind;
  Token specialToken = null;
  Token matchedToken;
  int curPos = 0;
  EOFLoop :
  for (;;)
  {
   try
   {
      curChar = input_stream.BeginToken();
   }
   catch(java.io.IOException e)
   {
      // End of input: synthesize the EOF token (kind 0).
      jjmatchedKind = 0;
      matchedToken = jjFillToken();
      return matchedToken;
   }
   // Skip whitespace: TAB, LF, FF(?), CR, space (mask 0x100002600).
   try { input_stream.backup(0);
      while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L)
         curChar = input_stream.BeginToken();
   }
   catch (java.io.IOException e1) { continue EOFLoop; }
   jjmatchedKind = 0x7fffffff;
   jjmatchedPos = 0;
   curPos = jjMoveStringLiteralDfa0_0();
   if (jjmatchedKind != 0x7fffffff)
   {
      // Give back any characters read past the end of the match.
      if (jjmatchedPos + 1 < curPos)
         input_stream.backup(curPos - jjmatchedPos - 1);
      if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
      {
         matchedToken = jjFillToken();
         return matchedToken;
      }
      else
      {
         // Matched a skip kind (comment/whitespace): try again.
         continue EOFLoop;
      }
   }
   // No match at all: gather context and raise a lexical error.
   int error_line = input_stream.getEndLine();
   int error_column = input_stream.getEndColumn();
   String error_after = null;
   boolean EOFSeen = false;
   try { input_stream.readChar(); input_stream.backup(1); }
   catch (java.io.IOException e1) {
      EOFSeen = true;
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
      if (curChar == '\n' || curChar == '\r') {
         error_line++;
         error_column = 0;
      }
      else
         error_column++;
   }
   if (!EOFSeen) {
      input_stream.backup(1);
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
   }
   throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
  }
}
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.cmmn.entity.runtime;
import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull;
import java.util.List;
import org.camunda.bpm.engine.exception.NotValidException;
import org.camunda.bpm.engine.impl.AbstractVariableQueryImpl;
import org.camunda.bpm.engine.impl.Page;
import org.camunda.bpm.engine.impl.QueryOperator;
import org.camunda.bpm.engine.impl.QueryOrderingProperty;
import org.camunda.bpm.engine.impl.cmmn.execution.CaseExecutionState;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.runtime.CaseExecution;
import org.camunda.bpm.engine.runtime.CaseExecutionQuery;
/**
* @author Roman Smirnov
*
*/
/**
 * Fluent query implementation for {@link CaseExecution} instances.
 * Filter methods validate their argument, store it in a protected field
 * and return {@code this} for chaining; execution is delegated to the
 * case execution manager obtained from the {@link CommandContext}.
 */
public class CaseExecutionQueryImpl extends AbstractVariableQueryImpl<CaseExecutionQuery, CaseExecution> implements CaseExecutionQuery {
  private static final long serialVersionUID = 1L;
  // Filter criteria. NOTE(review): field names appear to be referenced by
  // the dynamic ibatis mapping (see comment below on the super/sub fields),
  // so renaming them would likely break the generated SQL — confirm before
  // refactoring.
  protected String caseDefinitionId;
  protected String caseDefinitionKey;
  protected String activityId;
  protected String caseExecutionId;
  protected String caseInstanceId;
  protected String businessKey;
  protected CaseExecutionState state;
  protected Boolean required = false;
  protected Boolean repeatable = false;
  protected Boolean repetition = false;
  // Not used by end-users, but needed for dynamic ibatis query
  protected String superProcessInstanceId;
  protected String subProcessInstanceId;
  protected String superCaseInstanceId;
  protected String subCaseInstanceId;
  protected String deploymentId;
  /** Creates a query that can only be executed via a command context set up externally. */
  public CaseExecutionQueryImpl() {
  }
  /** Creates a query executed through the given command executor. */
  public CaseExecutionQueryImpl(CommandExecutor commandExecutor) {
    super(commandExecutor);
  }
  /** Restricts to executions belonging to the given case instance. Argument must not be null. */
  public CaseExecutionQuery caseInstanceId(String caseInstanceId) {
    ensureNotNull(NotValidException.class, "caseInstanceId", caseInstanceId);
    this.caseInstanceId = caseInstanceId;
    return this;
  }
  /** Restricts to executions of the given case definition id. Argument must not be null. */
  public CaseExecutionQuery caseDefinitionId(String caseDefinitionId) {
    ensureNotNull(NotValidException.class, "caseDefinitionId", caseDefinitionId);
    this.caseDefinitionId = caseDefinitionId;
    return this;
  }
  /** Restricts to executions of the given case definition key. Argument must not be null. */
  public CaseExecutionQuery caseDefinitionKey(String caseDefinitionKey) {
    ensureNotNull(NotValidException.class, "caseDefinitionKey", caseDefinitionKey);
    this.caseDefinitionKey = caseDefinitionKey;
    return this;
  }
  /** Restricts to executions whose case instance has the given business key. */
  public CaseExecutionQuery caseInstanceBusinessKey(String caseInstanceBusinessKey) {
    ensureNotNull(NotValidException.class, "caseInstanceBusinessKey", caseInstanceBusinessKey);
    this.businessKey = caseInstanceBusinessKey;
    return this;
  }
  /** Restricts to the execution with the given id. Argument must not be null. */
  public CaseExecutionQuery caseExecutionId(String caseExecutionId) {
    ensureNotNull(NotValidException.class, "caseExecutionId", caseExecutionId);
    this.caseExecutionId = caseExecutionId;
    return this;
  }
  /** Restricts to executions of the given plan-item/activity id. Argument must not be null. */
  public CaseExecutionQuery activityId(String activityId) {
    ensureNotNull(NotValidException.class, "activityId", activityId);
    this.activityId = activityId;
    return this;
  }
  /** Restricts to executions marked as required. */
  public CaseExecutionQuery required() {
    this.required = true;
    return this;
  }
  /** Restricts to executions marked as repeatable. */
  public CaseExecutionQuery repeatable() {
    this.repeatable = true;
    return this;
  }
  /** Restricts to executions that are repetitions. */
  public CaseExecutionQuery repetition() {
    this.repetition = true;
    return this;
  }
  /** Restricts to executions in state AVAILABLE. */
  public CaseExecutionQuery available() {
    state = CaseExecutionState.AVAILABLE;
    return this;
  }
  /** Restricts to executions in state ENABLED. */
  public CaseExecutionQuery enabled() {
    state = CaseExecutionState.ENABLED;
    return this;
  }
  /** Restricts to executions in state ACTIVE. */
  public CaseExecutionQuery active() {
    state = CaseExecutionState.ACTIVE;
    return this;
  }
  /** Restricts to executions in state DISABLED. */
  public CaseExecutionQuery disabled() {
    state = CaseExecutionState.DISABLED;
    return this;
  }
  // Case-instance variable filters: the 'false' flag marks the variable as
  // a case-instance variable rather than a local execution variable.
  public CaseExecutionQuery caseInstanceVariableValueEquals(String name, Object value) {
    addVariable(name, value, QueryOperator.EQUALS, false);
    return this;
  }
  public CaseExecutionQuery caseInstanceVariableValueNotEquals(String name, Object value) {
    addVariable(name, value, QueryOperator.NOT_EQUALS, false);
    return this;
  }
  public CaseExecutionQuery caseInstanceVariableValueGreaterThan(String name, Object value) {
    addVariable(name, value, QueryOperator.GREATER_THAN, false);
    return this;
  }
  public CaseExecutionQuery caseInstanceVariableValueGreaterThanOrEqual(String name, Object value) {
    addVariable(name, value, QueryOperator.GREATER_THAN_OR_EQUAL, false);
    return this;
  }
  public CaseExecutionQuery caseInstanceVariableValueLessThan(String name, Object value) {
    addVariable(name, value, QueryOperator.LESS_THAN, false);
    return this;
  }
  public CaseExecutionQuery caseInstanceVariableValueLessThanOrEqual(String name, Object value) {
    addVariable(name, value, QueryOperator.LESS_THAN_OR_EQUAL, false);
    return this;
  }
  public CaseExecutionQuery caseInstanceVariableValueLike(String name, String value) {
    addVariable(name, value, QueryOperator.LIKE, false);
    return this;
  }
  // order by ///////////////////////////////////////////
  public CaseExecutionQuery orderByCaseExecutionId() {
    orderBy(CaseExecutionQueryProperty.CASE_EXECUTION_ID);
    return this;
  }
  public CaseExecutionQuery orderByCaseDefinitionKey() {
    // Ordering by definition key requires joining the case definition relation.
    orderBy(new QueryOrderingProperty(QueryOrderingProperty.RELATION_CASE_DEFINITION,
        CaseExecutionQueryProperty.CASE_DEFINITION_KEY));
    return this;
  }
  public CaseExecutionQuery orderByCaseDefinitionId() {
    orderBy(CaseExecutionQueryProperty.CASE_DEFINITION_ID);
    return this;
  }
  // results ////////////////////////////////////////////
  /** Counts matching executions after validating the query and its variable filters. */
  public long executeCount(CommandContext commandContext) {
    checkQueryOk();
    ensureVariablesInitialized();
    return commandContext
      .getCaseExecutionManager()
      .findCaseExecutionCountByQueryCriteria(this);
  }
  /** Lists matching executions (paged) and eagerly resolves each execution's activity. */
  public List<CaseExecution> executeList(CommandContext commandContext, Page page) {
    checkQueryOk();
    ensureVariablesInitialized();
    List<CaseExecution> result = commandContext
      .getCaseExecutionManager()
      .findCaseExecutionsByQueryCriteria(this, page);
    for (CaseExecution caseExecution : result) {
      CaseExecutionEntity caseExecutionEntity = (CaseExecutionEntity) caseExecution;
      // initializes the name, type and description
      // of the activity on current case execution
      caseExecutionEntity.getActivity();
    }
    return result;
  }
  // getters /////////////////////////////////////////////
  public String getCaseDefinitionId() {
    return caseDefinitionId;
  }
  public String getCaseDefinitionKey() {
    return caseDefinitionKey;
  }
  public String getActivityId() {
    return activityId;
  }
  public String getCaseExecutionId() {
    return caseExecutionId;
  }
  public String getCaseInstanceId() {
    return caseInstanceId;
  }
  public String getBusinessKey() {
    return businessKey;
  }
  public CaseExecutionState getState() {
    return state;
  }
  /** Always false: this query matches any case execution, not only instances. */
  public boolean isCaseInstancesOnly() {
    return false;
  }
  public String getSuperProcessInstanceId() {
    return superProcessInstanceId;
  }
  public String getSubProcessInstanceId() {
    return subProcessInstanceId;
  }
  public String getSuperCaseInstanceId() {
    return superCaseInstanceId;
  }
  public String getSubCaseInstanceId() {
    return subCaseInstanceId;
  }
  public String getDeploymentId() {
    return deploymentId;
  }
  public Boolean isRequired() {
    return required;
  }
  public Boolean isRepeatable() {
    return repeatable;
  }
  public Boolean isRepetition() {
    return repetition;
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.