/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3.builder; import org.apache.commons.lang3.ObjectUtils; /** * <p>Assists in implementing {@link Object#toString()} methods.</p> * * <p>This class enables a good and consistent <code>toString()</code> to be built for any * class or object. This class aims to simplify the process by:</p> * <ul> * <li>allowing field names</li> * <li>handling all types consistently</li> * <li>handling nulls consistently</li> * <li>outputting arrays and multi-dimensional arrays</li> * <li>enabling the detail level to be controlled for Objects and Collections</li> * <li>handling class hierarchies</li> * </ul> * * <p>To use this class write code as follows:</p> * * <pre> * public class Person { * String name; * int age; * boolean smoker; * * ... * * public String toString() { * return new ToStringBuilder(this). * append("name", name). * append("age", age). * append("smoker", smoker). * toString(); * } * } * </pre> * * <p>This will produce a toString of the format: * <code>Person@7f54[name=Stephen,age=29,smoker=false]</code></p> * * <p>To add the superclass <code>toString</code>, use {@link #appendSuper}. * To append the <code>toString</code> from an object that is delegated * to (or any other object), use {@link #appendToString}.</p> * * <p>Alternatively, there is a method that uses reflection to determine * the fields to test. Because these fields are usually private, the method, * <code>reflectionToString</code>, uses <code>AccessibleObject.setAccessible</code> to * change the visibility of the fields. This will fail under a security manager, * unless the appropriate permissions are set up correctly. It is also * slower than testing explicitly.</p> * * <p>A typical invocation for this method would look like:</p> * * <pre> * public String toString() { * return ToStringBuilder.reflectionToString(this); * } * </pre> * * <p>You can also use the builder to debug 3rd party objects:</p> * * <pre> * System.out.println("An object: " + ToStringBuilder.reflectionToString(anObject)); * </pre> * * <p>The exact format of the <code>toString</code> is determined by * the {@link ToStringStyle} passed into the constructor.</p> * * @since 1.0 */ public class ToStringBuilder implements Builder<String> { /** * The default style of output to use, not null. */ private static volatile ToStringStyle defaultStyle = ToStringStyle.DEFAULT_STYLE; //---------------------------------------------------------------------------- /** * <p>Gets the default <code>ToStringStyle</code> to use.</p> * * <p>This method gets a singleton default value, typically for the whole JVM. * Changing this default should generally only be done during application startup. 
* It is recommended to pass a <code>ToStringStyle</code> to the constructor instead * of using this global default.</p> * * <p>This method can be used from multiple threads. * Internally, a <code>volatile</code> variable is used to provide the guarantee * that the latest value set using {@link #setDefaultStyle} is the value returned. * It is strongly recommended that the default style is only changed during application startup.</p> * * <p>One reason for changing the default could be to have a verbose style during * development and a compact style in production.</p> * * @return the default <code>ToStringStyle</code>, never null */ public static ToStringStyle getDefaultStyle() { return defaultStyle; } /** * <p>Sets the default <code>ToStringStyle</code> to use.</p> * * <p>This method sets a singleton default value, typically for the whole JVM. * Changing this default should generally only be done during application startup. * It is recommended to pass a <code>ToStringStyle</code> to the constructor instead * of changing this global default.</p> * * <p>This method is not intended for use from multiple threads. * Internally, a <code>volatile</code> variable is used to provide the guarantee * that the latest value set is the value returned from {@link #getDefaultStyle}.</p> * * @param style the default <code>ToStringStyle</code> * @throws IllegalArgumentException if the style is <code>null</code> */ public static void setDefaultStyle(final ToStringStyle style) { if (style == null) { throw new IllegalArgumentException("The style must not be null"); } defaultStyle = style; } //---------------------------------------------------------------------------- /** * <p>Uses <code>ReflectionToStringBuilder</code> to generate a * <code>toString</code> for the specified object.</p> * * @param object the Object to be output * @return the String result * @see ReflectionToStringBuilder#toString(Object) */ public static String reflectionToString(final Object object) { return ReflectionToStringBuilder.toString(object); } /** * <p>Uses <code>ReflectionToStringBuilder</code> to generate a * <code>toString</code> for the specified object.</p> * * @param object the Object to be output * @param style the style of the <code>toString</code> to create, may be <code>null</code> * @return the String result * @see ReflectionToStringBuilder#toString(Object,ToStringStyle) */ public static String reflectionToString(final Object object, final ToStringStyle style) { return ReflectionToStringBuilder.toString(object, style); } /** * <p>Uses <code>ReflectionToStringBuilder</code> to generate a * <code>toString</code> for the specified object.</p> * * @param object the Object to be output * @param style the style of the <code>toString</code> to create, may be <code>null</code> * @param outputTransients whether to include transient fields * @return the String result * @see ReflectionToStringBuilder#toString(Object,ToStringStyle,boolean) */ public static String reflectionToString(final Object object, final ToStringStyle style, final boolean outputTransients) { return ReflectionToStringBuilder.toString(object, style, outputTransients, false, null); } /** * <p>Uses <code>ReflectionToStringBuilder</code> to generate a * <code>toString</code> for the specified object.</p> * * @param <T> the type of the object * @param object the Object to be output * @param style the style of the <code>toString</code> to create, may be <code>null</code> * @param outputTransients whether to include transient fields * @param reflectUpToClass the superclass 
to reflect up to (inclusive), may be <code>null</code> * @return the String result * @see ReflectionToStringBuilder#toString(Object,ToStringStyle,boolean,boolean,Class) * @since 2.0 */ public static <T> String reflectionToString( final T object, final ToStringStyle style, final boolean outputTransients, final Class<? super T> reflectUpToClass) { return ReflectionToStringBuilder.toString(object, style, outputTransients, false, reflectUpToClass); } //---------------------------------------------------------------------------- /** * Current toString buffer, not null. */ private final StringBuffer buffer; /** * The object being output, may be null. */ private final Object object; /** * The style of output to use, not null. */ private final ToStringStyle style; /** * <p>Constructs a builder for the specified object using the default output style.</p> * * <p>This default style is obtained from {@link #getDefaultStyle()}.</p> * * @param object the Object to build a <code>toString</code> for, not recommended to be null */ public ToStringBuilder(final Object object) { this(object, null, null); } /** * <p>Constructs a builder for the specified object using the a defined output style.</p> * * <p>If the style is <code>null</code>, the default style is used.</p> * * @param object the Object to build a <code>toString</code> for, not recommended to be null * @param style the style of the <code>toString</code> to create, null uses the default style */ public ToStringBuilder(final Object object, final ToStringStyle style) { this(object, style, null); } /** * <p>Constructs a builder for the specified object.</p> * * <p>If the style is <code>null</code>, the default style is used.</p> * * <p>If the buffer is <code>null</code>, a new one is created.</p> * * @param object the Object to build a <code>toString</code> for, not recommended to be null * @param style the style of the <code>toString</code> to create, null uses the default style * @param buffer the <code>StringBuffer</code> to populate, may be null */ public ToStringBuilder(final Object object, ToStringStyle style, StringBuffer buffer) { if (style == null) { style = getDefaultStyle(); } if (buffer == null) { buffer = new StringBuffer(512); } this.buffer = buffer; this.style = style; this.object = object; style.appendStart(buffer, object); } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>boolean</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final boolean value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>boolean</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final boolean[] array) { style.append(buffer, null, array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>byte</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final byte value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>byte</code> * array.</p> * * @param array the array to add to the 
<code>toString</code> * @return this */ public ToStringBuilder append(final byte[] array) { style.append(buffer, null, array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>char</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final char value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>char</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final char[] array) { style.append(buffer, null, array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>double</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final double value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>double</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final double[] array) { style.append(buffer, null, array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>float</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final float value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>float</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final float[] array) { style.append(buffer, null, array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> an <code>int</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final int value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> an <code>int</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final int[] array) { style.append(buffer, null, array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>long</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final long value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>long</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final long[] array) { style.append(buffer, null, 
array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> an <code>Object</code> * value.</p> * * @param obj the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final Object obj) { style.append(buffer, null, obj, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> an <code>Object</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final Object[] array) { style.append(buffer, null, array, null); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>short</code> * value.</p> * * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final short value) { style.append(buffer, null, value); return this; } //---------------------------------------------------------------------------- /** * <p>Append to the <code>toString</code> a <code>short</code> * array.</p> * * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final short[] array) { style.append(buffer, null, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>boolean</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final boolean value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> a <code>boolean</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final boolean[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>boolean</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. 
Setting * <code>false</code> will output a summary, typically the size of * the array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final boolean[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> an <code>byte</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final byte value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> a <code>byte</code> array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final byte[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>byte</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. Setting * <code>false</code> will output a summary, typically the size of * the array. * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final byte[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> a <code>char</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final char value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> a <code>char</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final char[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>char</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. 
Setting * <code>false</code> will output a summary, typically the size of * the array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final char[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> a <code>double</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final double value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> a <code>double</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final double[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>double</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. Setting * <code>false</code> will output a summary, typically the size of * the array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final double[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> an <code>float</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final float value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> a <code>float</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final float[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>float</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. 
Setting * <code>false</code> will output a summary, typically the size of * the array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final float[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> an <code>int</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final int value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> an <code>int</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final int[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> an <code>int</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. Setting * <code>false</code> will output a summary, typically the size of * the array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final int[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> a <code>long</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final long value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> a <code>long</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final long[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>long</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. 
Setting * <code>false</code> will output a summary, typically the size of * the array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final long[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> an <code>Object</code> * value.</p> * * @param fieldName the field name * @param obj the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final Object obj) { style.append(buffer, fieldName, obj, null); return this; } /** * <p>Append to the <code>toString</code> an <code>Object</code> * value.</p> * * @param fieldName the field name * @param obj the value to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, * <code>false</code> for summary info * @return this */ public ToStringBuilder append(final String fieldName, final Object obj, final boolean fullDetail) { style.append(buffer, fieldName, obj, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> an <code>Object</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final Object[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> an <code>Object</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. Setting * <code>false</code> will output a summary, typically the size of * the array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final Object[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Append to the <code>toString</code> an <code>short</code> * value.</p> * * @param fieldName the field name * @param value the value to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final short value) { style.append(buffer, fieldName, value); return this; } /** * <p>Append to the <code>toString</code> a <code>short</code> * array.</p> * * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @return this */ public ToStringBuilder append(final String fieldName, final short[] array) { style.append(buffer, fieldName, array, null); return this; } /** * <p>Append to the <code>toString</code> a <code>short</code> * array.</p> * * <p>A boolean parameter controls the level of detail to show. * Setting <code>true</code> will output the array in full. Setting * <code>false</code> will output a summary, typically the size of * the array. 
* * @param fieldName the field name * @param array the array to add to the <code>toString</code> * @param fullDetail <code>true</code> for detail, <code>false</code> * for summary info * @return this */ public ToStringBuilder append(final String fieldName, final short[] array, final boolean fullDetail) { style.append(buffer, fieldName, array, Boolean.valueOf(fullDetail)); return this; } /** * <p>Appends with the same format as the default <code>Object toString() * </code> method. Appends the class name followed by * {@link System#identityHashCode(java.lang.Object)}.</p> * * @param srcObject the <code>Object</code> whose class name and id to output * @return this * @since 2.0 */ public ToStringBuilder appendAsObjectToString(final Object srcObject) { ObjectUtils.identityToString(this.getStringBuffer(), srcObject); return this; } //---------------------------------------------------------------------------- /** * <p>Append the <code>toString</code> from the superclass.</p> * * <p>This method assumes that the superclass uses the same <code>ToStringStyle</code> * as this one.</p> * * <p>If <code>superToString</code> is <code>null</code>, no change is made.</p> * * @param superToString the result of <code>super.toString()</code> * @return this * @since 2.0 */ public ToStringBuilder appendSuper(final String superToString) { if (superToString != null) { style.appendSuper(buffer, superToString); } return this; } /** * <p>Append the <code>toString</code> from another object.</p> * * <p>This method is useful where a class delegates most of the implementation of * its properties to another class. You can then call <code>toString()</code> on * the other class and pass the result into this method.</p> * * <pre> * private AnotherObject delegate; * private String fieldInThisClass; * * public String toString() { * return new ToStringBuilder(this). * appendToString(delegate.toString()). * append(fieldInThisClass). * toString(); * }</pre> * * <p>This method assumes that the other object uses the same <code>ToStringStyle</code> * as this one.</p> * * <p>If the <code>toString</code> is <code>null</code>, no change is made.</p> * * @param toString the result of <code>toString()</code> on another object * @return this * @since 2.0 */ public ToStringBuilder appendToString(final String toString) { if (toString != null) { style.appendToString(buffer, toString); } return this; } /** * <p>Returns the <code>Object</code> being output.</p> * * @return The object being output. * @since 2.0 */ public Object getObject() { return object; } /** * <p>Gets the <code>StringBuffer</code> being populated.</p> * * @return the <code>StringBuffer</code> being populated */ public StringBuffer getStringBuffer() { return buffer; } //---------------------------------------------------------------------------- /** * <p>Gets the <code>ToStringStyle</code> being used.</p> * * @return the <code>ToStringStyle</code> being used * @since 2.0 */ public ToStringStyle getStyle() { return style; } /** * <p>Returns the built <code>toString</code>.</p> * * <p>This method appends the end of data indicator, and can only be called once. 
* Use {@link #getStringBuffer} to get the current string state.</p> * * <p>If the object is <code>null</code>, the style's <code>nullText</code> is returned.</p> * * @return the String <code>toString</code> */ @Override public String toString() { if (this.getObject() == null) { this.getStringBuffer().append(this.getStyle().getNullText()); } else { style.appendEnd(this.getStringBuffer(), this.getObject()); } return this.getStringBuffer().toString(); } /** * Returns the String that was built as an object representation. The * default implementation utilizes the {@link #toString()} implementation. * * @return the String <code>toString</code> * * @see #toString() * * @since 3.0 */ @Override public String build() { return toString(); } }
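The Javadoc above covers field-by-field appends, appending the superclass toString, and style selection; a minimal sketch tying those pieces together follows. The Person and Employee classes and the choice of ToStringStyle.SHORT_PREFIX_STYLE are illustrative assumptions, not part of the source file above.

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

// Hypothetical classes used only to illustrate the builder API documented above.
class Person {
    String name;
    int age;

    @Override
    public String toString() {
        // Passing an explicit style avoids depending on the mutable global default.
        return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
                .append("name", name)
                .append("age", age)
                .toString();
    }
}

class Employee extends Person {
    String company;

    @Override
    public String toString() {
        // appendSuper assumes the superclass toString was built with the same style.
        return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
                .appendSuper(super.toString())
                .append("company", company)
                .toString();
    }
}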
package com.virtualfactory.entity; import com.virtualfactory.utils.ObjectState; import com.virtualfactory.utils.Unit; import com.virtualfactory.utils.Direction; import com.virtualfactory.utils.Utils; import com.virtualfactory.engine.GameEngine; import com.virtualfactory.exceptions.ExceededCapacityException; import com.virtualfactory.exceptions.InsufficientPartsException; /** * * */ public class E_Bucket { private int idBucket; private int idStation; private int capacity = 0; private Unit unit; private int size = 0; private int idPart; private Direction direction; private int unitsToArrive=0; private int unitsToRemove=0; private int currentLocationX; private int currentLocationZ; private int oldSize = 0; private double totalCost = 0; private double costPerHour; private ObjectState state; private E_Game initialGame = null; private E_Game finalGame = null; private E_Game currentGame = null; public boolean activateLaterDeactivation = false; private GameEngine gameEngine = null; public E_Bucket(int idBucket, int idStation, int capacity, Unit unit, int size, int idPart, Direction direction, int unitsToArrive) { this.idBucket = idBucket; this.idStation = idStation; this.capacity = capacity; this.unit = unit; this.size = size; this.idPart = idPart; this.direction = direction; this.unitsToArrive = unitsToArrive; } public E_Bucket() { } public GameEngine getGameEngine() { return gameEngine; } public void setGameEngine(GameEngine gameEngine) { this.gameEngine = gameEngine; } public double getCostPerHour() { return costPerHour; } public void setCostPerHour(double costPerHour) { this.costPerHour = costPerHour; } private void setInitialTime(){ if (initialGame != null && finalGame != null){ double activeMin = (double)initialGame.getDifferenceTimes_Minutes(finalGame); totalCost += Utils.formatValue2Dec((activeMin/60)*costPerHour); } initialGame = new E_Game(); initialGame.setCurrentMinute(currentGame.getCurrentMinute()); initialGame.setCurrentHour(currentGame.getCurrentHour()); initialGame.setCurrentDay(currentGame.getCurrentDay()); initialGame.setCurrentMonth(currentGame.getCurrentMonth()); } private void setFinalTime(){ finalGame = new E_Game(); finalGame.setCurrentMinute(currentGame.getCurrentMinute()); finalGame.setCurrentHour(currentGame.getCurrentHour()); finalGame.setCurrentDay(currentGame.getCurrentDay()); finalGame.setCurrentMonth(currentGame.getCurrentMonth()); } public double updateAndGetTotalCost(){ double activeMin = 0; if (initialGame == null) return totalCost; if (state.equals(ObjectState.Active)) activeMin = (double)initialGame.getDifferenceTimes_Minutes(currentGame); else activeMin = (double)initialGame.getDifferenceTimes_Minutes(finalGame); return totalCost + Utils.formatValue2Dec((activeMin/60)*costPerHour); } public ObjectState getState() { return state; } public void setState(ObjectState state) { this.state = state; if (state.equals(ObjectState.Active)) setInitialTime(); else setFinalTime(); if (gameEngine.getGameData().getMapUserStation() != null) if (gameEngine.getGameData().getMapUserStation().get(idStation) != null) gameEngine.getGameData().getMapUserStation().get(idStation).updateBucketsArrayState(this); } public double getTotalCost() { return totalCost; } public void setTotalCost(double totalCost) { this.totalCost = totalCost; } public E_Game getCurrentGame() { return currentGame; } public void setCurrentGame(E_Game currentGame) { this.currentGame = currentGame; } public int getOldSize() { return oldSize; } public void setOldSize(int oldSize) { this.oldSize = oldSize; } public 
int getCurrentLocationX() { return currentLocationX; } public void setCurrentLocationX(int currentLocationX) { this.currentLocationX = currentLocationX; } public int getCurrentLocationZ() { return currentLocationZ; } public void setCurrentLocationZ(int currentLocationZ) { this.currentLocationZ = currentLocationZ; } public int getCapacity() { return capacity; } public void setCapacity(int capacity) { this.capacity = capacity; } public Direction getDirection() { return direction; } public void setDirection(Direction direction) { this.direction = direction; } public int getIdBucket() { return idBucket; } public void setIdBucket(int idBucket) { this.idBucket = idBucket; } public int getIdPart() { return idPart; } public void setIdPart(int idPart) { this.idPart = idPart; } public int getIdStation() { return idStation; } public void setIdStation(int idStation) { this.idStation = idStation; } public int getSize() { return size; } public void setSize(int size) { this.size = size; if (this.size == 0 && activateLaterDeactivation){ activateLaterDeactivation = false; setState(ObjectState.Inactive); } } public Unit getUnit() { return unit; } public void setUnit(Unit unit) { this.unit = unit; } public int getUnitsToArrive() { return unitsToArrive; } public void setUnitsToArrive(int unitsToArrive) { this.unitsToArrive = unitsToArrive; } public int getUnitsToRemove() { return unitsToRemove; } public void setUnitsToRemove(int unitsToRemove) { this.unitsToRemove = unitsToRemove; } /**Adds the given amount of parts to this bucket. * @param amount - the number of parts to add. * @throws ExceededCapacityException if the current amount of parts * plus the amount to add exceeds the bucket's max capacity. */ private void add(int amount) throws ExceededCapacityException{ int newSize = this.size + amount; if(newSize <= this.capacity) this.size = newSize; else throw new ExceededCapacityException("This bucket's capacity cannot exceed "+this.capacity); } /**Subtracts the given amount of parts from this bucket. * @param amount - the number of parts to subtract. * @throws InsufficientPartsException if there are fewer parts in the * bucket than the amount to subtract. */ public void subtract(int amount) throws InsufficientPartsException{ int newSize = this.size - amount; if(newSize >= 0) this.size = newSize; else throw new InsufficientPartsException("There are fewer than " + amount + " parts in the bucket. Parts in bucket: " + this.size); if (this.size == 0 && activateLaterDeactivation){ activateLaterDeactivation = false; setState(ObjectState.Inactive); } } /**Adds the given amount of parts to the number of units expected to arrive. * @param amount - the additional number of parts that will arrive later. * @throws ExceededCapacityException if the current amount of parts * plus the new amount to arrive would exceed the bucket's max capacity. */ public void addUnitsToArrive(int amount) throws ExceededCapacityException{ int newToArrive = this.unitsToArrive + amount; if(newToArrive <= this.capacity) this.unitsToArrive = newToArrive; else throw new ExceededCapacityException("This bucket's capacity cannot exceed "+this.capacity); } /**Subtracts the given amount of parts from the number of units expected to arrive, * then adds it to the bucket. * @param amount - the number of parts that just arrived. * @throws InsufficientPartsException if there are fewer parts reported to * arrive than the amount to subtract. * @throws ExceededCapacityException if the current amount of parts plus * the amount to add exceeds the bucket's max capacity. 
*/ public void arrivedParts(int amount) throws InsufficientPartsException, ExceededCapacityException{ int newToArrive = this.unitsToArrive - amount; if(newToArrive >= 0){ this.add(amount); this.unitsToArrive = newToArrive; } else throw new InsufficientPartsException("There are only " + this.unitsToArrive + " parts reported to arrive."); } }
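A minimal sketch of the part flow that the E_Bucket Javadoc above describes: parts are first reported as inbound with addUnitsToArrive, materialized with arrivedParts, and consumed with subtract. The BucketFlowExample class is a hypothetical driver added for illustration; game-engine wiring, state changes, and costing are deliberately left out because they require a GameEngine instance.

import com.virtualfactory.entity.E_Bucket;
import com.virtualfactory.exceptions.ExceededCapacityException;
import com.virtualfactory.exceptions.InsufficientPartsException;

public class BucketFlowExample {
    public static void main(String[] args) {
        E_Bucket bucket = new E_Bucket();
        bucket.setCapacity(10);          // at most 10 parts may be held or expected

        try {
            bucket.addUnitsToArrive(4);  // 4 parts reported as inbound
            bucket.arrivedParts(4);      // they arrive: size becomes 4, unitsToArrive drops to 0
            bucket.subtract(3);          // 3 parts consumed: size becomes 1
        } catch (ExceededCapacityException | InsufficientPartsException e) {
            // Thrown when capacity would be exceeded or more parts are removed than are present.
            System.err.println(e.getMessage());
        }
    }
}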
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.indices.mapper.MapperRegistry; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.unmodifiableMap; public class MapperService extends AbstractIndexComponent implements Closeable { /** * The reason why a mapping is being merged. */ public enum MergeReason { /** * Create or update a mapping. */ MAPPING_UPDATE, /** * Recovery of an existing mapping, for instance because of a restart, * if a shard was moved to a different node or for administrative * purposes. 
*/ MAPPING_RECOVERY; } public static final String DEFAULT_MAPPING = "_default_"; public static final Setting<Long> INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, Property.Dynamic, Property.IndexScope); public static final Setting<Long> INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING = Setting.longSetting("index.mapping.total_fields.limit", 1000L, 0, Property.Dynamic, Property.IndexScope); public static final Setting<Long> INDEX_MAPPING_DEPTH_LIMIT_SETTING = Setting.longSetting("index.mapping.depth.limit", 20L, 1, Property.Dynamic, Property.IndexScope); public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; public static final Setting<Boolean> INDEX_MAPPER_DYNAMIC_SETTING = Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, Property.Dynamic, Property.IndexScope); private static ObjectHashSet<String> META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" ); @Deprecated public static final String PERCOLATOR_LEGACY_TYPE_NAME = ".percolator"; private final IndexAnalyzers indexAnalyzers; /** * Will create types automatically if they do not exists in the mapping definition yet */ private final boolean dynamic; private volatile String defaultMappingSource; private volatile Map<String, DocumentMapper> mappers = emptyMap(); private volatile FieldTypeLookup fieldTypes; private volatile Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>(); private boolean hasNested = false; // updated dynamically to true when a nested object is added private boolean allEnabled = false; // updated dynamically to true when _all is enabled private final DocumentMapperParser documentParser; private final MapperAnalyzerWrapper indexAnalyzer; private final MapperAnalyzerWrapper searchAnalyzer; private final MapperAnalyzerWrapper searchQuoteAnalyzer; private volatile Map<String, MappedFieldType> unmappedFieldTypes = emptyMap(); private volatile Set<String> parentTypes = emptySet(); final MapperRegistry mapperRegistry; public MapperService(IndexSettings indexSettings, IndexAnalyzers indexAnalyzers, NamedXContentRegistry xContentRegistry, SimilarityService similarityService, MapperRegistry mapperRegistry, Supplier<QueryShardContext> queryShardContextSupplier) { super(indexSettings); this.indexAnalyzers = indexAnalyzers; this.fieldTypes = new FieldTypeLookup(); this.documentParser = new DocumentMapperParser(indexSettings, this, indexAnalyzers, xContentRegistry, similarityService, mapperRegistry, queryShardContextSupplier); this.indexAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultIndexAnalyzer(), p -> p.indexAnalyzer()); this.searchAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchAnalyzer(), p -> p.searchAnalyzer()); this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(indexAnalyzers.getDefaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer()); this.mapperRegistry = mapperRegistry; this.dynamic = this.indexSettings.getValue(INDEX_MAPPER_DYNAMIC_SETTING); defaultMappingSource = "{\"_default_\":{}}"; if (logger.isTraceEnabled()) { logger.trace("using dynamic[{}], default mapping source[{}]", dynamic, defaultMappingSource); } else if (logger.isDebugEnabled()) { logger.debug("using dynamic[{}]", dynamic); } } public boolean hasNested() { return this.hasNested; } /** * Returns true if the "_all" field is enabled on any type. 
*/ public boolean allEnabled() { return this.allEnabled; } /** * Returns an immutable iterator over the current document mappers. * * @param includingDefaultMapping indicates whether the iterator should contain the {@link #DEFAULT_MAPPING} document mapper. * As this is not really an active type, you would typically set this to false. */ public Iterable<DocumentMapper> docMappers(final boolean includingDefaultMapping) { return () -> { final Collection<DocumentMapper> documentMappers; if (includingDefaultMapping) { documentMappers = mappers.values(); } else { documentMappers = mappers.values().stream().filter(mapper -> !DEFAULT_MAPPING.equals(mapper.type())).collect(Collectors.toList()); } return Collections.unmodifiableCollection(documentMappers).iterator(); }; } public IndexAnalyzers getIndexAnalyzers() { return this.indexAnalyzers; } public DocumentMapperParser documentMapperParser() { return this.documentParser; } public static Map<String, Object> parseMapping(NamedXContentRegistry xContentRegistry, String mappingSource) throws Exception { try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(xContentRegistry, mappingSource)) { return parser.map(); } } /** * Update the mapping by merging only the metadata that differs between the received and stored entries. */ public boolean updateMapping(IndexMetaData indexMetaData) throws IOException { assert indexMetaData.getIndex().equals(index()) : "index mismatch: expected " + index() + " but was " + indexMetaData.getIndex(); // go over and add the relevant mappings (or update them) final Set<String> existingMappers = new HashSet<>(mappers.keySet()); final Map<String, DocumentMapper> updatedEntries; try { // only update entries if needed updatedEntries = internalMerge(indexMetaData, MergeReason.MAPPING_RECOVERY, true, true); } catch (Exception e) { logger.warn((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage("[{}] failed to apply mappings", index()), e); throw e; } boolean requireRefresh = false; for (DocumentMapper documentMapper : updatedEntries.values()) { String mappingType = documentMapper.type(); CompressedXContent incomingMappingSource = indexMetaData.mapping(mappingType).source(); String op = existingMappers.contains(mappingType) ? "updated" : "added"; if (logger.isDebugEnabled() && incomingMappingSource.compressed().length < 512) { logger.debug("[{}] {} mapping [{}], source [{}]", index(), op, mappingType, incomingMappingSource.string()); } else if (logger.isTraceEnabled()) { logger.trace("[{}] {} mapping [{}], source [{}]", index(), op, mappingType, incomingMappingSource.string()); } else { logger.debug("[{}] {} mapping [{}] (source suppressed due to length, use TRACE level if needed)", index(), op, mappingType); } // a mapping refresh can happen when the parsing/merging of the mapping from the metadata doesn't result in the same // mapping; in this case, we send to the master to refresh its own version of the mappings (to conform with the // merged version of it, which it does when refreshing the mappings), and warn log it. 
if (documentMapper(mappingType).mappingSource().equals(incomingMappingSource) == false) { logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index(), mappingType, incomingMappingSource, documentMapper(mappingType).mappingSource()); requireRefresh = true; } } return requireRefresh; } public void merge(Map<String, Map<String, Object>> mappings, MergeReason reason, boolean updateAllTypes) { Map<String, CompressedXContent> mappingSourcesCompressed = new LinkedHashMap<>(mappings.size()); for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) { try { mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string())); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); } } internalMerge(mappingSourcesCompressed, reason, updateAllTypes); } public void merge(IndexMetaData indexMetaData, MergeReason reason, boolean updateAllTypes) { internalMerge(indexMetaData, reason, updateAllTypes, false); } public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason, boolean updateAllTypes) { return internalMerge(Collections.singletonMap(type, mappingSource), reason, updateAllTypes).get(type); } private synchronized Map<String, DocumentMapper> internalMerge(IndexMetaData indexMetaData, MergeReason reason, boolean updateAllTypes, boolean onlyUpdateIfNeeded) { Map<String, CompressedXContent> map = new LinkedHashMap<>(); for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) { MappingMetaData mappingMetaData = cursor.value; if (onlyUpdateIfNeeded) { DocumentMapper existingMapper = documentMapper(mappingMetaData.type()); if (existingMapper == null || mappingMetaData.source().equals(existingMapper.mappingSource()) == false) { map.put(mappingMetaData.type(), mappingMetaData.source()); } } else { map.put(mappingMetaData.type(), mappingMetaData.source()); } } return internalMerge(map, reason, updateAllTypes); } private synchronized Map<String, DocumentMapper> internalMerge(Map<String, CompressedXContent> mappings, MergeReason reason, boolean updateAllTypes) { DocumentMapper defaultMapper = null; String defaultMappingSource = null; if (mappings.containsKey(DEFAULT_MAPPING)) { // verify we can parse it // NOTE: never apply the default here try { defaultMapper = documentParser.parse(DEFAULT_MAPPING, mappings.get(DEFAULT_MAPPING)); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, DEFAULT_MAPPING, e.getMessage()); } try { defaultMappingSource = mappings.get(DEFAULT_MAPPING).string(); } catch (IOException e) { throw new ElasticsearchGenerationException("failed to un-compress", e); } } final String defaultMappingSourceOrLastStored; if (defaultMappingSource != null) { defaultMappingSourceOrLastStored = defaultMappingSource; } else { defaultMappingSourceOrLastStored = this.defaultMappingSource; } List<DocumentMapper> documentMappers = new ArrayList<>(); for (Map.Entry<String, CompressedXContent> entry : mappings.entrySet()) { String type = entry.getKey(); if (type.equals(DEFAULT_MAPPING)) { continue; } final boolean applyDefault = // the default was already applied if we are recovering reason != MergeReason.MAPPING_RECOVERY // only apply the default mapping if we don't have the type yet && mappers.containsKey(type) == false; try { DocumentMapper documentMapper = documentParser.parse(type, entry.getValue(), applyDefault ? 
defaultMappingSourceOrLastStored : null); documentMappers.add(documentMapper); } catch (Exception e) { throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); } } return internalMerge(defaultMapper, defaultMappingSource, documentMappers, reason, updateAllTypes); } private synchronized Map<String, DocumentMapper> internalMerge(@Nullable DocumentMapper defaultMapper, @Nullable String defaultMappingSource, List<DocumentMapper> documentMappers, MergeReason reason, boolean updateAllTypes) { boolean hasNested = this.hasNested; boolean allEnabled = this.allEnabled; Map<String, ObjectMapper> fullPathObjectMappers = this.fullPathObjectMappers; FieldTypeLookup fieldTypes = this.fieldTypes; Set<String> parentTypes = this.parentTypes; Map<String, DocumentMapper> mappers = new HashMap<>(this.mappers); Map<String, DocumentMapper> results = new LinkedHashMap<>(documentMappers.size() + 1); if (defaultMapper != null) { assert defaultMapper.type().equals(DEFAULT_MAPPING); mappers.put(DEFAULT_MAPPING, defaultMapper); results.put(DEFAULT_MAPPING, defaultMapper); } for (DocumentMapper mapper : documentMappers) { // check naming if (mapper.type().length() == 0) { throw new InvalidTypeNameException("mapping type name is empty"); } if (mapper.type().length() > 255) { throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]"); } if (mapper.type().charAt(0) == '_') { throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'"); } if (mapper.type().contains("#")) { throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include '#' in it"); } if (mapper.type().contains(",")) { throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it"); } if (mapper.type().equals(mapper.parentFieldMapper().type())) { throw new IllegalArgumentException("The [_parent.type] option can't point to the same type"); } if (typeNameStartsWithIllegalDot(mapper)) { throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'"); } // compute the merged DocumentMapper DocumentMapper oldMapper = mappers.get(mapper.type()); DocumentMapper newMapper; if (oldMapper != null) { newMapper = oldMapper.merge(mapper.mapping(), updateAllTypes); } else { newMapper = mapper; } // check basic sanity of the new mapping List<ObjectMapper> objectMappers = new ArrayList<>(); List<FieldMapper> fieldMappers = new ArrayList<>(); Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers); MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers); checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers, fullPathObjectMappers, fieldTypes); checkObjectsCompatibility(objectMappers, updateAllTypes, fullPathObjectMappers); // update lookup data-structures // this will in particular make sure that the merged fields are compatible with other types fieldTypes = fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, updateAllTypes); for (ObjectMapper objectMapper : objectMappers) { if (fullPathObjectMappers == this.fullPathObjectMappers) { fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers); } fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper); if (objectMapper.nested().isNested()) { hasNested = true; } } if (reason == MergeReason.MAPPING_UPDATE) { // this check will only be performed on the master node 
when there is // a call to the update mapping API. For all other cases like // the master node restoring mappings from disk or data nodes // deserializing cluster state that was sent by the master node, // this check will be skipped. checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size()); } if (oldMapper == null && newMapper.parentFieldMapper().active()) { if (parentTypes == this.parentTypes) { parentTypes = new HashSet<>(this.parentTypes); } parentTypes.add(mapper.parentFieldMapper().type()); } // this is only correct because types cannot be removed and we do not // allow to disable an existing _all field allEnabled |= mapper.allFieldMapper().enabled(); results.put(newMapper.type(), newMapper); mappers.put(newMapper.type(), newMapper); } if (reason == MergeReason.MAPPING_UPDATE) { // this check will only be performed on the master node when there is // a call to the update mapping API. For all other cases like // the master node restoring mappings from disk or data nodes // deserializing cluster state that was sent by the master node, // this check will be skipped. checkNestedFieldsLimit(fullPathObjectMappers); checkDepthLimit(fullPathObjectMappers.keySet()); } for (Map.Entry<String, DocumentMapper> entry : mappers.entrySet()) { if (entry.getKey().equals(DEFAULT_MAPPING)) { continue; } DocumentMapper documentMapper = entry.getValue(); // apply changes to the field types back DocumentMapper updatedDocumentMapper = documentMapper.updateFieldType(fieldTypes.fullNameToFieldType); if (updatedDocumentMapper != documentMapper) { // update both mappers and result entry.setValue(updatedDocumentMapper); if (results.containsKey(updatedDocumentMapper.type())) { results.put(updatedDocumentMapper.type(), updatedDocumentMapper); } } } // make structures immutable mappers = Collections.unmodifiableMap(mappers); results = Collections.unmodifiableMap(results); parentTypes = Collections.unmodifiableSet(parentTypes); fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); // commit the change if (defaultMappingSource != null) { this.defaultMappingSource = defaultMappingSource; } this.mappers = mappers; this.fieldTypes = fieldTypes; this.hasNested = hasNested; this.fullPathObjectMappers = fullPathObjectMappers; this.parentTypes = parentTypes; this.allEnabled = allEnabled; assert assertMappersShareSameFieldType(); assert results.values().stream().allMatch(this::assertSerialization); return results; } private boolean assertMappersShareSameFieldType() { for (DocumentMapper mapper : docMappers(false)) { List<FieldMapper> fieldMappers = new ArrayList<>(); Collections.addAll(fieldMappers, mapper.mapping().metadataMappers); MapperUtils.collect(mapper.root(), new ArrayList<>(), fieldMappers); for (FieldMapper fieldMapper : fieldMappers) { assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name(); } } return true; } private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) { boolean legacyIndex = getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha1); if (legacyIndex) { return mapper.type().startsWith(".") && !PERCOLATOR_LEGACY_TYPE_NAME.equals(mapper.type()); } else { return mapper.type().startsWith("."); } } private boolean assertSerialization(DocumentMapper mapper) { // capture the source now, it may change due to concurrent parsing final CompressedXContent mappingSource = mapper.mappingSource(); DocumentMapper newMapper = parse(mapper.type(), mappingSource, false); if (newMapper.mappingSource().equals(mappingSource) == 
false) { throw new IllegalStateException("DocumentMapper serialization result is different from source. \n--> Source [" + mappingSource + "]\n--> Result [" + newMapper.mappingSource() + "]"); } return true; } private static void checkFieldUniqueness(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, Map<String, ObjectMapper> fullPathObjectMappers, FieldTypeLookup fieldTypes) { // first check within mapping final Set<String> objectFullNames = new HashSet<>(); for (ObjectMapper objectMapper : objectMappers) { final String fullPath = objectMapper.fullPath(); if (objectFullNames.add(fullPath) == false) { throw new IllegalArgumentException("Object mapper [" + fullPath + "] is defined twice in mapping for type [" + type + "]"); } } final Set<String> fieldNames = new HashSet<>(); for (FieldMapper fieldMapper : fieldMappers) { final String name = fieldMapper.name(); if (objectFullNames.contains(name)) { throw new IllegalArgumentException("Field [" + name + "] is defined both as an object and a field in [" + type + "]"); } else if (fieldNames.add(name) == false) { throw new IllegalArgumentException("Field [" + name + "] is defined twice in [" + type + "]"); } } // then check other types for (String fieldName : fieldNames) { if (fullPathObjectMappers.containsKey(fieldName)) { throw new IllegalArgumentException("[" + fieldName + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types"); } } for (String objectPath : objectFullNames) { if (fieldTypes.get(objectPath) != null) { throw new IllegalArgumentException("[" + objectPath + "] is defined as an object in mapping [" + type + "] but this name is already used for a field in other types"); } } } private static void checkObjectsCompatibility(Collection<ObjectMapper> objectMappers, boolean updateAllTypes, Map<String, ObjectMapper> fullPathObjectMappers) { for (ObjectMapper newObjectMapper : objectMappers) { ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath()); if (existingObjectMapper != null) { // simulate a merge and ignore the result, we are just interested // in exceptions here existingObjectMapper.merge(newObjectMapper, updateAllTypes); } } } private void checkNestedFieldsLimit(Map<String, ObjectMapper> fullPathObjectMappers) { long allowedNestedFields = indexSettings.getValue(INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING); long actualNestedFields = 0; for (ObjectMapper objectMapper : fullPathObjectMappers.values()) { if (objectMapper.nested().isNested()) { actualNestedFields++; } } if (actualNestedFields > allowedNestedFields) { throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().getName() + "] has been exceeded"); } } private void checkTotalFieldsLimit(long totalMappers) { long allowedTotalFields = indexSettings.getValue(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING); if (allowedTotalFields < totalMappers) { throw new IllegalArgumentException("Limit of total fields [" + allowedTotalFields + "] in index [" + index().getName() + "] has been exceeded"); } } private void checkDepthLimit(Collection<String> objectPaths) { final long maxDepth = indexSettings.getValue(INDEX_MAPPING_DEPTH_LIMIT_SETTING); for (String objectPath : objectPaths) { checkDepthLimit(objectPath, maxDepth); } } private void checkDepthLimit(String objectPath, long maxDepth) { int numDots = 0; for (int i = 0; i < objectPath.length(); ++i) { if (objectPath.charAt(i) == '.') { numDots += 1; } } final int 
depth = numDots + 2; if (depth > maxDepth) { throw new IllegalArgumentException("Limit of mapping depth [" + maxDepth + "] in index [" + index().getName() + "] has been exceeded due to object field [" + objectPath + "]"); } } public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { return documentParser.parse(mappingType, mappingSource, applyDefault ? defaultMappingSource : null); } public boolean hasMapping(String mappingType) { return mappers.containsKey(mappingType); } /** * Return the set of concrete types that have a mapping. * NOTE: this does not return the default mapping. */ public Collection<String> types() { final Set<String> types = new HashSet<>(mappers.keySet()); types.remove(DEFAULT_MAPPING); return Collections.unmodifiableSet(types); } /** * Return the {@link DocumentMapper} for the given type. By using the special * {@value #DEFAULT_MAPPING} type, you can get a {@link DocumentMapper} for * the default mapping. */ public DocumentMapper documentMapper(String type) { return mappers.get(type); } /** * Returns the document mapper created, including a mapping update if the * type has been dynamically created. */ public DocumentMapperForType documentMapperWithAutoCreate(String type) { DocumentMapper mapper = mappers.get(type); if (mapper != null) { return new DocumentMapperForType(mapper, null); } if (!dynamic) { throw new TypeMissingException(index(), new IllegalStateException("trying to auto create mapping, but dynamic mapping is disabled"), type); } mapper = parse(type, null, true); return new DocumentMapperForType(mapper, mapper.mapping()); } /** * Returns the {@link MappedFieldType} for the give fullName. * * If multiple types have fields with the same full name, the first is returned. */ public MappedFieldType fullName(String fullName) { return fieldTypes.get(fullName); } /** * Returns all the fields that match the given pattern. If the pattern is prefixed with a type * then the fields will be returned with a type prefix. */ public Collection<String> simpleMatchToIndexNames(String pattern) { if (Regex.isSimpleMatchPattern(pattern) == false) { // no wildcards return Collections.singletonList(pattern); } return fieldTypes.simpleMatchToFullName(pattern); } public ObjectMapper getObjectMapper(String name) { return fullPathObjectMappers.get(name); } /** * Given a type (eg. long, string, ...), return an anonymous field mapper that can be used for search operations. */ public MappedFieldType unmappedFieldType(String type) { if (type.equals("string")) { deprecationLogger.deprecated("[unmapped_type:string] should be replaced with [unmapped_type:keyword]"); type = "keyword"; } MappedFieldType fieldType = unmappedFieldTypes.get(type); if (fieldType == null) { final Mapper.TypeParser.ParserContext parserContext = documentMapperParser().parserContext(type); Mapper.TypeParser typeParser = parserContext.typeParser(type); if (typeParser == null) { throw new IllegalArgumentException("No mapper found for type [" + type + "]"); } final Mapper.Builder<?, ?> builder = typeParser.parse("__anonymous_" + type, emptyMap(), parserContext); final BuilderContext builderContext = new BuilderContext(indexSettings.getSettings(), new ContentPath(1)); fieldType = ((FieldMapper)builder.build(builderContext)).fieldType(); // There is no need to synchronize writes here. 
In the case of concurrent access, we could just // compute some mappers several times, which is not a big deal Map<String, MappedFieldType> newUnmappedFieldTypes = new HashMap<>(); newUnmappedFieldTypes.putAll(unmappedFieldTypes); newUnmappedFieldTypes.put(type, fieldType); unmappedFieldTypes = unmodifiableMap(newUnmappedFieldTypes); } return fieldType; } public Analyzer indexAnalyzer() { return this.indexAnalyzer; } public Analyzer searchAnalyzer() { return this.searchAnalyzer; } public Analyzer searchQuoteAnalyzer() { return this.searchQuoteAnalyzer; } public Set<String> getParentTypes() { return parentTypes; } @Override public void close() throws IOException { indexAnalyzers.close(); } /** * @return Whether a field is a metadata field. */ public static boolean isMetadataField(String fieldName) { return META_FIELDS.contains(fieldName); } public static String[] getAllMetaFields() { return META_FIELDS.toArray(String.class); } /** An analyzer wrapper that can lookup fields within the index mappings */ final class MapperAnalyzerWrapper extends DelegatingAnalyzerWrapper { private final Analyzer defaultAnalyzer; private final Function<MappedFieldType, Analyzer> extractAnalyzer; MapperAnalyzerWrapper(Analyzer defaultAnalyzer, Function<MappedFieldType, Analyzer> extractAnalyzer) { super(Analyzer.PER_FIELD_REUSE_STRATEGY); this.defaultAnalyzer = defaultAnalyzer; this.extractAnalyzer = extractAnalyzer; } @Override protected Analyzer getWrappedAnalyzer(String fieldName) { MappedFieldType fieldType = fullName(fieldName); if (fieldType != null) { Analyzer analyzer = extractAnalyzer.apply(fieldType); if (analyzer != null) { return analyzer; } } return defaultAnalyzer; } } }
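/*
 * Illustrative sketch (not part of the original source): unmappedFieldType(..)
 * above notes that "there is no need to synchronize writes" because it uses a
 * copy-on-write idiom: on a cache miss it builds a fresh map from the old one,
 * adds the new entry, and swaps in an unmodifiable copy, so concurrent callers
 * may compute the same value more than once, which is harmless. The class,
 * field and method names below are hypothetical; the sketch publishes the map
 * through a volatile field for visibility.
 */
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class CopyOnWriteCacheSketch {
    private volatile Map<String, String> cache = Collections.emptyMap();

    String getOrCompute(String key) {
        String value = cache.get(key);
        if (value == null) {
            value = compute(key);                         // may run more than once under contention
            Map<String, String> copy = new HashMap<String, String>(cache);
            copy.put(key, value);
            cache = Collections.unmodifiableMap(copy);    // publish the new immutable snapshot
        }
        return value;
    }

    private String compute(String key) {
        return key.toUpperCase();                         // stand-in for building a field type
    }
}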
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ctakes.ytex.kernel; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; import java.util.HashMap; import java.util.InvalidPropertiesFormatException; import java.util.Map; import java.util.Properties; import java.util.SortedMap; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import javax.sql.DataSource; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowCallbackHandler; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.jdbc.core.simple.SimpleJdbcTemplate; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; public class SparseDataExporterImpl implements SparseDataExporter { private static final Log log = LogFactory .getLog(SparseDataExporterImpl.class); @SuppressWarnings("static-access") public static void main(String args[]) throws IOException { Options options = new Options(); options.addOption(OptionBuilder .withArgName("prop") .hasArg() .isRequired() .withDescription( "property file with queries and other parameters.") .create("prop")); options.addOption(OptionBuilder.withArgName("type").hasArg() .isRequired() .withDescription("export format; valid values: weka, libsvm") .create("type")); if (args.length == 0) printHelp(options); else { try { CommandLineParser parser = new GnuParser(); CommandLine line = parser.parse(options, args); String propFile = line.getOptionValue("prop"); String format = line.getOptionValue("type"); SparseDataExporter exporter = KernelContextHolder .getApplicationContext().getBean( SparseDataExporter.class); exporter.exportData(propFile, format); } catch (ParseException pe) { printHelp(options); } } } private static void printHelp(Options options) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("java " + SparseDataExporterImpl.class.getName() + " export sparse data", options); } protected JdbcTemplate jdbcTemplate; protected KernelUtil kernelUtil; protected NamedParameterJdbcTemplate namedJdbcTemplate; protected Map<String, SparseDataFormatterFactory> nameToFormatterMap = new HashMap<String, SparseDataFormatterFactory>(); protected SimpleJdbcTemplate simpleJdbcTemplate; 
protected TransactionTemplate txTemplateNew; public SparseDataExporterImpl() { super(); } protected void addNominalWordToInstance(SparseData sparseData, long instanceId, String word, String wordValue) { // add the instance id to the set of instance ids if necessary if (!sparseData.getInstanceIds().contains(instanceId)) sparseData.getInstanceIds().add(instanceId); SortedMap<String, String> instanceWords = sparseData .getInstanceNominalWords().get(instanceId); SortedSet<String> wordValueSet = sparseData.getNominalWordValueMap() .get(word); if (instanceWords == null) { instanceWords = new TreeMap<String, String>(); sparseData.getInstanceNominalWords().put(instanceId, instanceWords); } if (wordValueSet == null) { wordValueSet = new TreeSet<String>(); sparseData.getNominalWordValueMap().put(word, wordValueSet); } // add the word-value for the instance instanceWords.put(word, wordValue); // add the value to the set of valid values wordValueSet.add(wordValue); } protected void addNumericWordToInstance(SparseData sparseData, long instanceId, String word, double wordValue) { // add the instance id to the set of instance ids if necessary if (!sparseData.getInstanceIds().contains(instanceId)) sparseData.getInstanceIds().add(instanceId); // add the numeric word to the map of words for this document SortedMap<String, Double> words = sparseData.getInstanceNumericWords() .get(instanceId); if (words == null) { words = new TreeMap<String, Double>(); sparseData.getInstanceNumericWords().put(instanceId, words); } words.put(word, wordValue); sparseData.getNumericWords().add(word); } /* * (non-Javadoc) * * @see org.apache.ctakes.ytex.kernel.SparseDataExporter#exportData(org.apache.ctakes.ytex.kernel.SparseData, * org.apache.ctakes.ytex.kernel.SparseDataFormatter, java.util.Properties) */ public void exportData(InstanceData instanceLabel, SparseDataFormatter formatter, Properties properties, BagOfWordsDecorator bDecorator) throws IOException { String scope = properties.getProperty("scope", null); SparseData sparseData = null; if (scope == null) { sparseData = this.loadData(instanceLabel, properties.getProperty("numericWordQuery"), properties.getProperty("nominalWordQuery"), properties.getProperty("prepareScript"), properties.getProperty("prepareScriptDelimiter", ";"), bDecorator, null, null, null); } formatter.initializeExport(instanceLabel, properties, sparseData); for (String label : instanceLabel.getLabelToInstanceMap().keySet()) { if ("label".equals(scope)) { sparseData = this.loadData(instanceLabel, properties.getProperty("numericWordQuery"), properties.getProperty("nominalWordQuery"), properties.getProperty("prepareScript"), properties.getProperty("prepareScriptDelimiter", ";"), bDecorator, label, null, null); } formatter .initializeLabel(label, instanceLabel .getLabelToInstanceMap().get(label), properties, sparseData); for (int run : instanceLabel.getLabelToInstanceMap().get(label) .keySet()) { for (int fold : instanceLabel.getLabelToInstanceMap() .get(label).get(run).keySet()) { if (log.isInfoEnabled() && (label.length() > 0 || run > 0 || fold > 0)) log.info("exporting, label " + label + " run " + run + " fold " + fold); if ("fold".equals(scope)) { sparseData = this.loadData(instanceLabel, properties .getProperty("numericWordQuery"), properties .getProperty("nominalWordQuery"), properties .getProperty("prepareScript"), properties .getProperty("prepareScriptDelimiter", ";"), bDecorator, label, fold, run); } formatter.initializeFold(sparseData, label, run, fold, 
instanceLabel.getLabelToInstanceMap().get(label) .get(run).get(fold)); for (boolean train : instanceLabel.getLabelToInstanceMap() .get(label).get(run).get(fold).keySet()) { formatter.exportFold(sparseData, instanceLabel .getLabelToInstanceMap().get(label).get(run) .get(fold).get(train), train, label, 0 == run ? null : run, 0 == fold ? null : fold); } formatter.clearFold(); } } formatter.clearLabel(); } } /* * (non-Javadoc) * * @see org.apache.ctakes.ytex.kernel.SparseDataExporter#exportData(java.util.Properties, * org.apache.ctakes.ytex.kernel.SparseDataFormatter, org.apache.ctakes.ytex.kernel.BagOfWordsDecorator) */ @Override public void exportData(Properties props, SparseDataFormatter formatter, BagOfWordsDecorator bDecorator) throws IOException { InstanceData instanceLabel = this.getKernelUtil().loadInstances( props.getProperty("instanceClassQuery")); if (props.containsKey("folds")) { this.getKernelUtil().generateFolds(instanceLabel, props); } // load label - instance id maps // sparseData.setLabelToInstanceMap(this.getKernelUtil().loadInstances( // props.getProperty("instanceClassQuery"), // sparseData.getLabelToClassMap())); this.exportData(instanceLabel, formatter, props, bDecorator); // this.loadData(sparseData, // props.getProperty("numericWordQuery"), // props.getProperty("nominalWordQuery"), bDecorator); // this.exportData(sparseData, formatter, props); } /* * (non-Javadoc) * * @see org.apache.ctakes.ytex.kernel.SparseDataExporter#exportData(java.lang.String, * java.lang.String) */ @Override public void exportData(String propertiesFile, String format) throws IOException, InvalidPropertiesFormatException { Properties props = new Properties(); this.getKernelUtil().loadProperties(propertiesFile, props); this.exportData(props, nameToFormatterMap.get(format.toLowerCase()) .getFormatter(), null); } public DataSource getDataSource(DataSource ds) { return this.jdbcTemplate.getDataSource(); } public KernelUtil getKernelUtil() { return kernelUtil; } public Map<String, SparseDataFormatterFactory> getNameToFormatterMap() { return nameToFormatterMap; } /** * run the prepare script if defined. * * @param prepareScript * sequence of sql statements to be executed with named params. * @param prepareScriptDelimiter * delimiter separating the sql statements. * @param params * for named parameters in sql statements. */ protected void prepare(final String prepareScript, final String prepareScriptDelimiter, final Map<String, Object> params) { if (prepareScript != null && prepareScript.length() > 0) { String[] statements = prepareScript.split(prepareScriptDelimiter); // throw out empty lines for (String sql : statements) { if (sql != null && sql.trim().length() > 0) { this.namedJdbcTemplate.update(sql, params); } } } } /** * * @param sql * result set has 3 columns. 1st column - integer - instance id. * 2nd column - word. 3rd column - word value. * @param instanceWordMap * map of instance id to word-word value. * @param wordValueMap * map of word to valid values for the word. * @return populate maps with results of query. 
*/ protected void getNominalInstanceWords(final String sql, final String prepareScript, final String prepareScriptDelimiter, final SparseData sparseData, final Map<String, Object> params) { txTemplateNew.execute(new TransactionCallback<Object>() { // new PreparedStatementCreator() { // @Override // public PreparedStatement createPreparedStatement( // Connection conn) throws SQLException { // return conn.prepareStatement(sql, // ResultSet.TYPE_FORWARD_ONLY, // ResultSet.CONCUR_READ_ONLY); // } // // } @Override public Object doInTransaction(TransactionStatus txStatus) { prepare(prepareScript, prepareScriptDelimiter, params); namedJdbcTemplate.query(sql, params, new RowCallbackHandler() { @Override public void processRow(ResultSet rs) throws SQLException { long instanceId = rs.getLong(1); String word = rs.getString(2); String wordValue = rs.getString(3); addNominalWordToInstance(sparseData, instanceId, word, wordValue); } }); return null; } }); } /** * * @param sql * result 1st column: instance id, 2nd column: word, 3rd column: * numeric word value * @param instanceNumericWords * map of instance id - [map word - word value] to be populated */ protected void getNumericInstanceWords(final String sql, final String prepareScript, final String prepareScriptDelimiter, final SparseData sparseData, final Map<String, Object> params) { txTemplateNew.execute(new TransactionCallback<Object>() { @Override public Object doInTransaction(TransactionStatus txStatus) { prepare(prepareScript, prepareScriptDelimiter, params); namedJdbcTemplate.query(sql, params // new PreparedStatementCreator() { // // @Override // public PreparedStatement createPreparedStatement( // Connection conn) throws SQLException { // return conn.prepareStatement(sql, // ResultSet.TYPE_FORWARD_ONLY, // ResultSet.CONCUR_READ_ONLY); // } // // } , new RowCallbackHandler() { @Override public void processRow(ResultSet rs) throws SQLException { long instanceId = rs.getLong(1); String word = rs.getString(2); double wordValue = rs.getDouble(3); addNumericWordToInstance(sparseData, instanceId, word, wordValue); } }); return null; } }); } public TransactionTemplate getTxTemplateNew() { return txTemplateNew; } /** * * @param instanceLabel * instance data: label - fold - instance id - class map * @param instanceNumericWordQuery * query to get numeric attributes * @param instanceNominalWordQuery * query to get nominal attributes * @param prepareScript * prepare script to be executed in same tx as instance attribute * queries * @param prepareScriptDelimiter * delimiter for statements in prepare script * @param bDecorator * decorator to add attributes * @param label * @param fold * @param run * @return */ protected SparseData loadData(InstanceData instanceLabel, String instanceNumericWordQuery, String instanceNominalWordQuery, String prepareScript, String prepareScriptDelimiter, BagOfWordsDecorator bDecorator, String label, Integer fold, Integer run) { SparseData sparseData = new SparseData(); Map<String, Object> params = new HashMap<String, Object>(); if (label != null && label.length() > 0) params.put("label", label); if (fold != null && fold != 0) params.put("fold", fold); if (run != null && run != 0) params.put("run", run); // load numeric attributes if (instanceNumericWordQuery != null && instanceNumericWordQuery.trim().length() > 0) this.getNumericInstanceWords(instanceNumericWordQuery, prepareScript, prepareScriptDelimiter, sparseData, params); // added to support adding gram matrix index in GramMatrixExporter if (bDecorator != null) 
bDecorator.decorateNumericInstanceWords( sparseData.getInstanceNumericWords(), sparseData.getNumericWords()); // load nominal attributes if (instanceNominalWordQuery != null && instanceNominalWordQuery.trim().length() > 0) this.getNominalInstanceWords(instanceNominalWordQuery, prepareScript, prepareScriptDelimiter, sparseData, params); if (bDecorator != null) bDecorator.decorateNominalInstanceWords( sparseData.getInstanceNominalWords(), sparseData.getNominalWordValueMap()); return sparseData; } public void setDataSource(DataSource ds) { this.jdbcTemplate = new JdbcTemplate(ds); this.simpleJdbcTemplate = new SimpleJdbcTemplate(ds); this.namedJdbcTemplate = new NamedParameterJdbcTemplate(ds); } public void setKernelUtil(KernelUtil kernelUtil) { this.kernelUtil = kernelUtil; } public void setNameToFormatterMap( Map<String, SparseDataFormatterFactory> nameToFormatterMap) { this.nameToFormatterMap = nameToFormatterMap; } public void setTxTemplateNew(TransactionTemplate txTemplateNew) { this.txTemplateNew = txTemplateNew; } }
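/*
 * Illustrative sketch (not part of the original source): prepare(..) above
 * splits the "prepareScript" property on a configurable delimiter and executes
 * each non-empty statement with the same named parameters, inside the same
 * transaction as the attribute queries. The standalone class below shows only
 * that splitting/filtering step; the names are hypothetical and the println
 * stands in for NamedParameterJdbcTemplate.update(..). Note that the delimiter
 * is passed to String.split(..), so it is interpreted as a regular expression,
 * exactly as in the original method.
 */
import java.util.ArrayList;
import java.util.List;

class PrepareScriptSketch {
    /** Split a script on the delimiter and keep only non-blank statements. */
    static List<String> splitStatements(String prepareScript, String delimiter) {
        List<String> statements = new ArrayList<String>();
        if (prepareScript != null && prepareScript.length() > 0) {
            for (String sql : prepareScript.split(delimiter)) {
                if (sql != null && sql.trim().length() > 0) {
                    statements.add(sql.trim());
                }
            }
        }
        return statements;
    }

    public static void main(String[] args) {
        String script = "delete from tmp_labels; ; insert into tmp_labels values (:label)";
        for (String sql : splitStatements(script, ";")) {
            System.out.println("would execute: " + sql);   // stand-in for namedJdbcTemplate.update(sql, params)
        }
    }
}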
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.hadoop.fs.PathIsNotDirectoryException; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.QuotaExceededException; import org.apache.hadoop.hdfs.protocol.SnapshotException; import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite; import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList; import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot; import org.apache.hadoop.hdfs.util.Diff.ListType; import org.apache.hadoop.hdfs.util.ReadOnlyList; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import static org.apache.hadoop.hdfs.protocol.HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED; /** * Directory INode class. */ public class INodeDirectory extends INodeWithAdditionalFields implements INodeDirectoryAttributes { /** Cast INode to INodeDirectory. */ public static INodeDirectory valueOf(INode inode, Object path ) throws FileNotFoundException, PathIsNotDirectoryException { if (inode == null) { throw new FileNotFoundException("Directory does not exist: " + DFSUtil.path2String(path)); } if (!inode.isDirectory()) { throw new PathIsNotDirectoryException(DFSUtil.path2String(path)); } return inode.asDirectory(); } protected static final int DEFAULT_FILES_PER_DIRECTORY = 5; final static byte[] ROOT_NAME = DFSUtil.string2Bytes(""); private List<INode> children = null; /** constructor */ public INodeDirectory(long id, byte[] name, PermissionStatus permissions, long mtime) { super(id, name, permissions, mtime, 0L); } /** * Copy constructor * @param other The INodeDirectory to be copied * @param adopt Indicate whether or not need to set the parent field of child * INodes to the new node * @param featuresToCopy any number of features to copy to the new node. * The method will do a reference copy, not a deep copy. */ public INodeDirectory(INodeDirectory other, boolean adopt, Feature... 
featuresToCopy) { super(other); this.children = other.children; if (adopt && this.children != null) { for (INode child : children) { child.setParent(this); } } this.features = featuresToCopy; AclFeature aclFeature = getFeature(AclFeature.class); if (aclFeature != null) { // for the de-duplication of AclFeature removeFeature(aclFeature); addFeature(AclStorage.addAclFeature(aclFeature)); } } /** @return true unconditionally. */ @Override public final boolean isDirectory() { return true; } /** @return this object. */ @Override public final INodeDirectory asDirectory() { return this; } @Override public byte getLocalStoragePolicyID() { XAttrFeature f = getXAttrFeature(); ImmutableList<XAttr> xattrs = f == null ? ImmutableList.<XAttr> of() : f .getXAttrs(); for (XAttr xattr : xattrs) { if (BlockStoragePolicySuite.isStoragePolicyXAttr(xattr)) { return (xattr.getValue())[0]; } } return BLOCK_STORAGE_POLICY_ID_UNSPECIFIED; } @Override public byte getStoragePolicyID() { byte id = getLocalStoragePolicyID(); if (id != BLOCK_STORAGE_POLICY_ID_UNSPECIFIED) { return id; } // if it is unspecified, check its parent return getParent() != null ? getParent().getStoragePolicyID() : BLOCK_STORAGE_POLICY_ID_UNSPECIFIED; } void setQuota(BlockStoragePolicySuite bsps, long nsQuota, long ssQuota, StorageType type) { DirectoryWithQuotaFeature quota = getDirectoryWithQuotaFeature(); if (quota != null) { // already has quota; so set the quota to the new values if (type != null) { quota.setQuota(ssQuota, type); } else { quota.setQuota(nsQuota, ssQuota); } if (!isQuotaSet() && !isRoot()) { removeFeature(quota); } } else { final QuotaCounts c = computeQuotaUsage(bsps); DirectoryWithQuotaFeature.Builder builder = new DirectoryWithQuotaFeature.Builder().nameSpaceQuota(nsQuota); if (type != null) { builder.typeQuota(type, ssQuota); } else { builder.storageSpaceQuota(ssQuota); } addDirectoryWithQuotaFeature(builder.build()).setSpaceConsumed(c); } } @Override public QuotaCounts getQuotaCounts() { final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); return q != null? q.getQuota(): super.getQuotaCounts(); } @Override public void addSpaceConsumed(QuotaCounts counts, boolean verify) throws QuotaExceededException { final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (q != null) { q.addSpaceConsumed(this, counts, verify); } else { addSpaceConsumed2Parent(counts, verify); } } /** * If the directory contains a {@link DirectoryWithQuotaFeature}, return it; * otherwise, return null. */ public final DirectoryWithQuotaFeature getDirectoryWithQuotaFeature() { return getFeature(DirectoryWithQuotaFeature.class); } /** Is this directory with quota? */ final boolean isWithQuota() { return getDirectoryWithQuotaFeature() != null; } DirectoryWithQuotaFeature addDirectoryWithQuotaFeature( DirectoryWithQuotaFeature q) { Preconditions.checkState(!isWithQuota(), "Directory is already with quota"); addFeature(q); return q; } int searchChildren(byte[] name) { return children == null? -1: Collections.binarySearch(children, name); } public DirectoryWithSnapshotFeature addSnapshotFeature( DirectoryDiffList diffs) { Preconditions.checkState(!isWithSnapshot(), "Directory is already with snapshot"); DirectoryWithSnapshotFeature sf = new DirectoryWithSnapshotFeature(diffs); addFeature(sf); return sf; } /** * If feature list contains a {@link DirectoryWithSnapshotFeature}, return it; * otherwise, return null. 
*/ public final DirectoryWithSnapshotFeature getDirectoryWithSnapshotFeature() { return getFeature(DirectoryWithSnapshotFeature.class); } /** Is this file has the snapshot feature? */ public final boolean isWithSnapshot() { return getDirectoryWithSnapshotFeature() != null; } public DirectoryDiffList getDiffs() { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); return sf != null ? sf.getDiffs() : null; } @Override public INodeDirectoryAttributes getSnapshotINode(int snapshotId) { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); return sf == null ? this : sf.getDiffs().getSnapshotINode(snapshotId, this); } @Override public String toDetailString() { DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); return super.toDetailString() + (sf == null ? "" : ", " + sf.getDiffs()); } public DirectorySnapshottableFeature getDirectorySnapshottableFeature() { return getFeature(DirectorySnapshottableFeature.class); } public boolean isSnapshottable() { return getDirectorySnapshottableFeature() != null; } public Snapshot getSnapshot(byte[] snapshotName) { return getDirectorySnapshottableFeature().getSnapshot(snapshotName); } public void setSnapshotQuota(int snapshotQuota) { getDirectorySnapshottableFeature().setSnapshotQuota(snapshotQuota); } public Snapshot addSnapshot(int id, String name) throws SnapshotException, QuotaExceededException { return getDirectorySnapshottableFeature().addSnapshot(this, id, name); } public Snapshot removeSnapshot(BlockStoragePolicySuite bsps, String snapshotName, BlocksMapUpdateInfo collectedBlocks, final List<INode> removedINodes) throws SnapshotException { return getDirectorySnapshottableFeature().removeSnapshot(bsps, this, snapshotName, collectedBlocks, removedINodes); } public void renameSnapshot(String path, String oldName, String newName) throws SnapshotException { getDirectorySnapshottableFeature().renameSnapshot(path, oldName, newName); } /** add DirectorySnapshottableFeature */ public void addSnapshottableFeature() { Preconditions.checkState(!isSnapshottable(), "this is already snapshottable, this=%s", this); DirectoryWithSnapshotFeature s = this.getDirectoryWithSnapshotFeature(); final DirectorySnapshottableFeature snapshottable = new DirectorySnapshottableFeature(s); if (s != null) { this.removeFeature(s); } this.addFeature(snapshottable); } /** remove DirectorySnapshottableFeature */ public void removeSnapshottableFeature() { DirectorySnapshottableFeature s = getDirectorySnapshottableFeature(); Preconditions.checkState(s != null, "The dir does not have snapshottable feature: this=%s", this); this.removeFeature(s); if (s.getDiffs().asList().size() > 0) { // add a DirectoryWithSnapshotFeature back DirectoryWithSnapshotFeature sf = new DirectoryWithSnapshotFeature( s.getDiffs()); addFeature(sf); } } /** * Replace the given child with a new child. Note that we no longer need to * replace an normal INodeDirectory or INodeFile into an * INodeDirectoryWithSnapshot or INodeFileUnderConstruction. The only cases * for child replacement is for reference nodes. 
*/ public void replaceChild(INode oldChild, final INode newChild, final INodeMap inodeMap) { Preconditions.checkNotNull(children); final int i = searchChildren(newChild.getLocalNameBytes()); Preconditions.checkState(i >= 0); Preconditions.checkState(oldChild == children.get(i) || oldChild == children.get(i).asReference().getReferredINode() .asReference().getReferredINode()); oldChild = children.get(i); if (oldChild.isReference() && newChild.isReference()) { // both are reference nodes, e.g., DstReference -> WithName final INodeReference.WithCount withCount = (WithCount) oldChild.asReference().getReferredINode(); withCount.removeReference(oldChild.asReference()); } children.set(i, newChild); // replace the instance in the created list of the diff list DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); if (sf != null) { sf.getDiffs().replaceChild(ListType.CREATED, oldChild, newChild); } // update the inodeMap if (inodeMap != null) { inodeMap.put(newChild); } } INodeReference.WithName replaceChild4ReferenceWithName(INode oldChild, int latestSnapshotId) { Preconditions.checkArgument(latestSnapshotId != Snapshot.CURRENT_STATE_ID); if (oldChild instanceof INodeReference.WithName) { return (INodeReference.WithName)oldChild; } final INodeReference.WithCount withCount; if (oldChild.isReference()) { Preconditions.checkState(oldChild instanceof INodeReference.DstReference); withCount = (INodeReference.WithCount) oldChild.asReference() .getReferredINode(); } else { withCount = new INodeReference.WithCount(null, oldChild); } final INodeReference.WithName ref = new INodeReference.WithName(this, withCount, oldChild.getLocalNameBytes(), latestSnapshotId); replaceChild(oldChild, ref, null); return ref; } @Override public void recordModification(int latestSnapshotId) { if (isInLatestSnapshot(latestSnapshotId) && !shouldRecordInSrcSnapshot(latestSnapshotId)) { // add snapshot feature if necessary DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf == null) { sf = addSnapshotFeature(null); } // record self in the diff list if necessary sf.getDiffs().saveSelf2Snapshot(latestSnapshotId, this, null); } } /** * Save the child to the latest snapshot. * * @return the child inode, which may be replaced. */ public INode saveChild2Snapshot(final INode child, final int latestSnapshotId, final INode snapshotCopy) { if (latestSnapshotId == Snapshot.CURRENT_STATE_ID) { return child; } // add snapshot feature if necessary DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf == null) { sf = this.addSnapshotFeature(null); } return sf.saveChild2Snapshot(this, child, latestSnapshotId, snapshotCopy); } /** * @param name the name of the child * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the corresponding snapshot; otherwise, get the result from * the current directory. * @return the child inode. */ public INode getChild(byte[] name, int snapshotId) { DirectoryWithSnapshotFeature sf; if (snapshotId == Snapshot.CURRENT_STATE_ID || (sf = getDirectoryWithSnapshotFeature()) == null) { ReadOnlyList<INode> c = getCurrentChildrenList(); final int i = ReadOnlyList.Util.binarySearch(c, name); return i < 0 ? null : c.get(i); } return sf.getChild(this, name, snapshotId); } /** * Search for the given INode in the children list and the deleted lists of * snapshots. 
* @return {@link Snapshot#CURRENT_STATE_ID} if the inode is in the children * list; {@link Snapshot#NO_SNAPSHOT_ID} if the inode is neither in the * children list nor in any snapshot; otherwise the snapshot id of the * corresponding snapshot diff list. */ public int searchChild(INode inode) { INode child = getChild(inode.getLocalNameBytes(), Snapshot.CURRENT_STATE_ID); if (child != inode) { // inode is not in parent's children list, thus inode must be in // snapshot. identify the snapshot id and later add it into the path DirectoryDiffList diffs = getDiffs(); if (diffs == null) { return Snapshot.NO_SNAPSHOT_ID; } return diffs.findSnapshotDeleted(inode); } else { return Snapshot.CURRENT_STATE_ID; } } /** * @param snapshotId * if it is not {@link Snapshot#CURRENT_STATE_ID}, get the result * from the corresponding snapshot; otherwise, get the result from * the current directory. * @return the current children list if the specified snapshot is null; * otherwise, return the children list corresponding to the snapshot. * Note that the returned list is never null. */ public ReadOnlyList<INode> getChildrenList(final int snapshotId) { DirectoryWithSnapshotFeature sf; if (snapshotId == Snapshot.CURRENT_STATE_ID || (sf = this.getDirectoryWithSnapshotFeature()) == null) { return getCurrentChildrenList(); } return sf.getChildrenList(this, snapshotId); } private ReadOnlyList<INode> getCurrentChildrenList() { return children == null ? ReadOnlyList.Util.<INode> emptyList() : ReadOnlyList.Util.asReadOnlyList(children); } /** * Given a child's name, return the index of the next child * * @param name a child's name * @return the index of the next child */ static int nextChild(ReadOnlyList<INode> children, byte[] name) { if (name.length == 0) { // empty name return 0; } int nextPos = ReadOnlyList.Util.binarySearch(children, name) + 1; if (nextPos >= 0) { return nextPos; } return -nextPos; } /** * Remove the specified child from this directory. */ public boolean removeChild(INode child, int latestSnapshotId) { if (isInLatestSnapshot(latestSnapshotId)) { // create snapshot feature if necessary DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); if (sf == null) { sf = this.addSnapshotFeature(null); } return sf.removeChild(this, child, latestSnapshotId); } return removeChild(child); } /** * Remove the specified child from this directory. * The basic remove method which actually calls children.remove(..). * * @param child the child inode to be removed * * @return true if the child is removed; false if the child is not found. */ public boolean removeChild(final INode child) { final int i = searchChildren(child.getLocalNameBytes()); if (i < 0) { return false; } final INode removed = children.remove(i); Preconditions.checkState(removed == child); return true; } /** * Add a child inode to the directory. 
* * @param node INode to insert * @param setModTime set modification time for the parent node * not needed when replaying the addition and * the parent already has the proper mod time * @return false if the child with this name already exists; * otherwise, return true; */ public boolean addChild(INode node, final boolean setModTime, final int latestSnapshotId) throws QuotaExceededException { final int low = searchChildren(node.getLocalNameBytes()); if (low >= 0) { return false; } if (isInLatestSnapshot(latestSnapshotId)) { // create snapshot feature if necessary DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature(); if (sf == null) { sf = this.addSnapshotFeature(null); } return sf.addChild(this, node, setModTime, latestSnapshotId); } addChild(node, low); if (setModTime) { // update modification time of the parent directory updateModificationTime(node.getModificationTime(), latestSnapshotId); } return true; } public boolean addChild(INode node) { final int low = searchChildren(node.getLocalNameBytes()); if (low >= 0) { return false; } addChild(node, low); return true; } /** * Add the node to the children list at the given insertion point. * The basic add method which actually calls children.add(..). */ private void addChild(final INode node, final int insertionPoint) { if (children == null) { children = new ArrayList<INode>(DEFAULT_FILES_PER_DIRECTORY); } node.setParent(this); children.add(-insertionPoint - 1, node); if (node.getGroupName() == null) { node.setGroup(getGroupName()); } } @Override public QuotaCounts computeQuotaUsage(BlockStoragePolicySuite bsps, byte blockStoragePolicyId, QuotaCounts counts, boolean useCache, int lastSnapshotId) { final DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); // we are computing the quota usage for a specific snapshot here, i.e., the // computation only includes files/directories that exist at the time of the // given snapshot if (sf != null && lastSnapshotId != Snapshot.CURRENT_STATE_ID && !(useCache && isQuotaSet())) { ReadOnlyList<INode> childrenList = getChildrenList(lastSnapshotId); for (INode child : childrenList) { final byte childPolicyId = child.getStoragePolicyIDForQuota(blockStoragePolicyId); child.computeQuotaUsage(bsps, childPolicyId, counts, useCache, lastSnapshotId); } counts.addNameSpace(1); return counts; } // compute the quota usage in the scope of the current directory tree final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (useCache && q != null && q.isQuotaSet()) { // use the cached quota return q.AddCurrentSpaceUsage(counts); } else { useCache = q != null && !q.isQuotaSet() ? false : useCache; return computeDirectoryQuotaUsage(bsps, blockStoragePolicyId, counts, useCache, lastSnapshotId); } } private QuotaCounts computeDirectoryQuotaUsage(BlockStoragePolicySuite bsps, byte blockStoragePolicyId, QuotaCounts counts, boolean useCache, int lastSnapshotId) { if (children != null) { for (INode child : children) { final byte childPolicyId = child.getStoragePolicyIDForQuota(blockStoragePolicyId); child.computeQuotaUsage(bsps, childPolicyId, counts, useCache, lastSnapshotId); } } return computeQuotaUsage4CurrentDirectory(bsps, blockStoragePolicyId, counts); } /** Add quota usage for this inode excluding children. 
*/ public QuotaCounts computeQuotaUsage4CurrentDirectory( BlockStoragePolicySuite bsps, byte storagePolicyId, QuotaCounts counts) { counts.addNameSpace(1); // include the diff list DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf != null) { sf.computeQuotaUsage4CurrentDirectory(bsps, storagePolicyId, counts); } return counts; } @Override public ContentSummaryComputationContext computeContentSummary( ContentSummaryComputationContext summary) { final DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf != null) { sf.computeContentSummary4Snapshot(summary.getBlockStoragePolicySuite(), summary.getCounts()); } final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (q != null) { return q.computeContentSummary(this, summary); } else { return computeDirectoryContentSummary(summary, Snapshot.CURRENT_STATE_ID); } } protected ContentSummaryComputationContext computeDirectoryContentSummary( ContentSummaryComputationContext summary, int snapshotId) { ReadOnlyList<INode> childrenList = getChildrenList(snapshotId); // Explicit traversing is done to enable repositioning after relinquishing // and reacquiring locks. for (int i = 0; i < childrenList.size(); i++) { INode child = childrenList.get(i); byte[] childName = child.getLocalNameBytes(); long lastYieldCount = summary.getYieldCount(); child.computeContentSummary(summary); // Check whether the computation was paused in the subtree. // The counts may be off, but traversing the rest of children // should be made safe. if (lastYieldCount == summary.getYieldCount()) { continue; } // The locks were released and reacquired. Check parent first. if (getParent() == null) { // Stop further counting and return whatever we have so far. break; } // Obtain the children list again since it may have been modified. childrenList = getChildrenList(snapshotId); // Reposition in case the children list is changed. Decrement by 1 // since it will be incremented when loops. i = nextChild(childrenList, childName) - 1; } // Increment the directory count for this directory. summary.getCounts().addContent(Content.DIRECTORY, 1); // Relinquish and reacquire locks if necessary. summary.yield(); return summary; } /** * This method is usually called by the undo section of rename. * * Before calling this function, in the rename operation, we replace the * original src node (of the rename operation) with a reference node (WithName * instance) in both the children list and a created list, delete the * reference node from the children list, and add it to the corresponding * deleted list. * * To undo the above operations, we have the following steps in particular: * * <pre> * 1) remove the WithName node from the deleted list (if it exists) * 2) replace the WithName node in the created list with srcChild * 3) add srcChild back as a child of srcParent. Note that we already add * the node into the created list of a snapshot diff in step 2, we do not need * to add srcChild to the created list of the latest snapshot. * </pre> * * We do not need to update quota usage because the old child is in the * deleted list before. 
* * @param oldChild * The reference node to be removed/replaced * @param newChild * The node to be added back * @throws QuotaExceededException should not throw this exception */ public void undoRename4ScrParent(final INodeReference oldChild, final INode newChild) throws QuotaExceededException { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); Preconditions.checkState(sf != null, "Directory does not have snapshot feature"); sf.getDiffs().removeChild(ListType.DELETED, oldChild); sf.getDiffs().replaceChild(ListType.CREATED, oldChild, newChild); addChild(newChild, true, Snapshot.CURRENT_STATE_ID); } /** * Undo the rename operation for the dst tree, i.e., if the rename operation * (with OVERWRITE option) removes a file/dir from the dst tree, add it back * and delete possible record in the deleted list. */ public void undoRename4DstParent(final BlockStoragePolicySuite bsps, final INode deletedChild, int latestSnapshotId) throws QuotaExceededException { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); Preconditions.checkState(sf != null, "Directory does not have snapshot feature"); boolean removeDeletedChild = sf.getDiffs().removeChild(ListType.DELETED, deletedChild); int sid = removeDeletedChild ? Snapshot.CURRENT_STATE_ID : latestSnapshotId; final boolean added = addChild(deletedChild, true, sid); // update quota usage if adding is successfully and the old child has not // been stored in deleted list before if (added && !removeDeletedChild) { final QuotaCounts counts = deletedChild.computeQuotaUsage(bsps); addSpaceConsumed(counts, false); } } /** Set the children list to null. */ public void clearChildren() { this.children = null; } @Override public void clear() { super.clear(); clearChildren(); } /** Call cleanSubtree(..) recursively down the subtree. */ public QuotaCounts cleanSubtreeRecursively(final BlockStoragePolicySuite bsps, final int snapshot, int prior, final BlocksMapUpdateInfo collectedBlocks, final List<INode> removedINodes, final Map<INode, INode> excludedNodes) { QuotaCounts counts = new QuotaCounts.Builder().build(); // in case of deletion snapshot, since this call happens after we modify // the diff list, the snapshot to be deleted has been combined or renamed // to its latest previous snapshot. (besides, we also need to consider nodes // created after prior but before snapshot. this will be done in // DirectoryWithSnapshotFeature) int s = snapshot != Snapshot.CURRENT_STATE_ID && prior != Snapshot.NO_SNAPSHOT_ID ? 
prior : snapshot; for (INode child : getChildrenList(s)) { if (snapshot != Snapshot.CURRENT_STATE_ID && excludedNodes != null && excludedNodes.containsKey(child)) { continue; } else { QuotaCounts childCounts = child.cleanSubtree(bsps, snapshot, prior, collectedBlocks, removedINodes); counts.add(childCounts); } } return counts; } @Override public void destroyAndCollectBlocks(final BlockStoragePolicySuite bsps, final BlocksMapUpdateInfo collectedBlocks, final List<INode> removedINodes) { final DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); if (sf != null) { sf.clear(bsps, this, collectedBlocks, removedINodes); } for (INode child : getChildrenList(Snapshot.CURRENT_STATE_ID)) { child.destroyAndCollectBlocks(bsps, collectedBlocks, removedINodes); } if (getAclFeature() != null) { AclStorage.removeAclFeature(getAclFeature()); } clear(); removedINodes.add(this); } @Override public QuotaCounts cleanSubtree(final BlockStoragePolicySuite bsps, final int snapshotId, int priorSnapshotId, final BlocksMapUpdateInfo collectedBlocks, final List<INode> removedINodes) { DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature(); // there is snapshot data if (sf != null) { return sf.cleanDirectory(bsps, this, snapshotId, priorSnapshotId, collectedBlocks, removedINodes); } // there is no snapshot data if (priorSnapshotId == Snapshot.NO_SNAPSHOT_ID && snapshotId == Snapshot.CURRENT_STATE_ID) { // destroy the whole subtree and collect blocks that should be deleted QuotaCounts counts = new QuotaCounts.Builder().build(); this.computeQuotaUsage(bsps, counts, true); destroyAndCollectBlocks(bsps, collectedBlocks, removedINodes); return counts; } else { // process recursively down the subtree QuotaCounts counts = cleanSubtreeRecursively(bsps, snapshotId, priorSnapshotId, collectedBlocks, removedINodes, null); if (isQuotaSet()) { getDirectoryWithQuotaFeature().addSpaceConsumed2Cache(counts.negation()); } return counts; } } /** * Compare the metadata with another INodeDirectory */ @Override public boolean metadataEquals(INodeDirectoryAttributes other) { return other != null && getQuotaCounts().equals(other.getQuotaCounts()) && getPermissionLong() == other.getPermissionLong() && getAclFeature() == other.getAclFeature() && getXAttrFeature() == other.getXAttrFeature(); } /* * The following code is to dump the tree recursively for testing. 
* * \- foo (INodeDirectory@33dd2717) * \- sub1 (INodeDirectory@442172) * +- file1 (INodeFile@78392d4) * +- file2 (INodeFile@78392d5) * +- sub11 (INodeDirectory@8400cff) * \- file3 (INodeFile@78392d6) * \- z_file4 (INodeFile@45848712) */ static final String DUMPTREE_EXCEPT_LAST_ITEM = "+-"; static final String DUMPTREE_LAST_ITEM = "\\-"; @VisibleForTesting @Override public void dumpTreeRecursively(PrintWriter out, StringBuilder prefix, final int snapshot) { super.dumpTreeRecursively(out, prefix, snapshot); out.print(", childrenSize=" + getChildrenList(snapshot).size()); final DirectoryWithQuotaFeature q = getDirectoryWithQuotaFeature(); if (q != null) { out.print(", " + q); } if (this instanceof Snapshot.Root) { out.print(", snapshotId=" + snapshot); } out.println(); if (prefix.length() >= 2) { prefix.setLength(prefix.length() - 2); prefix.append(" "); } dumpTreeRecursively(out, prefix, new Iterable<SnapshotAndINode>() { final Iterator<INode> i = getChildrenList(snapshot).iterator(); @Override public Iterator<SnapshotAndINode> iterator() { return new Iterator<SnapshotAndINode>() { @Override public boolean hasNext() { return i.hasNext(); } @Override public SnapshotAndINode next() { return new SnapshotAndINode(snapshot, i.next()); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }); final DirectorySnapshottableFeature s = getDirectorySnapshottableFeature(); if (s != null) { s.dumpTreeRecursively(this, out, prefix, snapshot); } } /** * Dump the given subtrees. * @param prefix The prefix string that each line should print. * @param subs The subtrees. */ @VisibleForTesting public static void dumpTreeRecursively(PrintWriter out, StringBuilder prefix, Iterable<SnapshotAndINode> subs) { if (subs != null) { for(final Iterator<SnapshotAndINode> i = subs.iterator(); i.hasNext();) { final SnapshotAndINode pair = i.next(); prefix.append(i.hasNext()? DUMPTREE_EXCEPT_LAST_ITEM: DUMPTREE_LAST_ITEM); pair.inode.dumpTreeRecursively(out, prefix, pair.snapshotId); prefix.setLength(prefix.length() - 2); } } } /** A pair of Snapshot and INode objects. */ public static class SnapshotAndINode { public final int snapshotId; public final INode inode; public SnapshotAndINode(int snapshot, INode inode) { this.snapshotId = snapshot; this.inode = inode; } } public final int getChildrenNum(final int snapshotId) { return getChildrenList(snapshotId).size(); } }
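/*
 * Illustrative sketch (not part of the original source): searchChildren(..)
 * and the private addChild(..) above keep the children list sorted by name and
 * rely on the standard binary-search contract, where a negative return value r
 * encodes the insertion point as (-r - 1). The small example below shows that
 * idiom on a plain sorted list of names; it is a simplified stand-in, not HDFS
 * code.
 */
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class SortedInsertSketch {
    /** Insert name into the sorted list unless it is already present. */
    static boolean addIfAbsent(List<String> sortedNames, String name) {
        int i = Collections.binarySearch(sortedNames, name);
        if (i >= 0) {
            return false;                    // name already exists at index i
        }
        sortedNames.add(-i - 1, name);       // negative result encodes the insertion point
        return true;
    }

    public static void main(String[] args) {
        List<String> names = new ArrayList<String>();
        for (String n : new String[] {"sub1", "file1", "file1", "a"}) {
            System.out.println(n + " added: " + addIfAbsent(names, n));
        }
        System.out.println(names);           // stays sorted: [a, file1, sub1]
    }
}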
/*- * -\-\- * Helios Client * -- * Copyright (C) 2016 Spotify AB * -- * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -/-/- */ package com.spotify.helios.common.descriptors; import static com.google.common.base.Charsets.UTF_8; import static com.google.common.base.Preconditions.checkArgument; import static com.spotify.helios.common.descriptors.Descriptor.parse; import static java.util.Arrays.asList; import static org.hamcrest.Matchers.hasEntry; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.BaseEncoding; import com.spotify.helios.common.Hash; import com.spotify.helios.common.Json; import java.io.IOException; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Set; import org.junit.Test; public class JobTest { private Map<String, Object> map(final Object... 
objects) { final ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder(); checkArgument(objects.length % 2 == 0); for (int i = 0; i < objects.length; i += 2) { builder.put((String) objects[i], objects[i + 1]); } return builder.build(); } @Test public void testNormalizedExcludesEmptyStrings() throws Exception { final Job j = Job.newBuilder().setName("x").setImage("x").setVersion("x") .setRegistrationDomain("").build(); assertFalse(Json.asNormalizedString(j).contains("registrationDomain")); } @Test public void verifyBuilder() throws Exception { final Job.Builder builder = Job.newBuilder(); // Input to setXXX final String setName = "set_name"; final String setVersion = "set_version"; final String setImage = "set_image"; final String setHostname = "set_hostname"; final List<String> setCommand = asList("set", "command"); final Map<String, String> setEnv = ImmutableMap.of("set", "env"); final Map<String, PortMapping> setPorts = ImmutableMap.of("set_ports", PortMapping.of(1234)); final ImmutableMap.Builder<String, ServicePortParameters> setServicePortsBuilder = ImmutableMap.builder(); setServicePortsBuilder.put("set_ports1", new ServicePortParameters( ImmutableList.of("tag1", "tag2"))); setServicePortsBuilder.put("set_ports2", new ServicePortParameters( ImmutableList.of("tag3", "tag4"))); final ServicePorts setServicePorts = new ServicePorts(setServicePortsBuilder.build()); final Map<ServiceEndpoint, ServicePorts> setRegistration = ImmutableMap.of( ServiceEndpoint.of("set_service", "set_proto"), setServicePorts); final Integer setGracePeriod = 120; final Map<String, String> setVolumes = ImmutableMap.of("/set", "/volume"); final Date setExpires = new Date(); final String setRegistrationDomain = "my.domain"; final String setCreatingUser = "username"; final Resources setResources = new Resources(10485760L, 10485761L, 4L, "1"); final HealthCheck setHealthCheck = HealthCheck.newHttpHealthCheck() .setPath("/healthcheck") .setPort("set_ports") .build(); final List<String> setSecurityOpt = Lists.newArrayList("label:user:dxia", "apparmor:foo"); final String setNetworkMode = "host"; final Map<String, String> setMetadata = ImmutableMap.of("set_metadata_key", "set_metadata_val"); final Set<String> setAddCapabilities = ImmutableSet.of("set_cap_add1", "set_cap_add2"); final Set<String> setDropCapabilities = ImmutableSet.of("set_cap_drop1", "set_cap_drop2"); // Input to addXXX final Map<String, String> addEnv = ImmutableMap.of("add", "env"); final Map<String, PortMapping> addPorts = ImmutableMap.of("add_ports", PortMapping.of(4711)); final ImmutableMap.Builder<String, ServicePortParameters> addServicePortsBuilder = ImmutableMap.builder(); addServicePortsBuilder.put("add_ports1", new ServicePortParameters( ImmutableList.of("tag1", "tag2"))); addServicePortsBuilder.put("add_ports2", new ServicePortParameters( ImmutableList.of("tag3", "tag4"))); final ServicePorts addServicePorts = new ServicePorts(addServicePortsBuilder.build()); final Map<ServiceEndpoint, ServicePorts> addRegistration = ImmutableMap.of( ServiceEndpoint.of("add_service", "add_proto"), addServicePorts); final Map<String, String> addVolumes = ImmutableMap.of("/add", "/volume"); final Map<String, String> addMetadata = ImmutableMap.of("add_metadata_key", "add_metadata_val"); // Expected output from getXXX final String expectedName = setName; final String expectedVersion = setVersion; final String expectedImage = setImage; final String expectedHostname = setHostname; final List<String> expectedCommand = setCommand; final Map<String, 
String> expectedEnv = concat(setEnv, addEnv); final Map<String, PortMapping> expectedPorts = concat(setPorts, addPorts); final Map<ServiceEndpoint, ServicePorts> expectedRegistration = concat(setRegistration, addRegistration); final Integer expectedGracePeriod = setGracePeriod; final Map<String, String> expectedVolumes = concat(setVolumes, addVolumes); final Date expectedExpires = setExpires; final String expectedRegistrationDomain = setRegistrationDomain; final String expectedCreatingUser = setCreatingUser; final Resources expectedResources = setResources; final HealthCheck expectedHealthCheck = setHealthCheck; final List<String> expectedSecurityOpt = setSecurityOpt; final String expectedNetworkMode = setNetworkMode; final Map<String, String> expectedMetadata = concat(setMetadata, addMetadata); final Set<String> expectedAddCapabilities = setAddCapabilities; final Set<String> expectedDropCapabilities = setDropCapabilities; // Check setXXX methods builder.setName(setName); builder.setVersion(setVersion); builder.setImage(setImage); builder.setHostname(setHostname); builder.setCommand(setCommand); builder.setEnv(setEnv); builder.setPorts(setPorts); builder.setRegistration(setRegistration); builder.setGracePeriod(setGracePeriod); builder.setVolumes(setVolumes); builder.setExpires(setExpires); builder.setRegistrationDomain(setRegistrationDomain); builder.setCreatingUser(setCreatingUser); builder.setResources(setResources); builder.setHealthCheck(setHealthCheck); builder.setSecurityOpt(setSecurityOpt); builder.setNetworkMode(setNetworkMode); builder.setMetadata(setMetadata); builder.setAddCapabilities(setAddCapabilities); builder.setDropCapabilities(setDropCapabilities); // Check addXXX methods for (final Map.Entry<String, String> entry : addEnv.entrySet()) { builder.addEnv(entry.getKey(), entry.getValue()); } for (final Map.Entry<String, PortMapping> entry : addPorts.entrySet()) { builder.addPort(entry.getKey(), entry.getValue()); } for (final Map.Entry<ServiceEndpoint, ServicePorts> entry : addRegistration.entrySet()) { builder.addRegistration(entry.getKey(), entry.getValue()); } for (final Map.Entry<String, String> entry : addVolumes.entrySet()) { builder.addVolume(entry.getKey(), entry.getValue()); } for (final Map.Entry<String, String> entry : addMetadata.entrySet()) { builder.addMetadata(entry.getKey(), entry.getValue()); } assertEquals("name", expectedName, builder.getName()); assertEquals("version", expectedVersion, builder.getVersion()); assertEquals("image", expectedImage, builder.getImage()); assertEquals("hostname", expectedHostname, builder.getHostname()); assertEquals("command", expectedCommand, builder.getCommand()); assertEquals("env", expectedEnv, builder.getEnv()); assertEquals("ports", expectedPorts, builder.getPorts()); assertEquals("registration", expectedRegistration, builder.getRegistration()); assertEquals("gracePeriod", expectedGracePeriod, builder.getGracePeriod()); assertEquals("volumes", expectedVolumes, builder.getVolumes()); assertEquals("expires", expectedExpires, builder.getExpires()); assertEquals("registrationDomain", expectedRegistrationDomain, builder.getRegistrationDomain()); assertEquals("creatingUser", expectedCreatingUser, builder.getCreatingUser()); assertEquals("resources", expectedResources, builder.getResources()); assertEquals("healthCheck", expectedHealthCheck, builder.getHealthCheck()); assertEquals("securityOpt", expectedSecurityOpt, builder.getSecurityOpt()); assertEquals("networkMode", expectedNetworkMode, builder.getNetworkMode()); 
assertEquals("metadata", expectedMetadata, builder.getMetadata()); assertEquals("addCapabilities", expectedAddCapabilities, builder.getAddCapabilities()); assertEquals("dropCapabilities", expectedDropCapabilities, builder.getDropCapabilities()); // Check final output final Job job = builder.build(); assertEquals("name", expectedName, job.getId().getName()); assertEquals("version", expectedVersion, job.getId().getVersion()); assertEquals("image", expectedImage, job.getImage()); assertEquals("hostname", expectedHostname, job.getHostname()); assertEquals("command", expectedCommand, job.getCommand()); assertEquals("env", expectedEnv, job.getEnv()); assertEquals("ports", expectedPorts, job.getPorts()); assertEquals("registration", expectedRegistration, job.getRegistration()); assertEquals("gracePeriod", expectedGracePeriod, job.getGracePeriod()); assertEquals("volumes", expectedVolumes, job.getVolumes()); assertEquals("expires", expectedExpires, job.getExpires()); assertEquals("registrationDomain", expectedRegistrationDomain, job.getRegistrationDomain()); assertEquals("creatingUser", expectedCreatingUser, job.getCreatingUser()); assertEquals("resources", expectedResources, job.getResources()); assertEquals("healthCheck", expectedHealthCheck, job.getHealthCheck()); assertEquals("securityOpt", expectedSecurityOpt, job.getSecurityOpt()); assertEquals("networkMode", expectedNetworkMode, job.getNetworkMode()); assertEquals("metadata", expectedMetadata, job.getMetadata()); assertEquals("addCapabilities", expectedAddCapabilities, job.getAddCapabilities()); assertEquals("dropCapabilities", expectedDropCapabilities, job.getDropCapabilities()); // Check toBuilder final Job.Builder rebuilder = job.toBuilder(); assertEquals("name", expectedName, rebuilder.getName()); assertEquals("version", expectedVersion, rebuilder.getVersion()); assertEquals("image", expectedImage, rebuilder.getImage()); assertEquals("hostname", expectedHostname, rebuilder.getHostname()); assertEquals("command", expectedCommand, rebuilder.getCommand()); assertEquals("env", expectedEnv, rebuilder.getEnv()); assertEquals("ports", expectedPorts, rebuilder.getPorts()); assertEquals("registration", expectedRegistration, rebuilder.getRegistration()); assertEquals("gracePeriod", expectedGracePeriod, rebuilder.getGracePeriod()); assertEquals("volumes", expectedVolumes, rebuilder.getVolumes()); assertEquals("expires", expectedExpires, rebuilder.getExpires()); assertEquals("registrationDomain", expectedRegistrationDomain, rebuilder.getRegistrationDomain()); assertEquals("creatingUser", expectedCreatingUser, rebuilder.getCreatingUser()); assertEquals("resources", expectedResources, rebuilder.getResources()); assertEquals("healthCheck", expectedHealthCheck, rebuilder.getHealthCheck()); assertEquals("securityOpt", expectedSecurityOpt, rebuilder.getSecurityOpt()); assertEquals("networkMode", expectedNetworkMode, rebuilder.getNetworkMode()); assertEquals("metadata", expectedMetadata, rebuilder.getMetadata()); assertEquals("addCapabilities", expectedAddCapabilities, rebuilder.getAddCapabilities()); assertEquals("dropCapabilities", expectedDropCapabilities, rebuilder.getDropCapabilities()); // Check clone final Job.Builder cloned = builder.clone(); assertEquals("name", expectedName, cloned.getName()); assertEquals("version", expectedVersion, cloned.getVersion()); assertEquals("image", expectedImage, cloned.getImage()); assertEquals("hostname", expectedHostname, cloned.getHostname()); assertEquals("command", expectedCommand, cloned.getCommand()); 
assertEquals("env", expectedEnv, cloned.getEnv()); assertEquals("ports", expectedPorts, cloned.getPorts()); assertEquals("registration", expectedRegistration, cloned.getRegistration()); assertEquals("gracePeriod", expectedGracePeriod, cloned.getGracePeriod()); assertEquals("volumes", expectedVolumes, cloned.getVolumes()); assertEquals("expires", expectedExpires, cloned.getExpires()); assertEquals("registrationDomain", expectedRegistrationDomain, cloned.getRegistrationDomain()); assertEquals("creatingUser", expectedCreatingUser, cloned.getCreatingUser()); assertEquals("resources", expectedResources, cloned.getResources()); assertEquals("healthCheck", expectedHealthCheck, cloned.getHealthCheck()); assertEquals("securityOpt", expectedSecurityOpt, cloned.getSecurityOpt()); assertEquals("networkMode", expectedNetworkMode, cloned.getNetworkMode()); assertEquals("metadata", expectedMetadata, cloned.getMetadata()); assertEquals("addCapabilities", expectedAddCapabilities, cloned.getAddCapabilities()); assertEquals("dropCapabilities", expectedDropCapabilities, cloned.getDropCapabilities()); final Job clonedJob = cloned.build(); assertEquals("name", expectedName, clonedJob.getId().getName()); assertEquals("version", expectedVersion, clonedJob.getId().getVersion()); assertEquals("image", expectedImage, clonedJob.getImage()); assertEquals("hostname", expectedHostname, clonedJob.getHostname()); assertEquals("command", expectedCommand, clonedJob.getCommand()); assertEquals("env", expectedEnv, clonedJob.getEnv()); assertEquals("ports", expectedPorts, clonedJob.getPorts()); assertEquals("registration", expectedRegistration, clonedJob.getRegistration()); assertEquals("gracePeriod", expectedGracePeriod, clonedJob.getGracePeriod()); assertEquals("volumes", expectedVolumes, clonedJob.getVolumes()); assertEquals("expires", expectedExpires, clonedJob.getExpires()); assertEquals("registrationDomain", expectedRegistrationDomain, clonedJob.getRegistrationDomain()); assertEquals("creatingUser", expectedCreatingUser, clonedJob.getCreatingUser()); assertEquals("resources", expectedResources, clonedJob.getResources()); assertEquals("healthCheck", expectedHealthCheck, clonedJob.getHealthCheck()); assertEquals("securityOpt", expectedSecurityOpt, clonedJob.getSecurityOpt()); assertEquals("networkMode", expectedNetworkMode, clonedJob.getNetworkMode()); assertEquals("metadata", expectedMetadata, clonedJob.getMetadata()); assertEquals("addCapabilities", expectedAddCapabilities, clonedJob.getAddCapabilities()); assertEquals("dropCapabilities", expectedDropCapabilities, clonedJob.getDropCapabilities()); } @SafeVarargs private final <K, V> Map<K, V> concat(final Map<K, V>... maps) { final ImmutableMap.Builder<K, V> b = ImmutableMap.builder(); for (final Map<K, V> map : maps) { b.putAll(map); } return b.build(); } /** Verify the Builder allows calling addFoo() before setFoo() for collection types. 
*/ @Test public void testBuilderAddBeforeSet() throws Exception { final Job job = Job.newBuilder() .addEnv("env", "var") .addMetadata("meta", "data") .addPort("http", PortMapping.of(80, 8000)) .addRegistration(ServiceEndpoint.of("foo", "http"), ServicePorts.of("http")) .addVolume("/foo", "/bar") .build(); assertThat(job.getEnv(), hasEntry("env", "var")); assertThat(job.getMetadata(), hasEntry("meta", "data")); assertThat(job.getPorts(), hasEntry("http", PortMapping.of(80, 8000))); assertThat(job.getRegistration(), hasEntry(ServiceEndpoint.of("foo", "http"), ServicePorts.of("http"))); assertThat(job.getVolumes(), hasEntry("/foo", "/bar")); } @Test public void verifySha1Id() throws IOException { final Map<String, Object> expectedConfig = map("command", asList("foo", "bar"), "image", "foobar:4711", "name", "foozbarz", "version", "17"); final String expectedInput = "foozbarz:17:" + hex(Json.sha1digest(expectedConfig)); final String expectedDigest = hex(Hash.sha1digest(expectedInput.getBytes(UTF_8))); final JobId expectedId = JobId.fromString("foozbarz:17:" + expectedDigest); final Job job = Job.newBuilder() .setCommand(asList("foo", "bar")) .setImage("foobar:4711") .setName("foozbarz") .setVersion("17") .build(); assertEquals(expectedId, job.getId()); } @Test public void verifySha1IdWithEnv() throws IOException { final Map<String, String> env = ImmutableMap.of("FOO", "BAR"); final Map<String, Object> expectedConfig = map("command", asList("foo", "bar"), "image", "foobar:4711", "name", "foozbarz", "version", "17", "env", env); final String expectedInput = "foozbarz:17:" + hex(Json.sha1digest(expectedConfig)); final String expectedDigest = hex(Hash.sha1digest(expectedInput.getBytes(UTF_8))); final JobId expectedId = JobId.fromString("foozbarz:17:" + expectedDigest); final Job job = Job.newBuilder() .setCommand(asList("foo", "bar")) .setImage("foobar:4711") .setName("foozbarz") .setVersion("17") .setEnv(env) .build(); assertEquals(expectedId, job.getId()); } private String hex(final byte[] bytes) { return BaseEncoding.base16().lowerCase().encode(bytes); } @Test public void verifyCanParseJobWithUnknownFields() throws Exception { final Job job = Job.newBuilder() .setCommand(asList("foo", "bar")) .setImage("foobar:4711") .setName("foozbarz") .setVersion("17") .build(); final String jobJson = job.toJsonString(); final ObjectMapper objectMapper = new ObjectMapper(); final Map<String, Object> fields = objectMapper.readValue( jobJson, new TypeReference<Map<String, Object>>() {}); fields.put("UNKNOWN_FIELD", "FOOBAR"); final String modifiedJobJson = objectMapper.writeValueAsString(fields); final Job parsedJob = parse(modifiedJobJson, Job.class); assertEquals(job, parsedJob); } @Test public void verifyCanParseJobWithMissingEnv() throws Exception { final Job job = Job.newBuilder() .setCommand(asList("foo", "bar")) .setImage("foobar:4711") .setName("foozbarz") .setVersion("17") .build(); removeFieldAndParse(job, "env"); } @Test public void verifyCanParseJobWithMissingMetadata() throws Exception { final Job job = Job.newBuilder() .setCommand(asList("foo", "bar")) .setImage("foobar:4711") .setName("foozbarz") .setVersion("17") .build(); removeFieldAndParse(job, "metadata"); } private static void removeFieldAndParse(final Job job, final String... 
fieldNames) throws Exception { final String jobJson = job.toJsonString(); final ObjectMapper objectMapper = new ObjectMapper(); final Map<String, Object> fields = objectMapper.readValue( jobJson, new TypeReference<Map<String, Object>>() {}); for (final String field : fieldNames) { fields.remove(field); } final String modifiedJobJson = objectMapper.writeValueAsString(fields); final Job parsedJob = parse(modifiedJobJson, Job.class); assertEquals(job, parsedJob); } @Test public void verifyJobIsImmutable() { final List<String> expectedCommand = ImmutableList.of("foo"); final Map<String, String> expectedEnv = ImmutableMap.of("e1", "1"); final Map<String, String> expectedMetadata = ImmutableMap.of("foo", "bar"); final Map<String, PortMapping> expectedPorts = ImmutableMap.of("p1", PortMapping.of(1, 2)); final Map<ServiceEndpoint, ServicePorts> expectedRegistration = ImmutableMap.of(ServiceEndpoint.of("foo", "tcp"), ServicePorts.of("p1")); final Integer expectedGracePeriod = 240; final List<String> mutableCommand = Lists.newArrayList(expectedCommand); final Map<String, String> mutableEnv = Maps.newHashMap(expectedEnv); final Map<String, String> mutableMetadata = Maps.newHashMap(expectedMetadata); final Map<String, PortMapping> mutablePorts = Maps.newHashMap(expectedPorts); final Map<ServiceEndpoint, ServicePorts> mutableRegistration = Maps.newHashMap(expectedRegistration); final Job.Builder builder = Job.newBuilder() .setCommand(mutableCommand) .setEnv(mutableEnv) .setMetadata(mutableMetadata) .setPorts(mutablePorts) .setImage("foobar:4711") .setName("foozbarz") .setVersion("17") .setRegistration(mutableRegistration) .setGracePeriod(expectedGracePeriod); final Job job = builder.build(); mutableCommand.add("bar"); mutableEnv.put("e2", "2"); mutableMetadata.put("some", "thing"); mutablePorts.put("p2", PortMapping.of(3, 4)); mutableRegistration.put(ServiceEndpoint.of("bar", "udp"), ServicePorts.of("p2")); builder.addEnv("added_env", "FOO"); builder.addMetadata("added", "data"); builder.addPort("added_port", PortMapping.of(4711)); builder.addRegistration(ServiceEndpoint.of("added_reg", "added_proto"), ServicePorts.of("added_port")); builder.setGracePeriod(480); assertEquals(expectedCommand, job.getCommand()); assertEquals(expectedEnv, job.getEnv()); assertEquals(expectedMetadata, job.getMetadata()); assertEquals(expectedPorts, job.getPorts()); assertEquals(expectedRegistration, job.getRegistration()); assertEquals(expectedGracePeriod, job.getGracePeriod()); } @Test public void testChangingPortTagsChangesJobHash() { final Job j = Job.newBuilder().setName("foo").setVersion("1").setImage("foobar").build(); final Job.Builder builder = j.toBuilder(); final Map<String, PortMapping> ports = ImmutableMap.of("add_ports1", PortMapping.of(1234), "add_ports2", PortMapping.of(2345)); final ImmutableMap.Builder<String, ServicePortParameters> servicePortsBuilder = ImmutableMap.builder(); servicePortsBuilder.put("add_ports1", new ServicePortParameters( ImmutableList.of("tag1", "tag2"))); servicePortsBuilder.put("add_ports2", new ServicePortParameters( ImmutableList.of("tag3", "tag4"))); final ServicePorts servicePorts = new ServicePorts(servicePortsBuilder.build()); final Map<ServiceEndpoint, ServicePorts> oldRegistration = ImmutableMap.of( ServiceEndpoint.of("add_service", "add_proto"), servicePorts); final Job job = builder.setPorts(ports).setRegistration(oldRegistration).build(); final ImmutableMap.Builder<String, ServicePortParameters> newServicePortsBuilder = ImmutableMap.builder(); 
newServicePortsBuilder.put("add_ports1", new ServicePortParameters( ImmutableList.of("tag1", "newtag"))); newServicePortsBuilder.put("add_ports2", new ServicePortParameters( ImmutableList.of("tag3", "tag4"))); final ServicePorts newServicePorts = new ServicePorts(newServicePortsBuilder.build()); final Map<ServiceEndpoint, ServicePorts> newRegistration = ImmutableMap.of( ServiceEndpoint.of("add_service", "add_proto"), newServicePorts); final Job newJob = builder.setRegistration(newRegistration).build(); assertNotEquals(job.getId().getHash(), newJob.getId().getHash()); } @Test public void testBuildWithoutHash() { final Job.Builder builder = Job.newBuilder() .setCommand(asList("foo", "bar")) .setImage("foobar:4711") .setName("foozbarz") .setVersion("17"); assertNull(builder.buildWithoutHash().getId().getHash()); assertNotNull(builder.build().getId().getHash()); } }
/* Copyright (c) 2012-2016 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Johnathan Garrett (LMN Solutions) - initial implementation */ package org.locationtech.geogig.test.integration; import java.util.Iterator; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.locationtech.geogig.model.NodeRef; import org.locationtech.geogig.model.ObjectId; import org.locationtech.geogig.model.Ref; import org.locationtech.geogig.model.RevCommit; import org.locationtech.geogig.model.RevObject; import org.locationtech.geogig.model.impl.RevFeatureBuilder; import org.locationtech.geogig.plumbing.RefParse; import org.locationtech.geogig.plumbing.RevObjectParse; import org.locationtech.geogig.porcelain.AddOp; import org.locationtech.geogig.porcelain.BranchCreateOp; import org.locationtech.geogig.porcelain.CheckoutOp; import org.locationtech.geogig.porcelain.CherryPickOp; import org.locationtech.geogig.porcelain.CommitOp; import org.locationtech.geogig.porcelain.ConfigOp; import org.locationtech.geogig.porcelain.ConfigOp.ConfigAction; import org.locationtech.geogig.porcelain.ConflictsException; import org.locationtech.geogig.porcelain.LogOp; import org.locationtech.geogig.porcelain.NothingToCommitException; import org.opengis.feature.Feature; import com.google.common.base.Optional; import com.google.common.base.Suppliers; public class CherryPickOpTest extends RepositoryTestCase { @Rule public ExpectedException exception = ExpectedException.none(); @Override protected void setUpInternal() throws Exception { // These values should be used during a commit to set author/committer // TODO: author/committer roles need to be defined better, but for // now they are the same thing. repo.command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET).setName("user.name") .setValue("groldan").call(); repo.command(ConfigOp.class).setAction(ConfigAction.CONFIG_SET).setName("user.email") .setValue("groldan@boundlessgeo.com").call(); } @Test public void testCherryPick() throws Exception { // Create the following revision graph // o // | // o - master - Points 1 added // .\ // . o - Points 2 added // . | // . o - Points 3 added // . | // . o - Lines 1 added // . | // . 
o - branch1 - Lines 2 added insertAndAdd(points1); final RevCommit c1 = geogig.command(CommitOp.class).setMessage("commit for " + idP1).call(); // create branch1 and checkout geogig.command(BranchCreateOp.class).setAutoCheckout(true).setName("branch1").call(); insertAndAdd(points2); final RevCommit c2 = geogig.command(CommitOp.class).setMessage("commit for " + idP2).call(); insertAndAdd(points3); final RevCommit c3 = geogig.command(CommitOp.class).setMessage("commit for " + idP3).call(); insertAndAdd(lines1); final RevCommit c4 = geogig.command(CommitOp.class).setMessage("commit for " + idL1).call(); insertAndAdd(lines2); final RevCommit c5 = geogig.command(CommitOp.class).setMessage("commit for " + idL2).call(); // Cherry pick several commits to create the following revision graph // o // | // o - Points 1 added // | // o - Lines 2 added // | // o - Points 3 added // | // o - master - Points 2 added // switch back to master geogig.command(CheckoutOp.class).setSource("master").call(); CherryPickOp cherryPick = geogig.command(CherryPickOp.class); cherryPick.setCommit(Suppliers.ofInstance(c5.getId())); RevCommit commit2 = cherryPick.call(); assertEquals(c5.getAuthor(), commit2.getAuthor()); assertEquals(c5.getCommitter().getName(), commit2.getCommitter().getName()); assertEquals(c5.getMessage(), commit2.getMessage()); assertFalse(c5.getCommitter().getTimestamp() == commit2.getCommitter().getTimestamp()); assertFalse(c5.getTreeId().equals(commit2.getTreeId())); cherryPick.setCommit(Suppliers.ofInstance(c3.getId())); RevCommit commit3 = cherryPick.call(); assertEquals(c3.getAuthor(), commit3.getAuthor()); assertEquals(c3.getCommitter().getName(), commit3.getCommitter().getName()); assertEquals(c3.getMessage(), commit3.getMessage()); assertFalse(c3.getCommitter().getTimestamp() == commit3.getCommitter().getTimestamp()); assertFalse(c3.getTreeId().equals(commit3.getTreeId())); cherryPick.setCommit(Suppliers.ofInstance(c2.getId())); RevCommit commit4 = cherryPick.call(); assertEquals(c2.getAuthor(), commit4.getAuthor()); assertEquals(c2.getCommitter().getName(), commit4.getCommitter().getName()); assertEquals(c2.getCommitter().getEmail(), commit4.getCommitter().getEmail()); assertEquals(c2.getMessage(), commit4.getMessage()); assertFalse(c2.getCommitter().getTimestamp() == commit4.getCommitter().getTimestamp()); assertFalse(c2.getTreeId().equals(commit4.getTreeId())); // cherryPick.setCommit(Suppliers.ofInstance(c4.getId())); // RevCommit commit5 = cherryPick.call(); // // assertEquals(c4.getMessage(), commit5.getMessage()); // assertEquals(c4.getAuthor().getName(), commit5.getAuthor().getName()); // assertEquals(c4.getAuthor().getEmail(), commit5.getAuthor().getEmail()); // assertEquals(c4.getCommitter().getName(), commit5.getCommitter().getName()); // assertFalse(c4.getCommitter().getTimestamp() == commit5.getCommitter().getTimestamp()); // assertFalse(c4.getTreeId().equals(commit5.getTreeId())); Iterator<RevCommit> log = geogig.command(LogOp.class).call(); // Commit 5 // RevCommit logC5 = log.next(); // assertEquals(commit5, logC5); // Commit 4 RevCommit logC4 = log.next(); assertEquals(commit4, logC4); // Commit 3 RevCommit logC3 = log.next(); assertEquals(commit3, logC3); // Commit 2 RevCommit logC2 = log.next(); assertEquals(commit2, logC2); // Commit 1 RevCommit logC1 = log.next(); assertEquals(c1, logC1); assertFalse(log.hasNext()); } @Test public void testCherryPickWithConflicts() throws Exception { insertAndAdd(points1); geogig.command(CommitOp.class).call(); // create branch1, checkout 
and add a commit
        geogig.command(BranchCreateOp.class).setAutoCheckout(true).setName("branch1").call();
        insert(points2);
        Feature points1Modified = feature(pointsType, idP1, "StringProp1_2", new Integer(1000),
                "POINT(1 1)");
        insertAndAdd(points1Modified);
        geogig.command(AddOp.class).call();
        RevCommit branchCommit = geogig.command(CommitOp.class).call();
        geogig.command(CheckoutOp.class).setSource(Ref.MASTER).call();
        Feature points1ModifiedB = feature(pointsType, idP1, "StringProp1_3", new Integer(2000),
                "POINT(1 1)");
        insert(points1ModifiedB);
        geogig.command(AddOp.class).call();
        geogig.command(CommitOp.class).call();

        try {
            geogig.command(CherryPickOp.class).setCommit(Suppliers.ofInstance(branchCommit.getId()))
                    .call();
            fail("Expected ConflictsException");
        } catch (ConflictsException e) {
            assertTrue(e.getMessage().contains("conflict in Points/Points.1"));
        }

        Optional<Ref> cherrypickHead = geogig.command(RefParse.class).setName(Ref.CHERRY_PICK_HEAD)
                .call();
        assertTrue(cherrypickHead.isPresent());

        // check that unconflicted changes are in index and working tree
        Optional<RevObject> pts2 = geogig.command(RevObjectParse.class)
                .setRefSpec(Ref.WORK_HEAD + ":" + NodeRef.appendChild(pointsName, idP2)).call();
        assertTrue(pts2.isPresent());
        assertEquals(RevFeatureBuilder.build(points2), pts2.get());
        pts2 = geogig.command(RevObjectParse.class)
                .setRefSpec(Ref.STAGE_HEAD + ":" + NodeRef.appendChild(pointsName, idP2)).call();
        assertTrue(pts2.isPresent());
        assertEquals(RevFeatureBuilder.build(points2), pts2.get());

        // solve and commit
        Feature points1Solved = feature(pointsType, idP1, "StringProp1_2", new Integer(2000),
                "POINT(1 1)");
        insert(points1Solved);
        geogig.command(AddOp.class).call();
        geogig.command(CommitOp.class).setCommit(branchCommit).call();

        // the resolved feature should now be present in the working tree
        Optional<RevObject> ptsSolved = geogig.command(RevObjectParse.class)
                .setRefSpec(Ref.WORK_HEAD + ":" + NodeRef.appendChild(pointsName, idP1)).call();
        assertTrue(ptsSolved.isPresent());
        assertEquals(RevFeatureBuilder.build(points1Solved), ptsSolved.get());

        cherrypickHead = geogig.command(RefParse.class).setName(Ref.CHERRY_PICK_HEAD).call();
        assertFalse(cherrypickHead.isPresent());
    }

    @Test
    public void testCherryPickInvalidCommit() throws Exception {
        CherryPickOp cherryPick = geogig.command(CherryPickOp.class);
        cherryPick.setCommit(Suppliers.ofInstance(ObjectId.NULL));
        exception.expect(IllegalArgumentException.class);
        cherryPick.call();
    }

    @Test
    public void testCherryPickDirtyWorkTree() throws Exception {
        insertAndAdd(points1);
        geogig.command(CommitOp.class).setMessage("commit for " + idP1).call();
        // create branch1 and checkout
        geogig.command(BranchCreateOp.class).setAutoCheckout(true).setName("branch1").call();
        insertAndAdd(points2);
        RevCommit c1 = geogig.command(CommitOp.class).setMessage("commit for " + idP2).call();
        // checkout master and insert some features
        geogig.command(CheckoutOp.class).setSource("master").call();
        insert(points3);
        CherryPickOp cherryPick = geogig.command(CherryPickOp.class);
        cherryPick.setCommit(Suppliers.ofInstance(c1.getId()));
        exception.expect(IllegalStateException.class);
        cherryPick.call();
    }

    @Test
    public void testCherryPickDirtyIndex() throws Exception {
        insertAndAdd(points1);
        geogig.command(CommitOp.class).setMessage("commit for " + idP1).call();
        // create branch1 and checkout
        geogig.command(BranchCreateOp.class).setAutoCheckout(true).setName("branch1").call();
        insertAndAdd(points2);
        RevCommit c1 = geogig.command(CommitOp.class).setMessage("commit for " + idP2).call();
        // checkout master and insert some features
geogig.command(CheckoutOp.class).setSource("master").call(); insertAndAdd(points3); CherryPickOp cherryPick = geogig.command(CherryPickOp.class); cherryPick.setCommit(Suppliers.ofInstance(c1.getId())); exception.expect(IllegalStateException.class); cherryPick.call(); } @Ignore // this test probably does not make sense with the current behaviour of cherry pick @Test public void testCherryPickRootCommit() throws Exception { insertAndAdd(points1); final RevCommit c1 = geogig.command(CommitOp.class).setMessage("commit for " + idP1).call(); CherryPickOp cherryPick = geogig.command(CherryPickOp.class); cherryPick.setCommit(Suppliers.ofInstance(c1.getId())); cherryPick.call(); Iterator<RevCommit> log = geogig.command(LogOp.class).call(); // Commit 2 RevCommit logC2 = log.next(); assertEquals(c1.getMessage(), logC2.getMessage()); assertEquals(c1.getAuthor(), logC2.getAuthor()); assertEquals(c1.getCommitter().getName(), logC2.getCommitter().getName()); assertEquals(c1.getCommitter().getEmail(), logC2.getCommitter().getEmail()); assertFalse(c1.getCommitter().getTimestamp() == logC2.getCommitter().getTimestamp()); assertEquals(c1.getTreeId(), logC2.getTreeId()); // Commit 1 RevCommit logC1 = log.next(); assertEquals(c1, logC1); assertFalse(log.hasNext()); } @Test public void testCherryPickExistingCommit() throws Exception { insertAndAdd(points1); final RevCommit c1 = geogig.command(CommitOp.class).setMessage("commit for " + idP1).call(); CherryPickOp cherryPick = geogig.command(CherryPickOp.class); cherryPick.setCommit(Suppliers.ofInstance(c1.getId())); exception.expect(NothingToCommitException.class); cherryPick.call(); } }
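/*
 * Illustrative sketch, not part of the GeoGig sources: the cherry-pick call pattern the tests
 * above rely on, wrapped in a hypothetical helper class. CherryPickOp, RevCommit, and
 * Suppliers.ofInstance() are the APIs used by CherryPickOpTest; only the helper itself is new.
 */
package org.locationtech.geogig.test.integration;

import com.google.common.base.Suppliers;
import org.locationtech.geogig.model.RevCommit;
import org.locationtech.geogig.porcelain.CherryPickOp;

final class CherryPickExample {

    private CherryPickExample() {
    }

    /**
     * Applies a single commit on top of the current HEAD and returns the commit created by the
     * pick. A ConflictsException is thrown (and CHERRY_PICK_HEAD left set) if the picked changes
     * conflict with HEAD, as testCherryPickWithConflicts() above demonstrates.
     */
    static RevCommit apply(final CherryPickOp cherryPick, final RevCommit toPick) {
        // Same fluent pattern as the tests: supply the commit id, then call().
        return cherryPick.setCommit(Suppliers.ofInstance(toPick.getId())).call();
    }
}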
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.view; import com.android.internal.view.menu.MenuItemImpl; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; import android.app.Activity; import android.content.Context; import android.content.res.TypedArray; import android.content.res.XmlResourceParser; import android.util.AttributeSet; import android.util.Log; import android.util.Xml; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.Method; /** * This class is used to instantiate menu XML files into Menu objects. * <p> * For performance reasons, menu inflation relies heavily on pre-processing of * XML files that is done at build time. Therefore, it is not currently possible * to use MenuInflater with an XmlPullParser over a plain XML file at runtime; * it only works with an XmlPullParser returned from a compiled resource (R. * <em>something</em> file.) */ public class MenuInflater { private static final String LOG_TAG = "MenuInflater"; /** Menu tag name in XML. */ private static final String XML_MENU = "menu"; /** Group tag name in XML. */ private static final String XML_GROUP = "group"; /** Item tag name in XML. */ private static final String XML_ITEM = "item"; private static final int NO_ID = 0; private static final Class<?>[] ACTION_VIEW_CONSTRUCTOR_SIGNATURE = new Class[] {Context.class}; private static final Class<?>[] ACTION_PROVIDER_CONSTRUCTOR_SIGNATURE = ACTION_VIEW_CONSTRUCTOR_SIGNATURE; private final Object[] mActionViewConstructorArguments; private final Object[] mActionProviderConstructorArguments; private Context mContext; private Object mRealOwner; /** * Constructs a menu inflater. * * @see Activity#getMenuInflater() */ public MenuInflater(Context context) { mContext = context; mRealOwner = context; mActionViewConstructorArguments = new Object[] {context}; mActionProviderConstructorArguments = mActionViewConstructorArguments; } /** * Constructs a menu inflater. * * @see Activity#getMenuInflater() * @hide */ public MenuInflater(Context context, Object realOwner) { mContext = context; mRealOwner = realOwner; mActionViewConstructorArguments = new Object[] {context}; mActionProviderConstructorArguments = mActionViewConstructorArguments; } /** * Inflate a menu hierarchy from the specified XML resource. Throws * {@link InflateException} if there is an error. * * @param menuRes Resource ID for an XML layout resource to load (e.g., * <code>R.menu.main_activity</code>) * @param menu The Menu to inflate into. The items and submenus will be * added to this Menu. 
*/ public void inflate(int menuRes, Menu menu) { XmlResourceParser parser = null; try { parser = mContext.getResources().getLayout(menuRes); AttributeSet attrs = Xml.asAttributeSet(parser); parseMenu(parser, attrs, menu); } catch (XmlPullParserException e) { throw new InflateException("Error inflating menu XML", e); } catch (IOException e) { throw new InflateException("Error inflating menu XML", e); } finally { if (parser != null) parser.close(); } } /** * Called internally to fill the given menu. If a sub menu is seen, it will * call this recursively. */ private void parseMenu(XmlPullParser parser, AttributeSet attrs, Menu menu) throws XmlPullParserException, IOException { MenuState menuState = new MenuState(menu); int eventType = parser.getEventType(); String tagName; boolean lookingForEndOfUnknownTag = false; String unknownTagName = null; // This loop will skip to the menu start tag do { if (eventType == XmlPullParser.START_TAG) { tagName = parser.getName(); if (tagName.equals(XML_MENU)) { // Go to next tag eventType = parser.next(); break; } throw new RuntimeException("Expecting menu, got " + tagName); } eventType = parser.next(); } while (eventType != XmlPullParser.END_DOCUMENT); boolean reachedEndOfMenu = false; while (!reachedEndOfMenu) { switch (eventType) { case XmlPullParser.START_TAG: if (lookingForEndOfUnknownTag) { break; } tagName = parser.getName(); if (tagName.equals(XML_GROUP)) { menuState.readGroup(attrs); } else if (tagName.equals(XML_ITEM)) { menuState.readItem(attrs); } else if (tagName.equals(XML_MENU)) { // A menu start tag denotes a submenu for an item SubMenu subMenu = menuState.addSubMenuItem(); // Parse the submenu into returned SubMenu parseMenu(parser, attrs, subMenu); } else { lookingForEndOfUnknownTag = true; unknownTagName = tagName; } break; case XmlPullParser.END_TAG: tagName = parser.getName(); if (lookingForEndOfUnknownTag && tagName.equals(unknownTagName)) { lookingForEndOfUnknownTag = false; unknownTagName = null; } else if (tagName.equals(XML_GROUP)) { menuState.resetGroup(); } else if (tagName.equals(XML_ITEM)) { // Add the item if it hasn't been added (if the item was // a submenu, it would have been added already) if (!menuState.hasAddedItem()) { if (menuState.itemActionProvider != null && menuState.itemActionProvider.hasSubMenu()) { menuState.addSubMenuItem(); } else { menuState.addItem(); } } } else if (tagName.equals(XML_MENU)) { reachedEndOfMenu = true; } break; case XmlPullParser.END_DOCUMENT: throw new RuntimeException("Unexpected end of document"); } eventType = parser.next(); } } private static class InflatedOnMenuItemClickListener implements MenuItem.OnMenuItemClickListener { private static final Class<?>[] PARAM_TYPES = new Class[] { MenuItem.class }; private Object mRealOwner; private Method mMethod; public InflatedOnMenuItemClickListener(Object realOwner, String methodName) { mRealOwner = realOwner; Class<?> c = realOwner.getClass(); try { mMethod = c.getMethod(methodName, PARAM_TYPES); } catch (Exception e) { InflateException ex = new InflateException( "Couldn't resolve menu item onClick handler " + methodName + " in class " + c.getName()); ex.initCause(e); throw ex; } } public boolean onMenuItemClick(MenuItem item) { try { if (mMethod.getReturnType() == Boolean.TYPE) { return (Boolean) mMethod.invoke(mRealOwner, item); } else { mMethod.invoke(mRealOwner, item); return true; } } catch (Exception e) { throw new RuntimeException(e); } } } /** * State for the current menu. 
* <p> * Groups can not be nested unless there is another menu (which will have * its state class). */ private class MenuState { private Menu menu; /* * Group state is set on items as they are added, allowing an item to * override its group state. (As opposed to set on items at the group end tag.) */ private int groupId; private int groupCategory; private int groupOrder; private int groupCheckable; private boolean groupVisible; private boolean groupEnabled; private boolean itemAdded; private int itemId; private int itemCategoryOrder; private CharSequence itemTitle; private CharSequence itemTitleCondensed; private int itemIconResId; private char itemAlphabeticShortcut; private char itemNumericShortcut; /** * Sync to attrs.xml enum: * - 0: none * - 1: all * - 2: exclusive */ private int itemCheckable; private boolean itemChecked; private boolean itemVisible; private boolean itemEnabled; /** * Sync to attrs.xml enum, values in MenuItem: * - 0: never * - 1: ifRoom * - 2: always * - -1: Safe sentinel for "no value". */ private int itemShowAsAction; private int itemActionViewLayout; private String itemActionViewClassName; private String itemActionProviderClassName; private String itemListenerMethodName; private ActionProvider itemActionProvider; private static final int defaultGroupId = NO_ID; private static final int defaultItemId = NO_ID; private static final int defaultItemCategory = 0; private static final int defaultItemOrder = 0; private static final int defaultItemCheckable = 0; private static final boolean defaultItemChecked = false; private static final boolean defaultItemVisible = true; private static final boolean defaultItemEnabled = true; public MenuState(final Menu menu) { this.menu = menu; resetGroup(); } public void resetGroup() { groupId = defaultGroupId; groupCategory = defaultItemCategory; groupOrder = defaultItemOrder; groupCheckable = defaultItemCheckable; groupVisible = defaultItemVisible; groupEnabled = defaultItemEnabled; } /** * Called when the parser is pointing to a group tag. */ public void readGroup(AttributeSet attrs) { TypedArray a = mContext.obtainStyledAttributes(attrs, com.android.internal.R.styleable.MenuGroup); groupId = a.getResourceId(com.android.internal.R.styleable.MenuGroup_id, defaultGroupId); groupCategory = a.getInt(com.android.internal.R.styleable.MenuGroup_menuCategory, defaultItemCategory); groupOrder = a.getInt(com.android.internal.R.styleable.MenuGroup_orderInCategory, defaultItemOrder); groupCheckable = a.getInt(com.android.internal.R.styleable.MenuGroup_checkableBehavior, defaultItemCheckable); groupVisible = a.getBoolean(com.android.internal.R.styleable.MenuGroup_visible, defaultItemVisible); groupEnabled = a.getBoolean(com.android.internal.R.styleable.MenuGroup_enabled, defaultItemEnabled); a.recycle(); } /** * Called when the parser is pointing to an item tag. 
*/ public void readItem(AttributeSet attrs) { TypedArray a = mContext.obtainStyledAttributes(attrs, com.android.internal.R.styleable.MenuItem); // Inherit attributes from the group as default value itemId = a.getResourceId(com.android.internal.R.styleable.MenuItem_id, defaultItemId); final int category = a.getInt(com.android.internal.R.styleable.MenuItem_menuCategory, groupCategory); final int order = a.getInt(com.android.internal.R.styleable.MenuItem_orderInCategory, groupOrder); itemCategoryOrder = (category & Menu.CATEGORY_MASK) | (order & Menu.USER_MASK); itemTitle = a.getText(com.android.internal.R.styleable.MenuItem_title); itemTitleCondensed = a.getText(com.android.internal.R.styleable.MenuItem_titleCondensed); itemIconResId = a.getResourceId(com.android.internal.R.styleable.MenuItem_icon, 0); itemAlphabeticShortcut = getShortcut(a.getString(com.android.internal.R.styleable.MenuItem_alphabeticShortcut)); itemNumericShortcut = getShortcut(a.getString(com.android.internal.R.styleable.MenuItem_numericShortcut)); if (a.hasValue(com.android.internal.R.styleable.MenuItem_checkable)) { // Item has attribute checkable, use it itemCheckable = a.getBoolean(com.android.internal.R.styleable.MenuItem_checkable, false) ? 1 : 0; } else { // Item does not have attribute, use the group's (group can have one more state // for checkable that represents the exclusive checkable) itemCheckable = groupCheckable; } itemChecked = a.getBoolean(com.android.internal.R.styleable.MenuItem_checked, defaultItemChecked); itemVisible = a.getBoolean(com.android.internal.R.styleable.MenuItem_visible, groupVisible); itemEnabled = a.getBoolean(com.android.internal.R.styleable.MenuItem_enabled, groupEnabled); itemShowAsAction = a.getInt(com.android.internal.R.styleable.MenuItem_showAsAction, -1); itemListenerMethodName = a.getString(com.android.internal.R.styleable.MenuItem_onClick); itemActionViewLayout = a.getResourceId(com.android.internal.R.styleable.MenuItem_actionLayout, 0); itemActionViewClassName = a.getString(com.android.internal.R.styleable.MenuItem_actionViewClass); itemActionProviderClassName = a.getString(com.android.internal.R.styleable.MenuItem_actionProviderClass); final boolean hasActionProvider = itemActionProviderClassName != null; if (hasActionProvider && itemActionViewLayout == 0 && itemActionViewClassName == null) { itemActionProvider = newInstance(itemActionProviderClassName, ACTION_PROVIDER_CONSTRUCTOR_SIGNATURE, mActionProviderConstructorArguments); } else { if (hasActionProvider) { Log.w(LOG_TAG, "Ignoring attribute 'actionProviderClass'." 
+ " Action view already specified."); } itemActionProvider = null; } a.recycle(); itemAdded = false; } private char getShortcut(String shortcutString) { if (shortcutString == null) { return 0; } else { return shortcutString.charAt(0); } } private void setItem(MenuItem item) { item.setChecked(itemChecked) .setVisible(itemVisible) .setEnabled(itemEnabled) .setCheckable(itemCheckable >= 1) .setTitleCondensed(itemTitleCondensed) .setIcon(itemIconResId) .setAlphabeticShortcut(itemAlphabeticShortcut) .setNumericShortcut(itemNumericShortcut); if (itemShowAsAction >= 0) { item.setShowAsAction(itemShowAsAction); } if (itemListenerMethodName != null) { if (mContext.isRestricted()) { throw new IllegalStateException("The android:onClick attribute cannot " + "be used within a restricted context"); } item.setOnMenuItemClickListener( new InflatedOnMenuItemClickListener(mRealOwner, itemListenerMethodName)); } if (item instanceof MenuItemImpl) { MenuItemImpl impl = (MenuItemImpl) item; if (itemCheckable >= 2) { impl.setExclusiveCheckable(true); } } boolean actionViewSpecified = false; if (itemActionViewClassName != null) { View actionView = (View) newInstance(itemActionViewClassName, ACTION_VIEW_CONSTRUCTOR_SIGNATURE, mActionViewConstructorArguments); item.setActionView(actionView); actionViewSpecified = true; } if (itemActionViewLayout > 0) { if (!actionViewSpecified) { item.setActionView(itemActionViewLayout); actionViewSpecified = true; } else { Log.w(LOG_TAG, "Ignoring attribute 'itemActionViewLayout'." + " Action view already specified."); } } if (itemActionProvider != null) { item.setActionProvider(itemActionProvider); } } public void addItem() { itemAdded = true; setItem(menu.add(groupId, itemId, itemCategoryOrder, itemTitle)); } public SubMenu addSubMenuItem() { itemAdded = true; SubMenu subMenu = menu.addSubMenu(groupId, itemId, itemCategoryOrder, itemTitle); setItem(subMenu.getItem()); return subMenu; } public boolean hasAddedItem() { return itemAdded; } @SuppressWarnings("unchecked") private <T> T newInstance(String className, Class<?>[] constructorSignature, Object[] arguments) { try { Class<?> clazz = mContext.getClassLoader().loadClass(className); Constructor<?> constructor = clazz.getConstructor(constructorSignature); return (T) constructor.newInstance(arguments); } catch (Exception e) { Log.w(LOG_TAG, "Cannot instantiate class: " + className, e); } return null; } } }
/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.react.modules.websocket; import javax.annotation.Nullable; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import com.facebook.common.logging.FLog; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.ReadableMapKeySetIterator; import com.facebook.react.bridge.ReadableType; import com.facebook.react.bridge.WritableMap; import com.facebook.react.common.ReactConstants; import com.facebook.react.module.annotations.ReactModule; import com.facebook.react.modules.core.DeviceEventManagerModule; import com.facebook.react.modules.network.ForwardingCookieHandler; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; import okhttp3.WebSocket; import okhttp3.WebSocketListener; import okio.ByteString; @ReactModule(name = "WebSocketModule", hasConstants = false) public class WebSocketModule extends ReactContextBaseJavaModule { private final Map<Integer, WebSocket> mWebSocketConnections = new HashMap<>(); private ReactContext mReactContext; private ForwardingCookieHandler mCookieHandler; public WebSocketModule(ReactApplicationContext context) { super(context); mReactContext = context; mCookieHandler = new ForwardingCookieHandler(context); } private void sendEvent(String eventName, WritableMap params) { mReactContext .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) .emit(eventName, params); } @Override public String getName() { return "WebSocketModule"; } @ReactMethod public void connect( final String url, @Nullable final ReadableArray protocols, @Nullable final ReadableMap headers, final int id) { OkHttpClient client = new OkHttpClient.Builder() .connectTimeout(10, TimeUnit.SECONDS) .writeTimeout(10, TimeUnit.SECONDS) .readTimeout(0, TimeUnit.MINUTES) // Disable timeouts for read .build(); Request.Builder builder = new Request.Builder() .tag(id) .url(url); String cookie = getCookie(url); if (cookie != null) { builder.addHeader("Cookie", cookie); } if (headers != null) { ReadableMapKeySetIterator iterator = headers.keySetIterator(); if (!headers.hasKey("origin")) { builder.addHeader("origin", getDefaultOrigin(url)); } while (iterator.hasNextKey()) { String key = iterator.nextKey(); if (ReadableType.String.equals(headers.getType(key))) { builder.addHeader(key, headers.getString(key)); } else { FLog.w( ReactConstants.TAG, "Ignoring: requested " + key + ", value not a string"); } } } else { builder.addHeader("origin", getDefaultOrigin(url)); } if (protocols != null && protocols.size() > 0) { StringBuilder protocolsValue = new StringBuilder(""); for (int i = 0; i < protocols.size(); i++) { String v = protocols.getString(i).trim(); if (!v.isEmpty() && !v.contains(",")) { protocolsValue.append(v); protocolsValue.append(","); } } if (protocolsValue.length() > 0) { 
protocolsValue.replace(protocolsValue.length() - 1, protocolsValue.length(), ""); builder.addHeader("Sec-WebSocket-Protocol", protocolsValue.toString()); } } client.newWebSocket(builder.build(), new WebSocketListener() { @Override public void onOpen(WebSocket webSocket, Response response) { mWebSocketConnections.put(id, webSocket); WritableMap params = Arguments.createMap(); params.putInt("id", id); sendEvent("websocketOpen", params); } @Override public void onClosed(WebSocket webSocket, int code, String reason) { WritableMap params = Arguments.createMap(); params.putInt("id", id); params.putInt("code", code); params.putString("reason", reason); sendEvent("websocketClosed", params); } @Override public void onFailure(WebSocket webSocket, Throwable t, Response response) { notifyWebSocketFailed(id, t.getMessage()); } @Override public void onMessage(WebSocket webSocket, String text) { WritableMap params = Arguments.createMap(); params.putInt("id", id); params.putString("data", text); params.putString("type", "text"); sendEvent("websocketMessage", params); } @Override public void onMessage(WebSocket webSocket, ByteString bytes) { String text = bytes.base64(); WritableMap params = Arguments.createMap(); params.putInt("id", id); params.putString("data", text); params.putString("type", "binary"); sendEvent("websocketMessage", params); } }); // Trigger shutdown of the dispatcher's executor so this process can exit cleanly client.dispatcher().executorService().shutdown(); } @ReactMethod public void close(int code, String reason, int id) { WebSocket client = mWebSocketConnections.get(id); if (client == null) { // WebSocket is already closed // Don't do anything, mirror the behaviour on web return; } try { client.close(code, reason); mWebSocketConnections.remove(id); } catch (Exception e) { FLog.e( ReactConstants.TAG, "Could not close WebSocket connection for id " + id, e); } } @ReactMethod public void send(String message, int id) { WebSocket client = mWebSocketConnections.get(id); if (client == null) { // This is a programmer error throw new RuntimeException("Cannot send a message. Unknown WebSocket id " + id); } try { client.send(message); } catch (Exception e) { notifyWebSocketFailed(id, e.getMessage()); } } @ReactMethod public void sendBinary(String base64String, int id) { WebSocket client = mWebSocketConnections.get(id); if (client == null) { // This is a programmer error throw new RuntimeException("Cannot send a message. Unknown WebSocket id " + id); } try { client.send(ByteString.decodeBase64(base64String)); } catch (Exception e) { notifyWebSocketFailed(id, e.getMessage()); } } @ReactMethod public void ping(int id) { WebSocket client = mWebSocketConnections.get(id); if (client == null) { // This is a programmer error throw new RuntimeException("Cannot send a message. 
Unknown WebSocket id " + id);
    }
    try {
      client.send(ByteString.EMPTY);
    } catch (Exception e) {
      notifyWebSocketFailed(id, e.getMessage());
    }
  }

  private void notifyWebSocketFailed(int id, String message) {
    WritableMap params = Arguments.createMap();
    params.putInt("id", id);
    params.putString("message", message);
    sendEvent("websocketFailed", params);
  }

  /**
   * Get the default HTTP(S) origin for a specific WebSocket URI
   *
   * @param uri the WebSocket URI to derive the origin from
   * @return A string of the endpoint converted to HTTP protocol (http[s]://host[:port])
   */
  private static String getDefaultOrigin(String uri) {
    try {
      String defaultOrigin;
      String scheme = "";
      URI requestURI = new URI(uri);
      if (requestURI.getScheme().equals("wss")) {
        scheme += "https";
      } else if (requestURI.getScheme().equals("ws")) {
        scheme += "http";
      } else if (requestURI.getScheme().equals("http") || requestURI.getScheme().equals("https")) {
        scheme += requestURI.getScheme();
      }

      if (requestURI.getPort() != -1) {
        defaultOrigin = String.format(
            "%s://%s:%s",
            scheme,
            requestURI.getHost(),
            requestURI.getPort());
      } else {
        defaultOrigin = String.format("%s://%s/", scheme, requestURI.getHost());
      }

      return defaultOrigin;
    } catch (URISyntaxException e) {
      throw new IllegalArgumentException("Unable to set " + uri + " as default origin header");
    }
  }

  /**
   * Get the cookie for a specific domain
   *
   * @param uri the request URI whose origin is used to look up cookies
   * @return The cookie header or null if none is set
   */
  private String getCookie(String uri) {
    try {
      URI origin = new URI(getDefaultOrigin(uri));
      Map<String, List<String>> cookieMap = mCookieHandler.get(origin, new HashMap<>());
      List<String> cookieList = cookieMap.get("Cookie");

      if (cookieList == null || cookieList.isEmpty()) {
        return null;
      }

      return cookieList.get(0);
    } catch (URISyntaxException | IOException e) {
      throw new IllegalArgumentException("Unable to get cookie from " + uri);
    }
  }
}
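/*
 * Illustrative sketch, not part of the module: a standalone restatement of the ws/wss -> http/https
 * origin mapping that the private getDefaultOrigin(String) above performs before connect() adds
 * the "origin" header. OriginMappingExample and its method name are hypothetical; only java.net.URI
 * is used, so the snippet runs on its own.
 */
package com.facebook.react.modules.websocket;

import java.net.URI;
import java.net.URISyntaxException;

final class OriginMappingExample {

  private OriginMappingExample() {
  }

  static String defaultOriginFor(String uri) throws URISyntaxException {
    URI requestUri = new URI(uri);
    // ws -> http and wss -> https; http and https pass through unchanged, mirroring the module.
    String scheme = "";
    if ("wss".equals(requestUri.getScheme())) {
      scheme += "https";
    } else if ("ws".equals(requestUri.getScheme())) {
      scheme += "http";
    } else if ("http".equals(requestUri.getScheme()) || "https".equals(requestUri.getScheme())) {
      scheme += requestUri.getScheme();
    }
    // An explicit port is kept; otherwise only the host is used, as in the module above.
    return requestUri.getPort() != -1
        ? String.format("%s://%s:%s", scheme, requestUri.getHost(), requestUri.getPort())
        : String.format("%s://%s/", scheme, requestUri.getHost());
  }

  public static void main(String[] args) throws URISyntaxException {
    // e.g. "wss://example.com:8443/chat" -> "https://example.com:8443"
    System.out.println(defaultOriginFor("wss://example.com:8443/chat"));
  }
}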
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.wm.impl; import com.intellij.ide.RemoteDesktopService; import com.intellij.ide.ui.UISettings; import com.intellij.ide.ui.UISettingsListener; import com.intellij.openapi.Disposable; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.ui.ThreeComponentsSplitter; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.wm.ToolWindow; import com.intellij.openapi.wm.ToolWindowAnchor; import com.intellij.openapi.wm.ToolWindowType; import com.intellij.openapi.wm.ex.LayoutFocusTraversalPolicyExt; import com.intellij.openapi.wm.impl.commands.FinalizableCommand; import com.intellij.reference.SoftReference; import com.intellij.ui.OnePixelSplitter; import com.intellij.ui.components.JBLayeredPane; import com.intellij.ui.paint.PaintUtil; import com.intellij.util.Function; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.JBUI.ScaleContext; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.awt.geom.Point2D; import java.awt.image.BufferedImage; import java.util.List; import java.util.*; import static com.intellij.util.ui.UIUtil.useSafely; /** * This panel contains all tool stripes and JLayeredPane at the center area. All tool windows are * located inside this layered pane. * * @author Anton Katilin * @author Vladimir Kondratyev */ public final class ToolWindowsPane extends JBLayeredPane implements UISettingsListener, Disposable { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.ToolWindowsPane"); private final IdeFrameImpl myFrame; private final Map<String, StripeButton> myId2Button = new HashMap<>(); private final Map<String, InternalDecorator> myId2Decorator = new HashMap<>(); private final Map<InternalDecorator, WindowInfoImpl> myDecorator2Info = new HashMap<>(); private final Map<String, Float> myId2SplitProportion = new HashMap<>(); private Pair<ToolWindow, Integer> myMaximizedProportion; /** * This panel is the layered pane where all sliding tool windows are located. The DEFAULT * layer contains splitters. The PALETTE layer contains all sliding tool windows. */ private final MyLayeredPane myLayeredPane; /* * Splitters. */ private final ThreeComponentsSplitter myVerticalSplitter; private final ThreeComponentsSplitter myHorizontalSplitter; /* * Tool stripes. 
*/ private final Stripe myLeftStripe; private final Stripe myRightStripe; private final Stripe myBottomStripe; private final Stripe myTopStripe; private final List<Stripe> myStripes = new ArrayList<>(); private final ToolWindowManagerImpl myManager; private boolean myStripesOverlayed; private boolean myWidescreen; private boolean myLeftHorizontalSplit; private boolean myRightHorizontalSplit; ToolWindowsPane(@NotNull IdeFrameImpl frame, @NotNull ToolWindowManagerImpl manager) { myManager = manager; setOpaque(false); myFrame = frame; // Splitters myVerticalSplitter = new ThreeComponentsSplitter(true); myVerticalSplitter.setMinSize(JBUI.scale(30)); Disposer.register(this, myVerticalSplitter); myVerticalSplitter.setDividerWidth(0); myVerticalSplitter.setDividerMouseZoneSize(Registry.intValue("ide.splitter.mouseZone")); myVerticalSplitter.setBackground(Color.gray); myHorizontalSplitter = new ThreeComponentsSplitter(false); myHorizontalSplitter.setMinSize(JBUI.scale(30)); Disposer.register(this, myHorizontalSplitter); myHorizontalSplitter.setDividerWidth(0); myHorizontalSplitter.setDividerMouseZoneSize(Registry.intValue("ide.splitter.mouseZone")); myHorizontalSplitter.setBackground(Color.gray); myWidescreen = UISettings.getInstance().getWideScreenSupport(); myLeftHorizontalSplit = UISettings.getInstance().getLeftHorizontalSplit(); myRightHorizontalSplit = UISettings.getInstance().getRightHorizontalSplit(); if (myWidescreen) { myHorizontalSplitter.setInnerComponent(myVerticalSplitter); } else { myVerticalSplitter.setInnerComponent(myHorizontalSplitter); } // Tool stripes myTopStripe = new Stripe(SwingConstants.TOP, manager); myStripes.add(myTopStripe); myLeftStripe = new Stripe(SwingConstants.LEFT, manager); myStripes.add(myLeftStripe); myBottomStripe = new Stripe(SwingConstants.BOTTOM, manager); myStripes.add(myBottomStripe); myRightStripe = new Stripe(SwingConstants.RIGHT, manager); myStripes.add(myRightStripe); updateToolStripesVisibility(); // Layered pane myLayeredPane = new MyLayeredPane(myWidescreen ? 
myHorizontalSplitter : myVerticalSplitter); // Compose layout add(myTopStripe, JLayeredPane.POPUP_LAYER); add(myLeftStripe, JLayeredPane.POPUP_LAYER); add(myBottomStripe, JLayeredPane.POPUP_LAYER); add(myRightStripe, JLayeredPane.POPUP_LAYER); add(myLayeredPane, JLayeredPane.DEFAULT_LAYER); setFocusTraversalPolicy(new LayoutFocusTraversalPolicyExt()); } @Override public void doLayout() { Dimension size = getSize(); if (!myTopStripe.isVisible()) { myTopStripe.setBounds(0, 0, 0, 0); myBottomStripe.setBounds(0, 0, 0, 0); myLeftStripe.setBounds(0, 0, 0, 0); myRightStripe.setBounds(0, 0, 0, 0); myLayeredPane.setBounds(0, 0, getWidth(), getHeight()); } else { Dimension topSize = myTopStripe.getPreferredSize(); Dimension bottomSize = myBottomStripe.getPreferredSize(); Dimension leftSize = myLeftStripe.getPreferredSize(); Dimension rightSize = myRightStripe.getPreferredSize(); myTopStripe.setBounds(0, 0, size.width, topSize.height); myLeftStripe.setBounds(0, topSize.height, leftSize.width, size.height - topSize.height - bottomSize.height); myRightStripe .setBounds(size.width - rightSize.width, topSize.height, rightSize.width, size.height - topSize.height - bottomSize.height); myBottomStripe.setBounds(0, size.height - bottomSize.height, size.width, bottomSize.height); if (UISettings.getInstance().getHideToolStripes() || UISettings.getInstance().getPresentationMode()) { myLayeredPane.setBounds(0, 0, size.width, size.height); } else { myLayeredPane.setBounds(leftSize.width, topSize.height, size.width - leftSize.width - rightSize.width, size.height - topSize.height - bottomSize.height); } } } /** * Invoked when enclosed frame is being shown. */ @Override public final void addNotify() { super.addNotify(); } public Project getProject() { return myFrame.getProject(); } @Override public final void uiSettingsChanged(final UISettings uiSettings) { updateToolStripesVisibility(); updateLayout(); } /** * Creates command which adds button into the specified tool stripe. * Command uses copy of passed {@code info} object. * * @param button button which should be added. * @param info window info for the corresponded tool window. * @param comparator which is used to sort buttons within the stripe. * @param finishCallBack invoked when the command is completed. */ @NotNull final FinalizableCommand createAddButtonCmd(final StripeButton button, @NotNull WindowInfoImpl info, @NotNull Comparator<StripeButton> comparator, @NotNull Runnable finishCallBack) { final WindowInfoImpl copiedInfo = info.copy(); myId2Button.put(copiedInfo.getId(), button); return new AddToolStripeButtonCmd(button, copiedInfo, comparator, finishCallBack); } /** * Creates command which shows tool window with specified set of parameters. * Command uses cloned copy of passed {@code info} object. * * @param dirtyMode if {@code true} then JRootPane will not be validated and repainted after adding * the decorator. Moreover in this (dirty) mode animation doesn't work. */ @NotNull final FinalizableCommand createAddDecoratorCmd(@NotNull InternalDecorator decorator, @NotNull WindowInfoImpl info, final boolean dirtyMode, @NotNull Runnable finishCallBack) { final WindowInfoImpl copiedInfo = info.copy(); final String id = copiedInfo.getId(); myDecorator2Info.put(decorator, copiedInfo); myId2Decorator.put(id, decorator); if (info.isDocked()) { WindowInfoImpl sideInfo = getDockedInfoAt(info.getAnchor(), !info.isSplit()); return sideInfo == null ? 
new AddDockedComponentCmd(decorator, info, dirtyMode, finishCallBack) : new AddAndSplitDockedComponentCmd(decorator, info, dirtyMode, finishCallBack); } else if (info.isSliding()) { return new AddSlidingComponentCmd(decorator, info, dirtyMode, finishCallBack); } else { throw new IllegalArgumentException("Unknown window type: " + info.getType()); } } /** * Creates command which removes tool button from tool stripe. * * @param id {@code ID} of the button to be removed. */ @Nullable final FinalizableCommand createRemoveButtonCmd(@NotNull WindowInfoImpl info, @NotNull String id, @NotNull Runnable finishCallBack) { StripeButton button = myId2Button.remove(id); if (button == null) { return null; } return new RemoveToolStripeButtonCmd(button, info, finishCallBack); } /** * Creates command which hides tool window with specified set of parameters. * * @param dirtyMode if {@code true} then JRootPane will not be validated and repainted after removing * the decorator. Moreover in this (dirty) mode animation doesn't work. */ @NotNull final FinalizableCommand createRemoveDecoratorCmd(@NotNull String id, final boolean dirtyMode, @NotNull Runnable finishCallBack) { final Component decorator = getDecoratorById(id); final WindowInfoImpl info = getDecoratorInfoById(id); myDecorator2Info.remove(decorator); myId2Decorator.remove(id); WindowInfoImpl sideInfo = getDockedInfoAt(info.getAnchor(), !info.isSplit()); if (info.isDocked()) { return sideInfo == null ? new RemoveDockedComponentCmd(info, dirtyMode, finishCallBack) : new RemoveSplitAndDockedComponentCmd(info, dirtyMode, finishCallBack); } else if (info.isSliding()) { return new RemoveSlidingComponentCmd(decorator, info, dirtyMode, finishCallBack); } else { throw new IllegalArgumentException("Unknown window type"); } } /** * Creates command which sets specified document component. * * @param component component to be set. */ @NotNull final FinalizableCommand createSetEditorComponentCmd(final JComponent component, @NotNull Runnable finishCallBack) { return new SetEditorComponentCmd(component, finishCallBack); } @NotNull final FinalizableCommand createUpdateButtonPositionCmd(@NotNull String id, @NotNull Runnable finishCallback) { return new UpdateButtonPositionCmd(id, finishCallback); } @NotNull public final JComponent getMyLayeredPane() { return myLayeredPane; } private InternalDecorator getDecoratorById(final String id) { return myId2Decorator.get(id); } /** * @param id {@code ID} of decorator. * @return {@code WindowInfo} associated with specified window decorator. */ private WindowInfoImpl getDecoratorInfoById(final String id) { return myDecorator2Info.get(myId2Decorator.get(id)); } /** * Sets (docks) specified component to the specified anchor. 
*/ private void setComponent(final JComponent component, @NotNull ToolWindowAnchor anchor, final float weight) { if (ToolWindowAnchor.TOP == anchor) { myVerticalSplitter.setFirstComponent(component); myVerticalSplitter.setFirstSize((int)(myLayeredPane.getHeight() * weight)); } else if (ToolWindowAnchor.LEFT == anchor) { myHorizontalSplitter.setFirstComponent(component); myHorizontalSplitter.setFirstSize((int)(myLayeredPane.getWidth() * weight)); } else if (ToolWindowAnchor.BOTTOM == anchor) { myVerticalSplitter.setLastComponent(component); myVerticalSplitter.setLastSize((int)(myLayeredPane.getHeight() * weight)); } else if (ToolWindowAnchor.RIGHT == anchor) { myHorizontalSplitter.setLastComponent(component); myHorizontalSplitter.setLastSize((int)(myLayeredPane.getWidth() * weight)); } else { LOG.error("unknown anchor: " + anchor); } } private JComponent getComponentAt(@NotNull ToolWindowAnchor anchor) { if (ToolWindowAnchor.TOP == anchor) { return myVerticalSplitter.getFirstComponent(); } else if (ToolWindowAnchor.LEFT == anchor) { return myHorizontalSplitter.getFirstComponent(); } else if (ToolWindowAnchor.BOTTOM == anchor) { return myVerticalSplitter.getLastComponent(); } else if (ToolWindowAnchor.RIGHT == anchor) { return myHorizontalSplitter.getLastComponent(); } else { LOG.error("unknown anchor: " + anchor); return null; } } private float getPreferredSplitProportion(@NotNull String id, float defaultValue) { Float f = myId2SplitProportion.get(id); return f == null ? defaultValue : f; } private WindowInfoImpl getDockedInfoAt(@NotNull ToolWindowAnchor anchor, boolean side) { for (WindowInfoImpl info : myDecorator2Info.values()) { if (info.isVisible() && info.isDocked() && info.getAnchor() == anchor && side == info.isSplit()) { return info; } } return null; } private void setDocumentComponent(final JComponent component) { (myWidescreen ? myVerticalSplitter : myHorizontalSplitter).setInnerComponent(component); } private void updateToolStripesVisibility() { boolean oldVisible = myLeftStripe.isVisible(); final boolean showButtons = !UISettings.getInstance().getHideToolStripes() && !UISettings.getInstance().getPresentationMode(); boolean visible = showButtons || myStripesOverlayed; myLeftStripe.setVisible(visible); myRightStripe.setVisible(visible); myTopStripe.setVisible(visible); myBottomStripe.setVisible(visible); boolean overlayed = !showButtons && myStripesOverlayed; myLeftStripe.setOverlayed(overlayed); myRightStripe.setOverlayed(overlayed); myTopStripe.setOverlayed(overlayed); myBottomStripe.setOverlayed(overlayed); if (oldVisible != visible) { revalidate(); repaint(); } } public int getBottomHeight() { return myBottomStripe.isVisible() ? 
myBottomStripe.getHeight() : 0; } public boolean isBottomSideToolWindowsVisible() { return getComponentAt(ToolWindowAnchor.BOTTOM) != null; } @Nullable Stripe getStripeFor(String id) { ToolWindow window = myManager.getToolWindow(id); if (window == null) { return null; } final ToolWindowAnchor anchor = myManager.getToolWindow(id).getAnchor(); if (ToolWindowAnchor.TOP == anchor) { return myTopStripe; } if (ToolWindowAnchor.BOTTOM == anchor) { return myBottomStripe; } if (ToolWindowAnchor.LEFT == anchor) { return myLeftStripe; } if (ToolWindowAnchor.RIGHT == anchor) { return myRightStripe; } throw new IllegalArgumentException("Anchor=" + anchor); } @Nullable Stripe getStripeFor(@NotNull Rectangle screenRec, @NotNull Stripe preferred) { if (preferred.containsScreen(screenRec)) { return myStripes.get(myStripes.indexOf(preferred)); } for (Stripe each : myStripes) { if (each.containsScreen(screenRec)) { return myStripes.get(myStripes.indexOf(each)); } } return null; } void startDrag() { for (Stripe each : myStripes) { each.startDrag(); } } void stopDrag() { for (Stripe each : myStripes) { each.stopDrag(); } } void stretchWidth(@NotNull ToolWindow wnd, int value) { stretch(wnd, value); } void stretchHeight(@NotNull ToolWindow wnd, int value) { stretch(wnd, value); } private void stretch(@NotNull ToolWindow wnd, int value) { Pair<Resizer, Component> pair = findResizerAndComponent(wnd); if (pair == null) return; boolean vertical = wnd.getAnchor() == ToolWindowAnchor.TOP || wnd.getAnchor() == ToolWindowAnchor.BOTTOM; int actualSize = (vertical ? pair.second.getHeight() : pair.second.getWidth()) + value; boolean first = wnd.getAnchor() == ToolWindowAnchor.LEFT || wnd.getAnchor() == ToolWindowAnchor.TOP; int maxValue = vertical ? myVerticalSplitter.getMaxSize(first) : myHorizontalSplitter.getMaxSize(first); int minValue = vertical ? myVerticalSplitter.getMinSize(first) : myHorizontalSplitter.getMinSize(first); pair.first.setSize(Math.max(minValue, Math.min(maxValue, actualSize))); } @Nullable private Pair<Resizer, Component> findResizerAndComponent(@NotNull ToolWindow wnd) { if (!wnd.isVisible()) return null; Resizer resizer = null; Component cmp = null; if (wnd.getType() == ToolWindowType.DOCKED) { cmp = getComponentAt(wnd.getAnchor()); if (cmp != null) { if (wnd.getAnchor().isHorizontal()) { resizer = myVerticalSplitter.getFirstComponent() == cmp ? new Resizer.Splitter.FirstComponent(myVerticalSplitter) : new Resizer.Splitter.LastComponent(myVerticalSplitter); } else { resizer = myHorizontalSplitter.getFirstComponent() == cmp ? new Resizer.Splitter.FirstComponent(myHorizontalSplitter) : new Resizer.Splitter.LastComponent(myHorizontalSplitter); } } } else if (wnd.getType() == ToolWindowType.SLIDING) { cmp = wnd.getComponent(); while (cmp != null) { if (cmp.getParent() == myLayeredPane) break; cmp = cmp.getParent(); } if (cmp != null) { if (wnd.getAnchor() == ToolWindowAnchor.TOP) { resizer = new Resizer.LayeredPane.Top(cmp); } else if (wnd.getAnchor() == ToolWindowAnchor.BOTTOM) { resizer = new Resizer.LayeredPane.Bottom(cmp); } else if (wnd.getAnchor() == ToolWindowAnchor.LEFT) { resizer = new Resizer.LayeredPane.Left(cmp); } else if (wnd.getAnchor() == ToolWindowAnchor.RIGHT) { resizer = new Resizer.LayeredPane.Right(cmp); } } } return resizer != null ? Pair.create(resizer, cmp) : null; } private void updateLayout() { UISettings uiSettings = UISettings.getInstance(); if (myWidescreen != uiSettings.getWideScreenSupport()) { JComponent documentComponent = (myWidescreen ? 
myVerticalSplitter : myHorizontalSplitter).getInnerComponent(); myWidescreen = uiSettings.getWideScreenSupport(); if (myWidescreen) { myVerticalSplitter.setInnerComponent(null); myHorizontalSplitter.setInnerComponent(myVerticalSplitter); } else { myHorizontalSplitter.setInnerComponent(null); myVerticalSplitter.setInnerComponent(myHorizontalSplitter); } myLayeredPane.remove(myWidescreen ? myVerticalSplitter : myHorizontalSplitter); myLayeredPane.add(myWidescreen ? myHorizontalSplitter : myVerticalSplitter, DEFAULT_LAYER); setDocumentComponent(documentComponent); } if (myLeftHorizontalSplit != uiSettings.getLeftHorizontalSplit()) { JComponent component = getComponentAt(ToolWindowAnchor.LEFT); if (component instanceof Splitter) { Splitter splitter = (Splitter)component; InternalDecorator first = (InternalDecorator)splitter.getFirstComponent(); InternalDecorator second = (InternalDecorator)splitter.getSecondComponent(); setComponent(splitter, ToolWindowAnchor.LEFT, ToolWindowAnchor.LEFT.isSplitVertically() ? first.getWindowInfo().getWeight() : first.getWindowInfo().getWeight() + second.getWindowInfo().getWeight()); } myLeftHorizontalSplit = uiSettings.getLeftHorizontalSplit(); } if (myRightHorizontalSplit != uiSettings.getRightHorizontalSplit()) { JComponent component = getComponentAt(ToolWindowAnchor.RIGHT); if (component instanceof Splitter) { Splitter splitter = (Splitter)component; InternalDecorator first = (InternalDecorator)splitter.getFirstComponent(); InternalDecorator second = (InternalDecorator)splitter.getSecondComponent(); setComponent(splitter, ToolWindowAnchor.RIGHT, ToolWindowAnchor.RIGHT.isSplitVertically() ? first.getWindowInfo().getWeight() : first.getWindowInfo().getWeight() + second.getWindowInfo().getWeight()); } myRightHorizontalSplit = uiSettings.getRightHorizontalSplit(); } } public boolean isMaximized(@NotNull ToolWindow wnd) { return myMaximizedProportion != null && myMaximizedProportion.first == wnd; } void setMaximized(@NotNull ToolWindow wnd, boolean maximized) { Pair<Resizer, Component> resizerAndComponent = findResizerAndComponent(wnd); if (resizerAndComponent == null) return; if (!maximized) { ToolWindow maximizedWindow = myMaximizedProportion.first; assert maximizedWindow == wnd; resizerAndComponent.first.setSize(myMaximizedProportion.second); myMaximizedProportion = null; } else { int size = wnd.getAnchor().isHorizontal() ? 
resizerAndComponent.second.getHeight() : resizerAndComponent.second.getWidth(); stretch(wnd, Short.MAX_VALUE); myMaximizedProportion = Pair.create(wnd, size); } doLayout(); } @FunctionalInterface interface Resizer { void setSize(int size); abstract class Splitter implements Resizer { ThreeComponentsSplitter mySplitter; Splitter(@NotNull ThreeComponentsSplitter splitter) { mySplitter = splitter; } static class FirstComponent extends Splitter { FirstComponent(@NotNull ThreeComponentsSplitter splitter) { super(splitter); } @Override public void setSize(int size) { mySplitter.setFirstSize(size); } } static class LastComponent extends Splitter { LastComponent(@NotNull ThreeComponentsSplitter splitter) { super(splitter); } @Override public void setSize(int size) { mySplitter.setLastSize(size); } } } abstract class LayeredPane implements Resizer { Component myComponent; LayeredPane(@NotNull Component component) { myComponent = component; } @Override public final void setSize(int size) { _setSize(size); if (myComponent.getParent() instanceof JComponent) { JComponent parent = (JComponent)myComponent; parent.revalidate(); parent.repaint(); } } abstract void _setSize(int size); static class Left extends LayeredPane { Left(@NotNull Component component) { super(component); } @Override public void _setSize(int size) { myComponent.setSize(size, myComponent.getHeight()); } } static class Right extends LayeredPane { Right(@NotNull Component component) { super(component); } @Override public void _setSize(int size) { Rectangle bounds = myComponent.getBounds(); int delta = size - bounds.width; bounds.x -= delta; bounds.width += delta; myComponent.setBounds(bounds); } } static class Top extends LayeredPane { Top(@NotNull Component component) { super(component); } @Override public void _setSize(int size) { myComponent.setSize(myComponent.getWidth(), size); } } static class Bottom extends LayeredPane { Bottom(@NotNull Component component) { super(component); } @Override public void _setSize(int size) { Rectangle bounds = myComponent.getBounds(); int delta = size - bounds.height; bounds.y -= delta; bounds.height += delta; myComponent.setBounds(bounds); } } } } private final class AddDockedComponentCmd extends FinalizableCommand { private final JComponent myComponent; private final WindowInfoImpl myInfo; private final boolean myDirtyMode; AddDockedComponentCmd(@NotNull JComponent component, @NotNull WindowInfoImpl info, final boolean dirtyMode, @NotNull Runnable finishCallBack) { super(finishCallBack); myComponent = component; myInfo = info; myDirtyMode = dirtyMode; } @Override public final void run() { try { final ToolWindowAnchor anchor = myInfo.getAnchor(); setComponent(myComponent, anchor, normalizeWeigh(myInfo.getWeight())); if (!myDirtyMode) { myLayeredPane.validate(); myLayeredPane.repaint(); } } finally { finish(); } } } private final class AddAndSplitDockedComponentCmd extends FinalizableCommand { private final JComponent myNewComponent; private final WindowInfoImpl myInfo; private final boolean myDirtyMode; private AddAndSplitDockedComponentCmd(@NotNull JComponent newComponent, @NotNull WindowInfoImpl info, final boolean dirtyMode, @NotNull Runnable finishCallBack) { super(finishCallBack); myNewComponent = newComponent; myInfo = info; myDirtyMode = dirtyMode; } @Override public void run() { try { final ToolWindowAnchor anchor = myInfo.getAnchor(); class MySplitter extends OnePixelSplitter implements UISettingsListener { @Override public void uiSettingsChanged(UISettings uiSettings) { if (anchor == 
ToolWindowAnchor.LEFT) { setOrientation(!uiSettings.getLeftHorizontalSplit()); } else if (anchor == ToolWindowAnchor.RIGHT) { setOrientation(!uiSettings.getRightHorizontalSplit()); } } @Override public String toString() { return "[" + getFirstComponent() + "|" + getSecondComponent() + "]"; } } Splitter splitter = new MySplitter(); splitter.setOrientation(anchor.isSplitVertically()); if (!anchor.isHorizontal()) { splitter.setAllowSwitchOrientationByMouseClick(true); splitter.addPropertyChangeListener(evt -> { if (!Splitter.PROP_ORIENTATION.equals(evt.getPropertyName())) return; boolean isSplitterHorizontalNow = !splitter.isVertical(); UISettings settings = UISettings.getInstance(); if (anchor == ToolWindowAnchor.LEFT) { if (settings.getLeftHorizontalSplit() != isSplitterHorizontalNow) { settings.setLeftHorizontalSplit(isSplitterHorizontalNow); settings.fireUISettingsChanged(); } } if (anchor == ToolWindowAnchor.RIGHT) { if (settings.getRightHorizontalSplit() != isSplitterHorizontalNow) { settings.setRightHorizontalSplit(isSplitterHorizontalNow); settings.fireUISettingsChanged(); } } }); } JComponent c = getComponentAt(anchor); //If all components are hidden for anchor we should find the second component to put in a splitter //Otherwise we add empty splitter if (c == null) { List<String> ids = ToolWindowsPane.this.myManager.getIdsOn(anchor); ids.remove(myInfo.getId()); for (Iterator<String> iterator = ids.iterator(); iterator.hasNext(); ) { String id = iterator.next(); ToolWindow window = myManager.getToolWindow(id); if (window == null || window.isSplitMode() == myInfo.isSplit() || !window.isVisible()) iterator.remove(); } if (!ids.isEmpty()) { InternalDecorator anotherDecorator = getDecoratorById(ids.get(0)); if (anotherDecorator!= null) { c = anotherDecorator; } } if (c == null) { LOG.error("Empty splitter @ " + anchor + " during AddAndSplitDockedComponentCmd for " + myInfo.getId()); } } float newWeight; if (c instanceof InternalDecorator) { InternalDecorator oldComponent = (InternalDecorator)c; WindowInfoImpl oldInfo = oldComponent.getWindowInfo(); if (myInfo.isSplit()) { splitter.setFirstComponent(oldComponent); splitter.setSecondComponent(myNewComponent); float proportion = getPreferredSplitProportion(oldInfo.getId(), normalizeWeigh(oldInfo.getSideWeight() / (oldInfo.getSideWeight() + myInfo.getSideWeight()))); splitter.setProportion(proportion); if (!anchor.isHorizontal() && !anchor.isSplitVertically()) { newWeight = normalizeWeigh(oldInfo.getWeight() + myInfo.getWeight()); } else { newWeight = normalizeWeigh(oldInfo.getWeight()); } } else { splitter.setFirstComponent(myNewComponent); splitter.setSecondComponent(oldComponent); splitter.setProportion(normalizeWeigh(myInfo.getSideWeight())); if (!anchor.isHorizontal() && !anchor.isSplitVertically()) { newWeight = normalizeWeigh(oldInfo.getWeight() + myInfo.getWeight()); } else { newWeight = normalizeWeigh(myInfo.getWeight()); } } } else { newWeight = normalizeWeigh(myInfo.getWeight()); } setComponent(splitter, anchor, newWeight); if (!myDirtyMode) { myLayeredPane.validate(); myLayeredPane.repaint(); } } finally { finish(); } } } private final class AddSlidingComponentCmd extends FinalizableCommand { private final Component myComponent; private final WindowInfoImpl myInfo; private final boolean myDirtyMode; AddSlidingComponentCmd(@NotNull Component component, @NotNull WindowInfoImpl info, final boolean dirtyMode, @NotNull Runnable finishCallBack) { super(finishCallBack); myComponent = component; myInfo = info; myDirtyMode = dirtyMode; } 
@Override public final void run() { try { // Show component. if (!myDirtyMode && UISettings.getInstance().getAnimateWindows() && !RemoteDesktopService.isRemoteSession()) { // Prepare top image. This image is scrolling over bottom image. final Image topImage = myLayeredPane.getTopImage(); Rectangle bounds = myComponent.getBounds(); useSafely(topImage.getGraphics(), topGraphics -> { myLayeredPane.add(myComponent, JLayeredPane.PALETTE_LAYER); myLayeredPane.moveToFront(myComponent); myLayeredPane.setBoundsInPaletteLayer(myComponent, myInfo.getAnchor(), myInfo.getWeight()); myComponent.paint(topGraphics); myLayeredPane.remove(myComponent); }); // Prepare bottom image. final Image bottomImage = myLayeredPane.getBottomImage(); Point2D bottomImageOffset = PaintUtil.getFractOffsetInRootPane(myLayeredPane); useSafely(bottomImage.getGraphics(), bottomGraphics -> { bottomGraphics.setClip(0, 0, bounds.width, bounds.height); bottomGraphics.translate(bottomImageOffset.getX() - bounds.x, bottomImageOffset.getY() - bounds.y); myLayeredPane.paint(bottomGraphics); }); // Start animation. final Surface surface = new Surface(topImage, bottomImage, PaintUtil.negate(bottomImageOffset), 1, myInfo.getAnchor(), UISettings.ANIMATION_DURATION); myLayeredPane.add(surface, JLayeredPane.PALETTE_LAYER); surface.setBounds(bounds); myLayeredPane.validate(); myLayeredPane.repaint(); surface.runMovement(); myLayeredPane.remove(surface); myLayeredPane.add(myComponent, JLayeredPane.PALETTE_LAYER); } else { // not animated myLayeredPane.add(myComponent, JLayeredPane.PALETTE_LAYER); myLayeredPane.setBoundsInPaletteLayer(myComponent, myInfo.getAnchor(), myInfo.getWeight()); } if (!myDirtyMode) { myLayeredPane.validate(); myLayeredPane.repaint(); } } finally { finish(); } } } private final class AddToolStripeButtonCmd extends FinalizableCommand { private final StripeButton myButton; private final WindowInfoImpl myInfo; private final Comparator<StripeButton> myComparator; AddToolStripeButtonCmd(final StripeButton button, @NotNull WindowInfoImpl info, @NotNull Comparator<StripeButton> comparator, @NotNull Runnable finishCallBack) { super(finishCallBack); myButton = button; myInfo = info; myComparator = comparator; } @Override public final void run() { try { final ToolWindowAnchor anchor = myInfo.getAnchor(); if (ToolWindowAnchor.TOP == anchor) { myTopStripe.addButton(myButton, myComparator); } else if (ToolWindowAnchor.LEFT == anchor) { myLeftStripe.addButton(myButton, myComparator); } else if (ToolWindowAnchor.BOTTOM == anchor) { myBottomStripe.addButton(myButton, myComparator); } else if (ToolWindowAnchor.RIGHT == anchor) { myRightStripe.addButton(myButton, myComparator); } else { LOG.error("unknown anchor: " + anchor); } validate(); repaint(); } finally { finish(); } } } private final class RemoveToolStripeButtonCmd extends FinalizableCommand { private final StripeButton myButton; private final WindowInfoImpl myInfo; private final ToolWindowAnchor myAnchor; RemoveToolStripeButtonCmd(@NotNull StripeButton button, @NotNull WindowInfoImpl info, @NotNull Runnable finishCallBack) { super(finishCallBack); myButton = button; myInfo = info; myAnchor = myInfo.getAnchor(); } @Override public final void run() { try { if (ToolWindowAnchor.TOP == myAnchor) { myTopStripe.removeButton(myButton); } else if (ToolWindowAnchor.LEFT == myAnchor) { myLeftStripe.removeButton(myButton); } else if (ToolWindowAnchor.BOTTOM == myAnchor) { myBottomStripe.removeButton(myButton); } else if (ToolWindowAnchor.RIGHT == myAnchor) { 
myRightStripe.removeButton(myButton); } else { LOG.error("unknown anchor: " + myAnchor); } validate(); repaint(); } finally { finish(); } } } private final class RemoveDockedComponentCmd extends FinalizableCommand { private final WindowInfoImpl myInfo; private final boolean myDirtyMode; RemoveDockedComponentCmd(@NotNull WindowInfoImpl info, final boolean dirtyMode, @NotNull Runnable finishCallBack) { super(finishCallBack); myInfo = info; myDirtyMode = dirtyMode; } @Override public final void run() { try { setComponent(null, myInfo.getAnchor(), 0); if (!myDirtyMode) { myLayeredPane.validate(); myLayeredPane.repaint(); } transferFocus(); } finally { finish(); } } } private final class RemoveSplitAndDockedComponentCmd extends FinalizableCommand { private final WindowInfoImpl myInfo; private final boolean myDirtyMode; private RemoveSplitAndDockedComponentCmd(@NotNull WindowInfoImpl info, boolean dirtyMode, @NotNull Runnable finishCallBack) { super(finishCallBack); myInfo = info; myDirtyMode = dirtyMode; } @Override public void run() { try { ToolWindowAnchor anchor = myInfo.getAnchor(); JComponent c = getComponentAt(anchor); if (c instanceof Splitter) { Splitter splitter = (Splitter)c; final InternalDecorator component = myInfo.isSplit() ? (InternalDecorator)splitter.getFirstComponent() : (InternalDecorator)splitter.getSecondComponent(); if (myInfo.isSplit() && component != null) { myId2SplitProportion.put(component.getWindowInfo().getId(), splitter.getProportion()); } setComponent(component, anchor, component != null ? component.getWindowInfo().getWeight() : 0); } else { setComponent(null, anchor, 0); } if (!myDirtyMode) { myLayeredPane.validate(); myLayeredPane.repaint(); } transferFocus(); } finally { finish(); } } } private final class RemoveSlidingComponentCmd extends FinalizableCommand { private final Component myComponent; private final WindowInfoImpl myInfo; private final boolean myDirtyMode; RemoveSlidingComponentCmd(Component component, @NotNull WindowInfoImpl info, boolean dirtyMode, @NotNull Runnable finishCallBack) { super(finishCallBack); myComponent = component; myInfo = info; myDirtyMode = dirtyMode; } @Override public final void run() { try { final UISettings uiSettings = UISettings.getInstance(); if (!myDirtyMode && uiSettings.getAnimateWindows() && !RemoteDesktopService.isRemoteSession()) { final Rectangle bounds = myComponent.getBounds(); // Prepare top image. This image is scrolling over bottom image. It contains // a picture of the component that is being removed. final Image topImage = myLayeredPane.getTopImage(); useSafely(topImage.getGraphics(), topGraphics -> myComponent.paint(topGraphics)); // Prepare bottom image. This image contains a picture of the component that is located // under the component that is being removed. final Image bottomImage = myLayeredPane.getBottomImage(); Point2D bottomImageOffset = PaintUtil.getFractOffsetInRootPane(myLayeredPane); useSafely(bottomImage.getGraphics(), bottomGraphics -> { myLayeredPane.remove(myComponent); bottomGraphics.clipRect(0, 0, bounds.width, bounds.height); bottomGraphics.translate(bottomImageOffset.getX() - bounds.x, bottomImageOffset.getY() - bounds.y); myLayeredPane.paint(bottomGraphics); }); // Remove component from the layered pane and start animation.
final Surface surface = new Surface(topImage, bottomImage, PaintUtil.negate(bottomImageOffset), -1, myInfo.getAnchor(), UISettings.ANIMATION_DURATION); myLayeredPane.add(surface, JLayeredPane.PALETTE_LAYER); surface.setBounds(bounds); myLayeredPane.validate(); myLayeredPane.repaint(); surface.runMovement(); myLayeredPane.remove(surface); } else { // not animated myLayeredPane.remove(myComponent); } if (!myDirtyMode) { myLayeredPane.validate(); myLayeredPane.repaint(); } transferFocus(); } finally { finish(); } } } private final class SetEditorComponentCmd extends FinalizableCommand { private final JComponent myComponent; SetEditorComponentCmd(final JComponent component, @NotNull Runnable finishCallBack) { super(finishCallBack); myComponent = component; } @Override public void run() { try { setDocumentComponent(myComponent); myLayeredPane.validate(); myLayeredPane.repaint(); } finally { finish(); } } } private final class UpdateButtonPositionCmd extends FinalizableCommand { private final String myId; private UpdateButtonPositionCmd(@NotNull String id, @NotNull Runnable finishCallBack) { super(finishCallBack); myId = id; } @Override public void run() { try { StripeButton stripeButton = myId2Button.get(myId); if (stripeButton == null) { return; } WindowInfoImpl info = stripeButton.getWindowInfo(); ToolWindowAnchor anchor = info.getAnchor(); if (ToolWindowAnchor.TOP == anchor) { myTopStripe.revalidate(); } else if (ToolWindowAnchor.LEFT == anchor) { myLeftStripe.revalidate(); } else if (ToolWindowAnchor.BOTTOM == anchor) { myBottomStripe.revalidate(); } else if (ToolWindowAnchor.RIGHT == anchor) { myRightStripe.revalidate(); } else { LOG.error("unknown anchor: " + anchor); } } finally { finish(); } } } private static class ImageRef extends SoftReference<BufferedImage> { @Nullable private BufferedImage myStrongRef; ImageRef(@NotNull BufferedImage image) { super(image); myStrongRef = image; } @Override public BufferedImage get() { if (myStrongRef != null) { BufferedImage img = myStrongRef; myStrongRef = null; // drop on first request return img; } return super.get(); } } private static class ImageCache extends ScaleContext.Cache<ImageRef> { ImageCache(@NotNull Function<? super ScaleContext, ? extends ImageRef> imageProvider) { super(imageProvider); } public BufferedImage get(@NotNull ScaleContext ctx) { ImageRef ref = getOrProvide(ctx); BufferedImage image = SoftReference.dereference(ref); if (image != null) return image; clear(); // clear to recalculate the image return get(ctx); // first recalculated image will be non-null } } private final class MyLayeredPane extends JBLayeredPane { private final Function<ScaleContext, ImageRef> myImageProvider = __ -> { int width = Math.max(Math.max(1, getWidth()), myFrame.getWidth()); int height = Math.max(Math.max(1, getHeight()), myFrame.getHeight()); return new ImageRef(UIUtil.createImage(getGraphicsConfiguration(), width, height, BufferedImage.TYPE_INT_RGB)); }; /* * These images are used to perform animated showing and hiding of components. * They are the member for performance reason. 
*/ private final ImageCache myBottomImageCache = new ImageCache(myImageProvider); private final ImageCache myTopImageCache = new ImageCache(myImageProvider); MyLayeredPane(@NotNull JComponent splitter) { setOpaque(false); add(splitter, JLayeredPane.DEFAULT_LAYER); } final Image getBottomImage() { return myBottomImageCache.get(ScaleContext.create(this)); } final Image getTopImage() { return myTopImageCache.get(ScaleContext.create(this)); } /** * When the component size becomes larger, the bottom and top images should be enlarged. */ @Override public void doLayout() { final int width = getWidth(); final int height = getHeight(); if (width < 0 || height < 0) { return; } // Resize the component at the DEFAULT layer. It should be the only component in that layer Component[] components = getComponentsInLayer(JLayeredPane.DEFAULT_LAYER.intValue()); LOG.assertTrue(components.length <= 1); for (final Component component : components) { component.setBounds(0, 0, getWidth(), getHeight()); } // Resize components at the PALETTE layer components = getComponentsInLayer(JLayeredPane.PALETTE_LAYER.intValue()); for (final Component component : components) { if (!(component instanceof InternalDecorator)) { continue; } final WindowInfoImpl info = myDecorator2Info.get(component); // In a normal situation info is not null. But sometimes Swing sends a resize // event to a removed component. See SCR #19566. if (info == null) { continue; } float weight = info.getAnchor().isHorizontal() ? (float)component.getHeight() / getHeight() : (float)component.getWidth() / getWidth(); setBoundsInPaletteLayer(component, info.getAnchor(), weight); } } final void setBoundsInPaletteLayer(@NotNull Component component, @NotNull ToolWindowAnchor anchor, float weight) { if (weight < .0f) { weight = WindowInfoImpl.DEFAULT_WEIGHT; } else if (weight > 1.0f) { weight = 1.0f; } if (ToolWindowAnchor.TOP == anchor) { component.setBounds(0, 0, getWidth(), (int)(getHeight() * weight + .5f)); } else if (ToolWindowAnchor.LEFT == anchor) { component.setBounds(0, 0, (int)(getWidth() * weight + .5f), getHeight()); } else if (ToolWindowAnchor.BOTTOM == anchor) { final int height = (int)(getHeight() * weight + .5f); component.setBounds(0, getHeight() - height, getWidth(), height); } else if (ToolWindowAnchor.RIGHT == anchor) { final int width = (int)(getWidth() * weight + .5f); component.setBounds(getWidth() - width, 0, width, getHeight()); } else { LOG.error("unknown anchor " + anchor); } } } void setStripesOverlayed(boolean stripesOverlayed) { myStripesOverlayed = stripesOverlayed; updateToolStripesVisibility(); } @Override public void dispose() { } private static float normalizeWeigh(final float weight) { if (weight <= 0) return WindowInfoImpl.DEFAULT_WEIGHT; if (weight >= 1) return 1 - WindowInfoImpl.DEFAULT_WEIGHT; return weight; } }
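/*
 * A minimal, self-contained sketch (separate from the ToolWindowsPane sources above) of the
 * bounds arithmetic that MyLayeredPane#setBoundsInPaletteLayer and normalizeWeigh perform:
 * an anchor plus a weight clamped into (0, 1) is turned into a rectangle inside the parent
 * pane. The Anchor enum, the computeBounds helper and the 0.33f default weight are
 * illustrative assumptions, not API taken from the code above.
 */
import java.awt.Rectangle;

class AnchorBoundsSketch {

  enum Anchor { TOP, LEFT, BOTTOM, RIGHT }

  // Clamp the weight into the open interval (0, 1), falling back to an assumed default
  // (WindowInfoImpl.DEFAULT_WEIGHT plays this role in the code above).
  static float normalizeWeight(float weight) {
    final float defaultWeight = 0.33f;
    if (weight <= 0) return defaultWeight;
    if (weight >= 1) return 1 - defaultWeight;
    return weight;
  }

  // Compute the bounds of a docked component inside a parent of the given size.
  static Rectangle computeBounds(Anchor anchor, float weight, int parentWidth, int parentHeight) {
    float w = normalizeWeight(weight);
    switch (anchor) {
      case TOP:
        return new Rectangle(0, 0, parentWidth, Math.round(parentHeight * w));
      case LEFT:
        return new Rectangle(0, 0, Math.round(parentWidth * w), parentHeight);
      case BOTTOM: {
        int height = Math.round(parentHeight * w);
        return new Rectangle(0, parentHeight - height, parentWidth, height);
      }
      case RIGHT: {
        int width = Math.round(parentWidth * w);
        return new Rectangle(parentWidth - width, 0, width, parentHeight);
      }
      default:
        throw new IllegalArgumentException("unknown anchor: " + anchor);
    }
  }

  public static void main(String[] args) {
    // A bottom-anchored tool window taking roughly 30% of a 1200x800 pane.
    System.out.println(computeBounds(Anchor.BOTTOM, 0.3f, 1200, 800));
  }
}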
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.server.journal.readerwriter.singlefile; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.util.Map; import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.events.Attribute; import javax.xml.stream.events.StartElement; import javax.xml.stream.events.XMLEvent; import org.fcrepo.server.errors.ModuleInitializationException; import org.fcrepo.server.journal.JournalException; import org.fcrepo.server.journal.JournalReader; import org.fcrepo.server.journal.ServerInterface; import org.fcrepo.server.journal.entry.ConsumerJournalEntry; import org.fcrepo.server.journal.recoverylog.JournalRecoveryLog; /** * A rudimentary implementation of JournalReader that just reads all entries * from a single Journal file. * <p> * Useful only for System tests. * * @author Jim Blake */ public class SingleFileJournalReader extends JournalReader implements SingleFileJournalConstants { private final File journalFile; private final XMLEventReader reader; private boolean open = true; private boolean advancedPastHeader = false; /** * Get the name of the journal file from the server parameters and wrap it in * an XMLEventReader. The document header is skipped lazily, on the first call * to {@link #readJournalEntry()}. * * @throws JournalException */ public SingleFileJournalReader(Map<String, String> parameters, String role, JournalRecoveryLog recoveryLog, ServerInterface server) throws ModuleInitializationException, JournalException { super(parameters, role, recoveryLog, server); recoveryLog.log("Using a SingleFileJournalReader"); if (!parameters.containsKey(PARAMETER_JOURNAL_FILENAME)) { throw new ModuleInitializationException("Parameter '" + PARAMETER_JOURNAL_FILENAME + "' not set.", role); } String filename = parameters.get(PARAMETER_JOURNAL_FILENAME); journalFile = new File(filename); if (!journalFile.exists()) { throw new ModuleInitializationException("Journal file '" + journalFile.getPath() + "' does not exist.", role); } if (!journalFile.isFile()) { throw new ModuleInitializationException("Journal file '" + journalFile.getPath() + "' is not a file.", role); } if (!journalFile.canRead()) { throw new ModuleInitializationException("Journal file '" + journalFile.getPath() + "' is not readable.", role); } try { XMLInputFactory factory = XMLInputFactory.newInstance(); reader = factory.createXMLEventReader(new FileReader(journalFile)); } catch (FileNotFoundException e) { throw new ModuleInitializationException("Problem opening Journal file '" + journalFile.getPath() + "'", role, e); } catch (XMLStreamException e) { throw new ModuleInitializationException("Error opening XML Event reader on Journal file '" + journalFile .getPath() + "'", role, e); } } /** * Advance past the document header to the first JournalEntry.
*/ private void advanceIntoFile() throws XMLStreamException, JournalException { XMLEvent event = reader.nextEvent(); if (!event.isStartDocument()) { throw new JournalException("Expecting XML document header, but event was '" + event + "'"); } event = reader.nextTag(); if (!isStartTagEvent(event, QNAME_TAG_JOURNAL)) { throw new JournalException("Expecting FedoraJournal start tag, but event was '" + event + "'"); } String hash = getOptionalAttributeValue(event.asStartElement(), QNAME_ATTR_REPOSITORY_HASH); checkRepositoryHash(hash); } /** * Advance past any white space, and then see whether we have any more * JournalEntry tags. If we don't, just return null. */ @Override public synchronized ConsumerJournalEntry readJournalEntry() throws JournalException, XMLStreamException { if (!open) { return null; } if (!advancedPastHeader) { advanceIntoFile(); advancedPastHeader = true; } XMLEvent next = reader.peek(); // advance past any whitespace events. while (next.isCharacters() && next.asCharacters().isWhiteSpace()) { reader.nextEvent(); next = reader.peek(); } if (isStartTagEvent(next, QNAME_TAG_JOURNAL_ENTRY)) { String identifier = peekAtJournalEntryIdentifier(); ConsumerJournalEntry journalEntry = super.readJournalEntry(reader); journalEntry.setIdentifier(identifier); return journalEntry; } else if (isEndTagEvent(next, QNAME_TAG_JOURNAL)) { return null; } else { throw getNotNextMemberOrEndOfGroupException(QNAME_TAG_JOURNAL, QNAME_TAG_JOURNAL_ENTRY, next); } } /** * Create an identifier string for the Journal Entry, so we can easily * connect the entries in the Recovery Logger with those in the Journal. Call * this before calling * {@link JournalReader#readJournalEntry(XMLEventReader)}, because the * reader is positioned at the beginning of the JournalEntry, so a peek() * will give us the start tag, with the info we need. */ private String peekAtJournalEntryIdentifier() throws XMLStreamException { XMLEvent event = reader.peek(); String timeString = "unknown"; if (event.isStartElement()) { StartElement start = event.asStartElement(); Attribute timeStamp = start.getAttributeByName(QNAME_ATTR_TIMESTAMP); if (timeStamp != null) { timeString = timeStamp.getValue(); } } return "entry='" + timeString + "'"; } /** * On the first call, Just close the reader. */ @Override public synchronized void shutdown() throws JournalException { try { if (open) { reader.close(); open = false; } } catch (XMLStreamException e) { throw new JournalException(e); } } @Override public String toString() { return super.toString() + ", journalFile='" + journalFile.getPath() + "'"; } }
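/*
 * A minimal, self-contained sketch (separate from the Fedora sources above) of the StAX
 * pattern SingleFileJournalReader relies on: peek() at the next event, consume ignorable
 * whitespace, and branch on the start tag that follows. The file name "journal.xml" and the
 * element local name "JournalEntry" are illustrative assumptions, not values taken from the
 * code above.
 */
import java.io.FileReader;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.events.XMLEvent;

class StaxPeekSketch {

  public static void main(String[] args) throws Exception {
    XMLInputFactory factory = XMLInputFactory.newInstance();
    XMLEventReader reader = factory.createXMLEventReader(new FileReader("journal.xml"));
    try {
      int entries = 0;
      while (reader.hasNext()) {
        XMLEvent next = reader.peek();
        // Skip whitespace-only character events between elements, as readJournalEntry() does.
        if (next.isCharacters() && next.asCharacters().isWhiteSpace()) {
          reader.nextEvent();
          continue;
        }
        if (next.isStartElement()
            && "JournalEntry".equals(next.asStartElement().getName().getLocalPart())) {
          entries++;
        }
        reader.nextEvent();
      }
      System.out.println("JournalEntry elements seen: " + entries);
    } finally {
      reader.close();
    }
  }
}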
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.04.23 at 01:20:51 PM CDT // package com.mastercard.api.moneysend.v2.mapping.domain; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="RequestId" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="Mappings"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Mapping" maxOccurs="unbounded" minOccurs="0"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="MappingId" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="SubscriberId" type="{http://www.w3.org/2001/XMLSchema}Long"/> * &lt;element name="AccountUsage" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="DefaultIndicator" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Alias" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ICA" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="AccountNumber" type="{http://www.w3.org/2001/XMLSchema}Long"/> * &lt;element name="CardholderFullName"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="CardholderFirstName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderMiddleName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderLastName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Address"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Line1" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="City" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CountrySubdivision" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="PostalCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="Country" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="ReceivingEligibility" minOccurs="0"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Eligible" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Currency"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * 
&lt;sequence> * &lt;element name="AlphaCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Country"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCountryCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCountryCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Brand"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AcceptanceBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ProductBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="ExpiryDate" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "requestId", "mappings" }) @XmlRootElement(name = "InquireMapping") public class InquireMapping { @XmlElement(name = "RequestId") protected Integer requestId; @XmlElement(name = "Mappings", required = true) protected Mappings mappings; /** * Gets the value of the requestId property. * */ public Integer getRequestId() { return requestId; } /** * Sets the value of the requestId property. * */ public void setRequestId(Integer value) { this.requestId = value; } /** * Gets the value of the mappings property. * * @return * possible object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings } * */ public Mappings getMappings() { return mappings; } /** * Sets the value of the mappings property. * * @param value * allowed object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings } * */ public void setMappings(Mappings value) { this.mappings = value; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Mapping" maxOccurs="unbounded" minOccurs="0"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="MappingId" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="SubscriberId" type="{http://www.w3.org/2001/XMLSchema}Long"/> * &lt;element name="AccountUsage" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="DefaultIndicator" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Alias" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ICA" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="AccountNumber" type="{http://www.w3.org/2001/XMLSchema}Long"/> * &lt;element name="CardholderFullName"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="CardholderFirstName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderMiddleName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderLastName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Address"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Line1" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="City" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CountrySubdivision" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="PostalCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="Country" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="ReceivingEligibility" minOccurs="0"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Eligible" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Currency"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Country"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCountryCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCountryCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Brand"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AcceptanceBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ProductBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * 
&lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="ExpiryDate" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "mapping" }) public static class Mappings { @XmlElement(name = "Mapping") protected List<Mapping> mapping; /** * Gets the value of the mapping property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the mapping property. * * <p> * For example, to add a new item, do as follows: * <pre> * getMapping().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping } * * */ public List<Mapping> getMapping() { if (mapping == null) { mapping = new ArrayList<Mapping>(); } return this.mapping; } public void setMapping(List<Mapping> mapping) { this.mapping = mapping; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="MappingId" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="SubscriberId" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="AccountUsage" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="DefaultIndicator" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Alias" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ICA" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="AccountNumber" type="{http://www.w3.org/2001/XMLSchema}Long"/> * &lt;element name="CardholderFullName"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="CardholderFirstName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderMiddleName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderLastName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Address"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Line1" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="City" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CountrySubdivision" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="PostalCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="Country" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="ReceivingEligibility" minOccurs="0"> * &lt;complexType> * 
&lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Eligible" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Currency"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Country"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCountryCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCountryCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Brand"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AcceptanceBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ProductBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="ExpiryDate" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "mappingId", "subscriberId", "accountUsage", "defaultIndicator", "alias", "ica", "accountNumber", "cardholderFullName", "address", "receivingEligibility", "expiryDate" }) public static class Mapping { @XmlElement(name = "MappingId") protected Integer mappingId; @XmlElement(name = "SubscriberId") protected String subscriberId; @XmlElement(name = "AccountUsage", required = true) protected String accountUsage; @XmlElement(name = "DefaultIndicator", required = true) protected String defaultIndicator; @XmlElement(name = "Alias", required = true) protected String alias; @XmlElement(name = "ICA") protected Integer ica; @XmlElement(name = "AccountNumber") protected Long accountNumber; @XmlElement(name = "CardholderFullName", required = true) protected CardholderFullName cardholderFullName; @XmlElement(name = "Address", required = true) protected Address address; @XmlElement(name = "ReceivingEligibility") protected ReceivingEligibility receivingEligibility; @XmlElement(name = "ExpiryDate") protected Integer expiryDate; /** * Gets the value of the mappingId property. * */ public Integer getMappingId() { return mappingId; } /** * Sets the value of the mappingId property. * */ public void setMappingId(Integer value) { this.mappingId = value; } /** * Gets the value of the subscriberId property. * */ public String getSubscriberId() { return subscriberId; } /** * Sets the value of the subscriberId property. * */ public void setSubscriberId(String value) { this.subscriberId = value; } /** * Gets the value of the accountUsage property. * * @return * possible object is * {@link String } * */ public String getAccountUsage() { return accountUsage; } /** * Sets the value of the accountUsage property. 
* * @param value * allowed object is * {@link String } * */ public void setAccountUsage(String value) { this.accountUsage = value; } /** * Gets the value of the defaultIndicator property. * * @return * possible object is * {@link String } * */ public String getDefaultIndicator() { return defaultIndicator; } /** * Sets the value of the defaultIndicator property. * * @param value * allowed object is * {@link String } * */ public void setDefaultIndicator(String value) { this.defaultIndicator = value; } /** * Gets the value of the alias property. * * @return * possible object is * {@link String } * */ public String getAlias() { return alias; } /** * Sets the value of the alias property. * * @param value * allowed object is * {@link String } * */ public void setAlias(String value) { this.alias = value; } /** * Gets the value of the ica property. * */ public Integer getICA() { return ica; } /** * Sets the value of the ica property. * */ public void setICA(Integer value) { this.ica = value; } /** * Gets the value of the accountNumber property. * */ public Long getAccountNumber() { return accountNumber; } /** * Sets the value of the accountNumber property. * */ public void setAccountNumber(Long value) { this.accountNumber = value; } /** * Gets the value of the cardholderFullName property. * * @return * possible object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.CardholderFullName } * */ public CardholderFullName getCardholderFullName() { return cardholderFullName; } /** * Sets the value of the cardholderFullName property. * * @param value * allowed object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.CardholderFullName } * */ public void setCardholderFullName(CardholderFullName value) { this.cardholderFullName = value; } /** * Gets the value of the address property. * * @return * possible object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.Address } * */ public Address getAddress() { return address; } /** * Sets the value of the address property. * * @param value * allowed object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.Address } * */ public void setAddress(Address value) { this.address = value; } /** * Gets the value of the receivingEligibility property. * * @return * possible object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility } * */ public ReceivingEligibility getReceivingEligibility() { return receivingEligibility; } /** * Sets the value of the receivingEligibility property. * * @param value * allowed object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility } * */ public void setReceivingEligibility(ReceivingEligibility value) { this.receivingEligibility = value; } /** * Gets the value of the expiryDate property. * */ public Integer getExpiryDate() { return expiryDate; } /** * Sets the value of the expiryDate property. * */ public void setExpiryDate(Integer value) { this.expiryDate = value; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Line1" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="City" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CountrySubdivision" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="PostalCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;element name="Country" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "line1", "city", "countrySubdivision", "postalCode", "country" }) public static class Address { @XmlElement(name = "Line1", required = true) protected String line1; @XmlElement(name = "City", required = true) protected String city; @XmlElement(name = "CountrySubdivision", required = true) protected String countrySubdivision; @XmlElement(name = "PostalCode") protected Integer postalCode; @XmlElement(name = "Country", required = true) protected String country; /** * Gets the value of the line1 property. * * @return * possible object is * {@link String } * */ public String getLine1() { return line1; } /** * Sets the value of the line1 property. * * @param value * allowed object is * {@link String } * */ public void setLine1(String value) { this.line1 = value; } /** * Gets the value of the city property. * * @return * possible object is * {@link String } * */ public String getCity() { return city; } /** * Sets the value of the city property. * * @param value * allowed object is * {@link String } * */ public void setCity(String value) { this.city = value; } /** * Gets the value of the countrySubdivision property. * * @return * possible object is * {@link String } * */ public String getCountrySubdivision() { return countrySubdivision; } /** * Sets the value of the countrySubdivision property. * * @param value * allowed object is * {@link String } * */ public void setCountrySubdivision(String value) { this.countrySubdivision = value; } /** * Gets the value of the postalCode property. * */ public Integer getPostalCode() { return postalCode; } /** * Sets the value of the postalCode property. * */ public void setPostalCode(Integer value) { this.postalCode = value; } /** * Gets the value of the country property. * * @return * possible object is * {@link String } * */ public String getCountry() { return country; } /** * Sets the value of the country property. * * @param value * allowed object is * {@link String } * */ public void setCountry(String value) { this.country = value; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="CardholderFirstName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderMiddleName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="CardholderLastName" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "cardholderFirstName", "cardholderMiddleName", "cardholderLastName" }) public static class CardholderFullName { @XmlElement(name = "CardholderFirstName", required = true) protected String cardholderFirstName; @XmlElement(name = "CardholderMiddleName", required = true) protected String cardholderMiddleName; @XmlElement(name = "CardholderLastName", required = true) protected String cardholderLastName; /** * Gets the value of the cardholderFirstName property. * * @return * possible object is * {@link String } * */ public String getCardholderFirstName() { return cardholderFirstName; } /** * Sets the value of the cardholderFirstName property. * * @param value * allowed object is * {@link String } * */ public void setCardholderFirstName(String value) { this.cardholderFirstName = value; } /** * Gets the value of the cardholderMiddleName property. * * @return * possible object is * {@link String } * */ public String getCardholderMiddleName() { return cardholderMiddleName; } /** * Sets the value of the cardholderMiddleName property. * * @param value * allowed object is * {@link String } * */ public void setCardholderMiddleName(String value) { this.cardholderMiddleName = value; } /** * Gets the value of the cardholderLastName property. * * @return * possible object is * {@link String } * */ public String getCardholderLastName() { return cardholderLastName; } /** * Sets the value of the cardholderLastName property. * * @param value * allowed object is * {@link String } * */ public void setCardholderLastName(String value) { this.cardholderLastName = value; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="Eligible" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="Currency"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Country"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCountryCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCountryCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;element name="Brand"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AcceptanceBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ProductBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "eligible", "currency", "country", "brand" }) public static class ReceivingEligibility { @XmlElement(name = "Eligible", required = true) protected String eligible; @XmlElement(name = "Currency", required = true) protected Currency currency; @XmlElement(name = "Country", required = true) protected Country country; @XmlElement(name = "Brand", required = true) protected Brand brand; /** * Gets the value of the eligible property. * * @return * possible object is * {@link String } * */ public String getEligible() { return eligible; } /** * Sets the value of the eligible property. * * @param value * allowed object is * {@link String } * */ public void setEligible(String value) { this.eligible = value; } /** * Gets the value of the currency property. * * @return * possible object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility.Currency } * */ public Currency getCurrency() { return currency; } /** * Sets the value of the currency property. * * @param value * allowed object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility.Currency } * */ public void setCurrency(Currency value) { this.currency = value; } /** * Gets the value of the country property. * * @return * possible object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility.Country } * */ public Country getCountry() { return country; } /** * Sets the value of the country property. * * @param value * allowed object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility.Country } * */ public void setCountry(Country value) { this.country = value; } /** * Gets the value of the brand property. 
* * @return * possible object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility.Brand } * */ public Brand getBrand() { return brand; } /** * Sets the value of the brand property. * * @param value * allowed object is * {@link com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping.ReceivingEligibility.Brand } * */ public void setBrand(Brand value) { this.brand = value; } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AcceptanceBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="ProductBrandCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "acceptanceBrandCode", "productBrandCode" }) public static class Brand { @XmlElement(name = "AcceptanceBrandCode", required = true) protected String acceptanceBrandCode; @XmlElement(name = "ProductBrandCode", required = true) protected String productBrandCode; /** * Gets the value of the acceptanceBrandCode property. * * @return * possible object is * {@link String } * */ public String getAcceptanceBrandCode() { return acceptanceBrandCode; } /** * Sets the value of the acceptanceBrandCode property. * * @param value * allowed object is * {@link String } * */ public void setAcceptanceBrandCode(String value) { this.acceptanceBrandCode = value; } /** * Gets the value of the productBrandCode property. * * @return * possible object is * {@link String } * */ public String getProductBrandCode() { return productBrandCode; } /** * Sets the value of the productBrandCode property. * * @param value * allowed object is * {@link String } * */ public void setProductBrandCode(String value) { this.productBrandCode = value; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCountryCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCountryCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "alphaCountryCode", "numericCountryCode" }) public static class Country { @XmlElement(name = "AlphaCountryCode", required = true) protected String alphaCountryCode; @XmlElement(name = "NumericCountryCode") protected Integer numericCountryCode; /** * Gets the value of the alphaCountryCode property. * * @return * possible object is * {@link String } * */ public String getAlphaCountryCode() { return alphaCountryCode; } /** * Sets the value of the alphaCountryCode property. * * @param value * allowed object is * {@link String } * */ public void setAlphaCountryCode(String value) { this.alphaCountryCode = value; } /** * Gets the value of the numericCountryCode property. * */ public Integer getNumericCountryCode() { return numericCountryCode; } /** * Sets the value of the numericCountryCode property. 
* */ public void setNumericCountryCode(Integer value) { this.numericCountryCode = value; } } /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="AlphaCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;element name="NumericCurrencyCode" type="{http://www.w3.org/2001/XMLSchema}Integer"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "alphaCurrencyCode", "numericCurrencyCode" }) public static class Currency { @XmlElement(name = "AlphaCurrencyCode", required = true) protected String alphaCurrencyCode; @XmlElement(name = "NumericCurrencyCode") protected Integer numericCurrencyCode; /** * Gets the value of the alphaCurrencyCode property. * * @return * possible object is * {@link String } * */ public String getAlphaCurrencyCode() { return alphaCurrencyCode; } /** * Sets the value of the alphaCurrencyCode property. * * @param value * allowed object is * {@link String } * */ public void setAlphaCurrencyCode(String value) { this.alphaCurrencyCode = value; } /** * Gets the value of the numericCurrencyCode property. * */ public Integer getNumericCurrencyCode() { return numericCurrencyCode; } /** * Sets the value of the numericCurrencyCode property. * */ public void setNumericCurrencyCode(Integer value) { this.numericCurrencyCode = value; } } } } } }
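/*
 * Illustrative only: a minimal sketch of populating the generated value objects above.
 * The enclosing type name (InquireMapping.Mappings.Mapping) is taken from the {@link}
 * references in the Javadoc, and every sample value below is hypothetical; how the
 * populated object is ultimately marshalled or submitted to the MoneySend service is not
 * shown in this file and is therefore omitted here as well.
 */
import com.mastercard.api.moneysend.v2.mapping.domain.InquireMapping.Mappings.Mapping;

class MappingPopulationSketch {

    static Mapping buildSampleMapping() {
        Mapping mapping = new Mapping();
        mapping.setSubscriberId("subscriber-001");   // hypothetical sample value
        mapping.setAccountUsage("RECEIVE");          // hypothetical sample value
        mapping.setDefaultIndicator("Y");
        mapping.setAlias("jane.doe@example.com");

        // CardholderFullName and Address are the nested value classes defined above.
        Mapping.CardholderFullName name = new Mapping.CardholderFullName();
        name.setCardholderFirstName("Jane");
        name.setCardholderLastName("Doe");
        mapping.setCardholderFullName(name);

        Mapping.Address address = new Mapping.Address();
        address.setLine1("1 Main Street");
        address.setCity("Springfield");
        address.setCountrySubdivision("MO");
        address.setCountry("USA");
        mapping.setAddress(address);

        return mapping;
    }
}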
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.spy; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.channels.FileChannel; import java.util.HashMap; import java.util.Map; import java.util.SortedMap; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.EditLogValidation; import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.PathUtils; import org.apache.log4j.Level; import org.junit.Test; import com.google.common.collect.Maps; import com.google.common.io.Files; public class TestFSEditLogLoader { static { ((Log4JLogger)FSImage.LOG).getLogger().setLevel(Level.ALL); ((Log4JLogger)FSEditLogLoader.LOG).getLogger().setLevel(Level.ALL); } private static final File TEST_DIR = PathUtils.getTestDir(TestFSEditLogLoader.class); private static final int NUM_DATA_NODES = 0; @Test public void testDisplayRecentEditLogOpCodes() throws IOException { // start a cluster Configuration conf = new HdfsConfiguration(); MiniDFSCluster cluster = null; FileSystem fileSys = null; cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES) .enableManagedDfsDirsRedundancy(false).build(); cluster.waitActive(); fileSys = cluster.getFileSystem(); final FSNamesystem namesystem = cluster.getNamesystem(); FSImage fsimage = namesystem.getFSImage(); for (int i = 0; i < 20; i++) { fileSys.mkdirs(new Path("/tmp/tmp" + i)); } StorageDirectory sd = fsimage.getStorage().dirIterator(NameNodeDirType.EDITS).next(); cluster.shutdown(); File editFile = FSImageTestUtil.findLatestEditsLog(sd).getFile(); assertTrue("Should exist: " + editFile, editFile.exists()); // Corrupt the edits file. 
long fileLen = editFile.length(); RandomAccessFile rwf = new RandomAccessFile(editFile, "rw"); rwf.seek(fileLen - 40); for (int i = 0; i < 20; i++) { rwf.write(FSEditLogOpCodes.OP_DELETE.getOpCode()); } rwf.close(); StringBuilder bld = new StringBuilder(); bld.append("^Error replaying edit log at offset \\d+. "); bld.append("Expected transaction ID was \\d+\n"); bld.append("Recent opcode offsets: (\\d+\\s*){4}$"); try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES) .enableManagedDfsDirsRedundancy(false).format(false).build(); fail("should not be able to start"); } catch (IOException e) { assertTrue("error message contains opcodes message", e.getMessage().matches(bld.toString())); } } /** * Test that, if the NN restarts with a new minimum replication, * any files created with the old replication count will get * automatically bumped up to the new minimum upon restart. */ @Test public void testReplicationAdjusted() throws Exception { // start a cluster Configuration conf = new HdfsConfiguration(); // Replicate and heartbeat fast to shave a few seconds off test conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1); conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1); MiniDFSCluster cluster = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2) .build(); cluster.waitActive(); FileSystem fs = cluster.getFileSystem(); // Create a file with replication count 1 Path p = new Path("/testfile"); DFSTestUtil.createFile(fs, p, 10, /*repl*/ (short)1, 1); DFSTestUtil.waitReplication(fs, p, (short)1); // Shut down and restart cluster with new minimum replication of 2 cluster.shutdown(); cluster = null; conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY, 2); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2) .format(false).build(); cluster.waitActive(); fs = cluster.getFileSystem(); // The file should get adjusted to replication 2 when // the edit log is replayed. DFSTestUtil.waitReplication(fs, p, (short)2); } finally { if (cluster != null) { cluster.shutdown(); } } } /** * Corrupt the byte at the given offset in the given file, * by subtracting 1 from it. */ private void corruptByteInFile(File file, long offset) throws IOException { RandomAccessFile raf = new RandomAccessFile(file, "rw"); try { raf.seek(offset); int origByte = raf.read(); raf.seek(offset); raf.writeByte(origByte - 1); } finally { IOUtils.closeStream(raf); } } /** * Truncate the given file to the given length */ private void truncateFile(File logFile, long newLength) throws IOException { RandomAccessFile raf = new RandomAccessFile(logFile, "rw"); raf.setLength(newLength); raf.close(); } /** * Return the length of bytes in the given file after subtracting * the trailer of 0xFF (OP_INVALID)s. * This seeks to the end of the file and reads chunks backwards until * it finds a non-0xFF byte. * @throws IOException if the file cannot be read */ private static long getNonTrailerLength(File f) throws IOException { final int chunkSizeToRead = 256*1024; FileInputStream fis = new FileInputStream(f); try { byte buf[] = new byte[chunkSizeToRead]; FileChannel fc = fis.getChannel(); long size = fc.size(); long pos = size - (size % chunkSizeToRead); while (pos >= 0) { fc.position(pos); int readLen = (int) Math.min(size - pos, chunkSizeToRead); IOUtils.readFully(fis, buf, 0, readLen); for (int i = readLen - 1; i >= 0; i--) { if (buf[i] != FSEditLogOpCodes.OP_INVALID.getOpCode()) { return pos + i + 1; // + 1 since we count this byte! 
} } pos -= chunkSizeToRead; } return 0; } finally { fis.close(); } } @Test public void testStreamLimiter() throws IOException { final File LIMITER_TEST_FILE = new File(TEST_DIR, "limiter.test"); FileOutputStream fos = new FileOutputStream(LIMITER_TEST_FILE); try { fos.write(0x12); fos.write(0x12); fos.write(0x12); } finally { fos.close(); } FileInputStream fin = new FileInputStream(LIMITER_TEST_FILE); BufferedInputStream bin = new BufferedInputStream(fin); FSEditLogLoader.PositionTrackingInputStream tracker = new FSEditLogLoader.PositionTrackingInputStream(bin); try { tracker.setLimit(2); tracker.mark(100); tracker.read(); tracker.read(); try { tracker.read(); fail("expected to get IOException after reading past the limit"); } catch (IOException e) { } tracker.reset(); tracker.mark(100); byte arr[] = new byte[3]; try { tracker.read(arr); fail("expected to get IOException after reading past the limit"); } catch (IOException e) { } tracker.reset(); arr = new byte[2]; tracker.read(arr); } finally { tracker.close(); } } /** * Create an unfinalized edit log for testing purposes * * @param testDir Directory to create the edit log in * @param numTx Number of transactions to add to the new edit log * @param offsetToTxId A map from transaction IDs to offsets in the * edit log file. * @return The new edit log file name. * @throws IOException */ static private File prepareUnfinalizedTestEditLog(File testDir, int numTx, SortedMap<Long, Long> offsetToTxId) throws IOException { File inProgressFile = new File(testDir, NNStorage.getInProgressEditsFileName(1)); FSEditLog fsel = null, spyLog = null; try { fsel = FSImageTestUtil.createStandaloneEditLog(testDir); spyLog = spy(fsel); // Normally, the in-progress edit log would be finalized by // FSEditLog#endCurrentLogSegment. For testing purposes, we // disable that here. 
doNothing().when(spyLog).endCurrentLogSegment(true); spyLog.openForWrite(); assertTrue("should exist: " + inProgressFile, inProgressFile.exists()); for (int i = 0; i < numTx; i++) { long trueOffset = getNonTrailerLength(inProgressFile); long thisTxId = spyLog.getLastWrittenTxId() + 1; offsetToTxId.put(trueOffset, thisTxId); System.err.println("txid " + thisTxId + " at offset " + trueOffset); spyLog.logDelete("path" + i, i, false); spyLog.logSync(); } } finally { if (spyLog != null) { spyLog.close(); } else if (fsel != null) { fsel.close(); } } return inProgressFile; } @Test public void testValidateEditLogWithCorruptHeader() throws IOException { File testDir = new File(TEST_DIR, "testValidateEditLogWithCorruptHeader"); SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap(); File logFile = prepareUnfinalizedTestEditLog(testDir, 2, offsetToTxId); RandomAccessFile rwf = new RandomAccessFile(logFile, "rw"); try { rwf.seek(0); rwf.writeLong(42); // corrupt header } finally { rwf.close(); } EditLogValidation validation = EditLogFileInputStream.validateEditLog(logFile, Long.MAX_VALUE); assertTrue(validation.hasCorruptHeader()); } @Test public void testValidateEditLogWithCorruptBody() throws IOException { File testDir = new File(TEST_DIR, "testValidateEditLogWithCorruptBody"); SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap(); final int NUM_TXNS = 20; File logFile = prepareUnfinalizedTestEditLog(testDir, NUM_TXNS, offsetToTxId); // Back up the uncorrupted log File logFileBak = new File(testDir, logFile.getName() + ".bak"); Files.copy(logFile, logFileBak); EditLogValidation validation = EditLogFileInputStream.validateEditLog(logFile, Long.MAX_VALUE); assertTrue(!validation.hasCorruptHeader()); // We expect that there will be an OP_START_LOG_SEGMENT, followed by // NUM_TXNS opcodes, followed by an OP_END_LOG_SEGMENT. assertEquals(NUM_TXNS + 1, validation.getEndTxId()); // Corrupt each edit and verify that validation continues to work for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) { long txOffset = entry.getKey(); long txId = entry.getValue(); // Restore backup, corrupt the txn opcode Files.copy(logFileBak, logFile); corruptByteInFile(logFile, txOffset); validation = EditLogFileInputStream.validateEditLog(logFile, Long.MAX_VALUE); long expectedEndTxId = (txId == (NUM_TXNS + 1)) ? NUM_TXNS : (NUM_TXNS + 1); assertEquals("Failed when corrupting txn opcode at " + txOffset, expectedEndTxId, validation.getEndTxId()); assertTrue(!validation.hasCorruptHeader()); } // Truncate right before each edit and verify that validation continues // to work for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) { long txOffset = entry.getKey(); long txId = entry.getValue(); // Restore backup, corrupt the txn opcode Files.copy(logFileBak, logFile); truncateFile(logFile, txOffset); validation = EditLogFileInputStream.validateEditLog(logFile, Long.MAX_VALUE); long expectedEndTxId = (txId == 0) ? HdfsConstants.INVALID_TXID : (txId - 1); assertEquals("Failed when corrupting txid " + txId + " txn opcode " + "at " + txOffset, expectedEndTxId, validation.getEndTxId()); assertTrue(!validation.hasCorruptHeader()); } } @Test public void testValidateEmptyEditLog() throws IOException { File testDir = new File(TEST_DIR, "testValidateEmptyEditLog"); SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap(); File logFile = prepareUnfinalizedTestEditLog(testDir, 0, offsetToTxId); // Truncate the file so that there is nothing except the header and // layout flags section. 
truncateFile(logFile, 8); EditLogValidation validation = EditLogFileInputStream.validateEditLog(logFile, Long.MAX_VALUE); assertTrue(!validation.hasCorruptHeader()); assertEquals(HdfsConstants.INVALID_TXID, validation.getEndTxId()); } private static final Map<Byte, FSEditLogOpCodes> byteToEnum = new HashMap<Byte, FSEditLogOpCodes>(); static { for(FSEditLogOpCodes opCode : FSEditLogOpCodes.values()) { byteToEnum.put(opCode.getOpCode(), opCode); } } private static FSEditLogOpCodes fromByte(byte opCode) { return byteToEnum.get(opCode); } @Test public void testFSEditLogOpCodes() throws IOException { //try all codes for(FSEditLogOpCodes c : FSEditLogOpCodes.values()) { final byte code = c.getOpCode(); assertEquals("c=" + c + ", code=" + code, c, FSEditLogOpCodes.fromByte(code)); } //try all byte values for(int b = 0; b < (1 << Byte.SIZE); b++) { final byte code = (byte)b; assertEquals("b=" + b + ", code=" + code, fromByte(code), FSEditLogOpCodes.fromByte(code)); } } }
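/*
 * Illustrative only: a standalone restatement of the trailer-trimming idea behind
 * getNonTrailerLength(File) in the test above, applied to an in-memory byte array so it
 * can be run without an edit log file. The 0xFF filler mirrors the OP_INVALID padding the
 * test strips; the class and method names here are hypothetical and not part of Hadoop.
 */
class TrailerLengthSketch {

    /** Length of the data before the trailing run of filler bytes. */
    static int nonTrailerLength(byte[] data, byte filler) {
        for (int i = data.length - 1; i >= 0; i--) {
            if (data[i] != filler) {
                return i + 1; // + 1 since the byte at index i still counts
            }
        }
        return 0; // the whole buffer is filler
    }

    public static void main(String[] args) {
        byte[] log = {0x12, 0x34, 0x56, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
        System.out.println(nonTrailerLength(log, (byte) 0xFF)); // prints 3
    }
}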
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.client.console.notifications; import org.apache.syncope.client.console.events.EventCategoryPanel; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.Transformer; import org.apache.commons.lang3.tuple.Pair; import org.apache.syncope.client.console.commons.Constants; import org.apache.syncope.client.console.panels.search.AbstractSearchPanel; import org.apache.syncope.client.console.panels.search.AnyObjectSearchPanel; import org.apache.syncope.client.console.panels.search.GroupSearchPanel; import org.apache.syncope.client.console.panels.search.SearchClause; import org.apache.syncope.client.console.panels.search.UserSearchPanel; import org.apache.syncope.client.console.rest.AnyTypeRestClient; import org.apache.syncope.client.console.rest.LoggerRestClient; import org.apache.syncope.client.console.rest.NotificationRestClient; import org.apache.syncope.client.console.rest.SchemaRestClient; import org.apache.syncope.client.console.wicket.markup.html.form.AjaxCheckBoxPanel; import org.apache.syncope.client.console.wicket.markup.html.form.AjaxDropDownChoicePanel; import org.apache.syncope.client.console.wicket.markup.html.form.AjaxTextFieldPanel; import org.apache.syncope.client.console.wicket.markup.html.form.MultiFieldPanel; import org.apache.syncope.client.console.wicket.markup.html.form.MultiPanel; import org.apache.syncope.client.console.wizards.AjaxWizardBuilder; import org.apache.syncope.common.lib.EntityTOUtils; import org.apache.syncope.common.lib.to.AnyTypeTO; import org.apache.syncope.common.lib.to.DerSchemaTO; import org.apache.syncope.common.lib.to.MailTemplateTO; import org.apache.syncope.common.lib.to.NotificationTO; import org.apache.syncope.common.lib.to.PlainSchemaTO; import org.apache.syncope.common.lib.to.VirSchemaTO; import org.apache.syncope.common.lib.types.AnyTypeKind; import org.apache.syncope.common.lib.types.SchemaType; import org.apache.syncope.common.lib.types.TraceLevel; import org.apache.wicket.PageReference; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior; import org.apache.wicket.extensions.wizard.WizardModel; import org.apache.wicket.extensions.wizard.WizardStep; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.list.ListItem; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.PropertyModel; import org.apache.wicket.model.ResourceModel; import 
org.apache.wicket.model.util.ListModel; import org.apache.wicket.validation.validator.EmailAddressValidator; public class NotificationWizardBuilder extends AjaxWizardBuilder<NotificationWrapper> { private static final long serialVersionUID = -1975312550059578553L; private final NotificationRestClient restClient = new NotificationRestClient(); private final SchemaRestClient schemaRestClient = new SchemaRestClient(); private final LoggerRestClient loggerRestClient = new LoggerRestClient(); /** * Construct. * * @param notificationTO notification. * @param pageRef Caller page reference. */ public NotificationWizardBuilder(final NotificationTO notificationTO, final PageReference pageRef) { super(new NotificationWrapper(notificationTO), pageRef); } @Override protected Serializable onApplyInternal(final NotificationWrapper modelObject) { modelObject.fillRecipientConditions(); modelObject.fillAboutConditions(); final boolean createFlag = modelObject.getInnerObject().getKey() == null; if (createFlag) { restClient.create(modelObject.getInnerObject()); } else { restClient.update(modelObject.getInnerObject()); } return null; } @Override protected WizardModel buildModelSteps(final NotificationWrapper modelObject, final WizardModel wizardModel) { wizardModel.add(new NotificationWizardBuilder.Details(modelObject)); wizardModel.add(new NotificationWizardBuilder.Events(modelObject)); wizardModel.add(new NotificationWizardBuilder.Abouts(modelObject)); wizardModel.add(new NotificationWizardBuilder.Recipients(modelObject)); return wizardModel; } public class Details extends WizardStep { private static final long serialVersionUID = -7709805590497687958L; public Details(final NotificationWrapper modelObject) { NotificationTO notificationTO = modelObject.getInnerObject(); boolean createFlag = notificationTO.getKey() == null; AjaxTextFieldPanel sender = new AjaxTextFieldPanel("sender", getString("sender"), new PropertyModel<String>(notificationTO, "sender")); sender.addRequiredLabel(); sender.addValidator(EmailAddressValidator.getInstance()); add(sender); AjaxTextFieldPanel subject = new AjaxTextFieldPanel("subject", getString("subject"), new PropertyModel<String>(notificationTO, "subject")); subject.addRequiredLabel(); add(subject); AjaxTextFieldPanel recipientAttrName = new AjaxTextFieldPanel( "recipientAttrName", new ResourceModel("recipientAttrName", "recipientAttrName").getObject(), new PropertyModel<String>(notificationTO, "recipientAttrName")); recipientAttrName.setChoices(getSchemaNames()); recipientAttrName.addRequiredLabel(); recipientAttrName.setTitle(getString("intAttrNameInfo.help") + "<div style=\"font-size: 10px;\">" + "<code>groups[groupName].attribute</code>\n" + "<code>anyObjects[anyObjectName].attribute</code>\n" + "<code>memberships[groupName].attribute</code>\n" + "</div>", true); add(recipientAttrName); AjaxDropDownChoicePanel<String> template = new AjaxDropDownChoicePanel<>( "template", getString("template"), new PropertyModel<String>(notificationTO, "template")); template.setChoices(CollectionUtils.collect( restClient.listTemplates(), new Transformer<MailTemplateTO, String>() { @Override public String transform(final MailTemplateTO input) { return input.getKey(); } }, new ArrayList<String>())); template.addRequiredLabel(); add(template); AjaxDropDownChoicePanel<TraceLevel> traceLevel = new AjaxDropDownChoicePanel<>( "traceLevel", getString("traceLevel"), new PropertyModel<TraceLevel>(notificationTO, "traceLevel")); traceLevel.setChoices(Arrays.asList(TraceLevel.values())); 
traceLevel.addRequiredLabel(); add(traceLevel); final AjaxCheckBoxPanel isActive = new AjaxCheckBoxPanel("isActive", getString("isActive"), new PropertyModel<Boolean>(notificationTO, "active")); if (createFlag) { isActive.getField().setDefaultModelObject(Boolean.TRUE); } add(isActive); } } public class Events extends WizardStep { private static final long serialVersionUID = -7709805590497687958L; public Events(final NotificationWrapper modelObject) { add(new EventCategoryPanel( "eventSelection", loggerRestClient.listEvents(), new PropertyModel<List<String>>(modelObject.getInnerObject(), "events")) { private static final long serialVersionUID = 6429053774964787735L; @Override protected List<String> getListAuthRoles() { return Collections.emptyList(); } @Override protected List<String> getChangeAuthRoles() { return Collections.emptyList(); } }); } } public class About extends Panel { private static final long serialVersionUID = -9149543787708482882L; public About(final String id, final IModel<Pair<String, List<SearchClause>>> model) { super(id, model); setOutputMarkupId(true); List<String> anyTypeTOs = CollectionUtils.collect( new AnyTypeRestClient().list(), EntityTOUtils.<AnyTypeTO>keyTransformer(), new ArrayList<String>()); final AjaxDropDownChoicePanel<String> type = new AjaxDropDownChoicePanel<>("about", "about", new Model<String>() { private static final long serialVersionUID = -2350296434572623272L; @Override public String getObject() { return model.getObject().getLeft(); } @Override public void setObject(final String object) { model.setObject(Pair.of(object, model.getObject().getRight())); } }); type.setChoices(anyTypeTOs); type.addRequiredLabel(); add(type); final ListModel<SearchClause> clauseModel = new ListModel<SearchClause>() { private static final long serialVersionUID = 3769540249683319782L; @Override public List<SearchClause> getObject() { return model.getObject().getRight(); } @Override public void setObject(final List<SearchClause> object) { model.getObject().setValue(object); } }; final WebMarkupContainer searchContainer = new WebMarkupContainer("search"); add(searchContainer.setOutputMarkupId(true)); searchContainer.add(getClauseBuilder(model.getObject().getLeft(), clauseModel).build("clauses")); type.getField().add(new AjaxFormComponentUpdatingBehavior(Constants.ON_CHANGE) { private static final long serialVersionUID = -1107858522700306810L; @Override protected void onUpdate(final AjaxRequestTarget target) { clauseModel.getObject().clear(); searchContainer.addOrReplace(getClauseBuilder(type.getModelObject(), clauseModel).build("clauses"). 
setRenderBodyOnly(true)); target.add(searchContainer); } }); } private AbstractSearchPanel.Builder<?> getClauseBuilder( final String type, final ListModel<SearchClause> clauseModel) { AbstractSearchPanel.Builder<?> clause; switch (type) { case "USER": clause = new UserSearchPanel.Builder(clauseModel); break; case "GROUP": clause = new GroupSearchPanel.Builder(clauseModel); break; default: clause = new AnyObjectSearchPanel.Builder(type, clauseModel); } return clause; } } public class Abouts extends WizardStep { private static final long serialVersionUID = -7709805590497687958L; public Abouts(final NotificationWrapper modelObject) { final WebMarkupContainer aboutContainer = new WebMarkupContainer("about"); aboutContainer.setOutputMarkupId(true); add(aboutContainer); final IModel<List<Pair<String, List<SearchClause>>>> model = new PropertyModel<>(modelObject, "aboutClauses"); aboutContainer.add(new MultiPanel<Pair<String, List<SearchClause>>>("abouts", "abouts", model, false) { private static final long serialVersionUID = -2481579077338205547L; @Override protected Pair<String, List<SearchClause>> newModelObject() { return Pair.<String, List<SearchClause>>of(AnyTypeKind.USER.name(), new ArrayList<SearchClause>()); } @Override protected About getItemPanel(final ListItem<Pair<String, List<SearchClause>>> item) { return new About("panel", new Model<Pair<String, List<SearchClause>>>() { private static final long serialVersionUID = 6799404673615637845L; @Override public Pair<String, List<SearchClause>> getObject() { return item.getModelObject(); } @Override public void setObject(final Pair<String, List<SearchClause>> object) { item.setModelObject(object); } @Override public void detach() { // no detach } }); } }.hideLabel()); } } public class Recipients extends WizardStep { private static final long serialVersionUID = -7709805590497687958L; public Recipients(final NotificationWrapper modelObject) { final NotificationTO notificationTO = modelObject.getInnerObject(); final boolean createFlag = notificationTO.getKey() == null; final AjaxTextFieldPanel staticRecipientsFieldPanel = new AjaxTextFieldPanel("panel", "staticRecipients", new Model<String>()); staticRecipientsFieldPanel.addValidator(EmailAddressValidator.getInstance()); final MultiFieldPanel<String> staticRecipients = new MultiFieldPanel.Builder<>( new PropertyModel<List<String>>(notificationTO, "staticRecipients")). build("staticRecipients", "staticRecipients", staticRecipientsFieldPanel); add(staticRecipients.hideLabel()); final AnyObjectSearchPanel recipients = new UserSearchPanel.Builder( new PropertyModel<List<SearchClause>>(modelObject, "recipientClauses")). 
required(false).build("recipients"); add(recipients); final AjaxCheckBoxPanel selfAsRecipient = new AjaxCheckBoxPanel("selfAsRecipient", getString("selfAsRecipient"), new PropertyModel<Boolean>(notificationTO, "selfAsRecipient")); add(selfAsRecipient); if (createFlag) { selfAsRecipient.getField().setDefaultModelObject(Boolean.FALSE); } } } private List<String> getSchemaNames() { List<String> result = new ArrayList<>(); result.add("username"); CollectionUtils.collect( schemaRestClient.<PlainSchemaTO>getSchemas(SchemaType.PLAIN, AnyTypeKind.USER.name()), EntityTOUtils.<PlainSchemaTO>keyTransformer(), result); CollectionUtils.collect( schemaRestClient.<DerSchemaTO>getSchemas(SchemaType.DERIVED, AnyTypeKind.USER.name()), EntityTOUtils.<DerSchemaTO>keyTransformer(), result); CollectionUtils.collect( schemaRestClient.<VirSchemaTO>getSchemas(SchemaType.VIRTUAL, AnyTypeKind.USER.name()), EntityTOUtils.<VirSchemaTO>keyTransformer(), result); Collections.sort(result); return result; } }
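/*
 * Illustrative only: the CollectionUtils.collect(...) idiom the wizard uses above (for
 * example in getSchemaNames() and when turning MailTemplateTO objects into template keys),
 * shown here on plain strings so it runs in isolation. Commons Collections 4 on the
 * classpath is the only assumption; the sample data is hypothetical.
 */
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.Transformer;

class CollectIdiomSketch {

    public static void main(String[] args) {
        List<String> templateKeys = Arrays.asList("optin", "confirmPasswordReset", "requestPasswordReset");

        // Transform every source element and append the results to the supplied target list,
        // which CollectionUtils.collect also returns so the call can be used inline.
        List<String> choices = CollectionUtils.collect(
                templateKeys,
                new Transformer<String, String>() {

                    @Override
                    public String transform(final String input) {
                        return input.toLowerCase();
                    }
                },
                new ArrayList<String>());

        System.out.println(choices);
    }
}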
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.io; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.BitSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.ValidCompactorWriteIdList; import org.apache.hadoop.hive.common.ValidReadTxnList; import org.apache.hadoop.hive.common.ValidReaderWriteIdList; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.AcidUtils.AcidOperationalProperties; import org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile; import org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem; import org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath; import org.apache.hadoop.hive.ql.io.orc.TestOrcRawRecordMerger; import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatusWithId; import org.junit.Test; public class TestAcidUtils { @Test public void testCreateFilename() throws Exception { Path p = new Path("/tmp"); Configuration conf = new Configuration(); AcidOutputFormat.Options options = new AcidOutputFormat.Options(conf) .setOldStyle(true).bucket(1); assertEquals("/tmp/000001_0", AcidUtils.createFilename(p, options).toString()); options.bucket(123); assertEquals("/tmp/000123_0", AcidUtils.createFilename(p, options).toString()); options.bucket(23) .minimumWriteId(100) .maximumWriteId(200) .writingBase(true) .setOldStyle(false); assertEquals("/tmp/base_0000200/bucket_00023", AcidUtils.createFilename(p, options).toString()); options.writingBase(false); assertEquals("/tmp/delta_0000100_0000200_0000/bucket_00023", AcidUtils.createFilename(p, options).toString()); options.writingDeleteDelta(true); assertEquals("/tmp/delete_delta_0000100_0000200_0000/bucket_00023", AcidUtils.createFilename(p, options).toString()); options.writingDeleteDelta(false); options.statementId(-1); assertEquals("/tmp/delta_0000100_0000200/bucket_00023", AcidUtils.createFilename(p, options).toString()); options.writingDeleteDelta(true); assertEquals("/tmp/delete_delta_0000100_0000200/bucket_00023", AcidUtils.createFilename(p, options).toString()); options.writingDeleteDelta(false); options.statementId(7); assertEquals("/tmp/delta_0000100_0000200_0007/bucket_00023", AcidUtils.createFilename(p, options).toString()); options.writingDeleteDelta(true); assertEquals("/tmp/delete_delta_0000100_0000200_0007/bucket_00023", AcidUtils.createFilename(p, options).toString()); } @Test public 
void testCreateFilenameLargeIds() throws Exception { Path p = new Path("/tmp"); Configuration conf = new Configuration(); AcidOutputFormat.Options options = new AcidOutputFormat.Options(conf) .setOldStyle(true).bucket(123456789); assertEquals("/tmp/123456789_0", AcidUtils.createFilename(p, options).toString()); options.bucket(23) .minimumWriteId(1234567880) .maximumWriteId(1234567890) .writingBase(true) .setOldStyle(false); assertEquals("/tmp/base_1234567890/bucket_00023", AcidUtils.createFilename(p, options).toString()); options.writingBase(false); assertEquals("/tmp/delta_1234567880_1234567890_0000/bucket_00023", AcidUtils.createFilename(p, options).toString()); } @Test public void testParsing() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, //new MockFile("mock:/tmp/base_000123/bucket_00001", 500, new byte[0]), new MockFile("mock:/tmp/delta_000005_000006/bucket_00001", 500, new byte[0]), new MockFile("mock:/tmp/delete_delta_000005_000006/bucket_00001", 500, new byte[0])); assertEquals(123, AcidUtils.ParsedBase.parseBase(new Path("/tmp/base_000123")).getWriteId()); assertEquals(0, AcidUtils.ParsedBase.parseBase(new Path("/tmp/base_000123")).getVisibilityTxnId()); Path dir = new Path("mock:/tmp/"); AcidOutputFormat.Options opts = AcidUtils.parseBaseOrDeltaBucketFilename(new Path(dir, "base_567/bucket_123"), conf); assertEquals(false, opts.getOldStyle()); assertEquals(true, opts.isWritingBase()); assertEquals(567, opts.getMaximumWriteId()); assertEquals(0, opts.getMinimumWriteId()); assertEquals(123, opts.getBucketId()); opts = AcidUtils.parseBaseOrDeltaBucketFilename( new MockPath(fs, dir + "/delta_000005_000006/bucket_00001"), conf); assertEquals(false, opts.getOldStyle()); assertEquals(false, opts.isWritingBase()); assertEquals(6, opts.getMaximumWriteId()); assertEquals(5, opts.getMinimumWriteId()); assertEquals(1, opts.getBucketId()); opts = AcidUtils.parseBaseOrDeltaBucketFilename( new MockPath(fs, dir + "/delete_delta_000005_000006/bucket_00001"), conf); assertEquals(false, opts.getOldStyle()); assertEquals(false, opts.isWritingBase()); assertEquals(6, opts.getMaximumWriteId()); assertEquals(5, opts.getMinimumWriteId()); assertEquals(1, opts.getBucketId()); opts = AcidUtils.parseBaseOrDeltaBucketFilename(new Path(dir, "000123_0"), conf); assertEquals(true, opts.getOldStyle()); assertEquals(true, opts.isWritingBase()); assertEquals(123, opts.getBucketId()); assertEquals(0, opts.getMinimumWriteId()); assertEquals(0, opts.getMaximumWriteId()); } @Test public void testOriginal() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/000000_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "1", 500, new byte[0]), new MockFile("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "2", 500, new byte[0]), new MockFile("mock:/tbl/part1/000001_1", 500, new byte[0]), new MockFile("mock:/tbl/part1/000002_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/random", 500, new byte[0]), new MockFile("mock:/tbl/part1/_done", 0, new byte[0]), new MockFile("mock:/tbl/part1/subdir/000000_0", 0, new byte[0])); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "/tbl/part1"), conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); assertEquals(null, dir.getBaseDirectory()); assertEquals(0, dir.getCurrentDirectories().size()); assertEquals(0, dir.getObsolete().size()); 
List<HdfsFileStatusWithId> result = dir.getOriginalFiles(); assertEquals(7, result.size()); assertEquals("mock:/tbl/part1/000000_0", result.get(0).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "1", result.get(1).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/000000_0" + Utilities.COPY_KEYWORD + "2", result.get(2).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/000001_1", result.get(3).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/000002_0", result.get(4).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/random", result.get(5).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/subdir/000000_0", result.get(6).getFileStatus().getPath().toString()); } @Test public void testOriginalDeltas() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/000000_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/000001_1", 500, new byte[0]), new MockFile("mock:/tbl/part1/000002_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/random", 500, new byte[0]), new MockFile("mock:/tbl/part1/_done", 0, new byte[0]), new MockFile("mock:/tbl/part1/subdir/000000_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_025/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_029_029/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_030/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_050_100/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_101_101/bucket_0", 0, new byte[0])); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "mock:/tbl/part1"), conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); assertEquals(null, dir.getBaseDirectory()); List<Path> obsolete = dir.getObsolete(); assertEquals(2, obsolete.size()); assertEquals("mock:/tbl/part1/delta_025_025", obsolete.get(0).toString()); assertEquals("mock:/tbl/part1/delta_029_029", obsolete.get(1).toString()); List<HdfsFileStatusWithId> result = dir.getOriginalFiles(); assertEquals(5, result.size()); assertEquals("mock:/tbl/part1/000000_0", result.get(0).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/000001_1", result.get(1).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/000002_0", result.get(2).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/random", result.get(3).getFileStatus().getPath().toString()); assertEquals("mock:/tbl/part1/subdir/000000_0", result.get(4).getFileStatus().getPath().toString()); List<AcidUtils.ParsedDelta> deltas = dir.getCurrentDirectories(); assertEquals(2, deltas.size()); AcidUtils.ParsedDelta delt = deltas.get(0); assertEquals("mock:/tbl/part1/delta_025_030", delt.getPath().toString()); assertEquals(25, delt.getMinWriteId()); assertEquals(30, delt.getMaxWriteId()); delt = deltas.get(1); assertEquals("mock:/tbl/part1/delta_050_100", delt.getPath().toString()); assertEquals(50, delt.getMinWriteId()); assertEquals(100, delt.getMaxWriteId()); } @Test public void testBaseDeltas() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]), new 
MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_49/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_025/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_029_029/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_030/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_050_105/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_90_120/bucket_0", 0, new byte[0])); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "mock:/tbl/part1"), conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); assertEquals("mock:/tbl/part1/base_49", dir.getBaseDirectory().toString()); List<Path> obsoletes = dir.getObsolete(); assertEquals(5, obsoletes.size()); Set<String> obsoletePathNames = new HashSet<String>(); for (Path obsolete : obsoletes) { obsoletePathNames.add(obsolete.toString()); } assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_5")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_10")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_030")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_025")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_029_029")); assertEquals(0, dir.getOriginalFiles().size()); List<AcidUtils.ParsedDelta> deltas = dir.getCurrentDirectories(); assertEquals(1, deltas.size()); AcidUtils.ParsedDelta delt = deltas.get(0); assertEquals("mock:/tbl/part1/delta_050_105", delt.getPath().toString()); assertEquals(50, delt.getMinWriteId()); assertEquals(105, delt.getMaxWriteId()); } @Test public void testObsoleteOriginals() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/000000_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/000001_1", 500, new byte[0])); Path part = new MockPath(fs, "/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:150:" + Long.MAX_VALUE + ":"), null, false, null, false); // Obsolete list should include the two original bucket files, and the old base dir List<Path> obsoletes = dir.getObsolete(); assertEquals(3, obsoletes.size()); assertEquals("mock:/tbl/part1/base_5", obsoletes.get(0).toString()); assertEquals("mock:/tbl/part1/base_10", dir.getBaseDirectory().toString()); } @Test public void testOverlapingDelta() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); 
conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString()); List<Path> obsolete = dir.getObsolete(); assertEquals(2, obsolete.size()); assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).toString()); assertEquals("mock:/tbl/part1/delta_0060_60", obsolete.get(1).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(4, delts.size()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_00061_61", delts.get(1).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62", delts.get(2).getPath().toString()); assertEquals("mock:/tbl/part1/delta_0000063_63", delts.get(3).getPath().toString()); } /** * Hive 1.3.0 delta dir naming scheme which supports multi-statement txns * @throws Exception */ @Test public void testOverlapingDelta2() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_4/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_7/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_058_58/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString()); List<Path> obsolete = dir.getObsolete(); assertEquals(5, obsolete.size()); assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).toString()); assertEquals("mock:/tbl/part1/delta_058_58", obsolete.get(1).toString()); assertEquals("mock:/tbl/part1/delta_0060_60_1", obsolete.get(2).toString()); assertEquals("mock:/tbl/part1/delta_0060_60_4", obsolete.get(3).toString()); assertEquals("mock:/tbl/part1/delta_0060_60_7", obsolete.get(4).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(5, delts.size()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_00061_61_0", delts.get(1).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62_0", delts.get(2).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62_3", delts.get(3).getPath().toString()); assertEquals("mock:/tbl/part1/delta_0000063_63_0", delts.get(4).getPath().toString()); } @Test public void deltasWithOpenTxnInRead() throws Exception { Configuration conf = new 
Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); //hypothetically, txn 50 is open and writing write ID 4 conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[] {50}, new BitSet(), 1000, 55).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:4:4"), null, false, null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(2, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_2_5", delts.get(1).getPath().toString()); } /** * @since 1.3.0 * @throws Exception */ @Test public void deltasWithOpenTxnInRead2() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_101_101_1/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); //hypothetically, txn 50 is open and writing write ID 4 conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[] {50}, new BitSet(), 1000, 55).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:4:4"), null, false, null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(2, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_2_5", delts.get(1).getPath().toString()); } @Test public void deltasWithOpenTxnsNotInCompact() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidCompactorWriteIdList("tbl:4:" + Long.MAX_VALUE), null, false, null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(1, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); } @Test public void deltasWithOpenTxnsNotInCompact2() throws Exception { Configuration conf = new Configuration(); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0" + AcidUtils.DELTA_SIDE_FILE_SUFFIX, 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_6_10/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidCompactorWriteIdList("tbl:3:" + Long.MAX_VALUE), 
null, false, null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(1, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); } @Test public void testBaseWithDeleteDeltas() throws Exception { Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_49/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_025/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_029_029/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_029_029/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_025_030/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_025_030/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delta_050_105/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_050_105/bucket_0", 0, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_110_110/bucket_0", 0, new byte[0])); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "mock:/tbl/part1"), conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); assertEquals("mock:/tbl/part1/base_49", dir.getBaseDirectory().toString()); List<Path> obsoletes = dir.getObsolete(); assertEquals(7, obsoletes.size()); Set<String> obsoletePathNames = new HashSet<String>(); for (Path obsolete : obsoletes) { obsoletePathNames.add(obsolete.toString()); } assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_5")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/base_10")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delete_delta_025_030")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_030")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_025_025")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delete_delta_029_029")); assertTrue(obsoletePathNames.contains("mock:/tbl/part1/delta_029_029")); assertEquals(0, dir.getOriginalFiles().size()); List<AcidUtils.ParsedDelta> deltas = dir.getCurrentDirectories(); assertEquals(2, deltas.size()); assertEquals("mock:/tbl/part1/delete_delta_050_105", deltas.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_050_105", deltas.get(1).getPath().toString()); // The delete_delta_110_110 should not be read because it is greater than the high watermark. 
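// (the high watermark here is write ID 100, taken from the ValidReaderWriteIdList "tbl:100:..." above)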
} @Test public void testOverlapingDeltaAndDeleteDelta() throws Exception { Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_00064_64/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString()); List<Path> obsolete = dir.getObsolete(); assertEquals(3, obsolete.size()); assertEquals("mock:/tbl/part1/delete_delta_052_55", obsolete.get(0).toString()); assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(1).toString()); assertEquals("mock:/tbl/part1/delta_0060_60", obsolete.get(2).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(6, delts.size()); assertEquals("mock:/tbl/part1/delete_delta_40_60", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(1).getPath().toString()); assertEquals("mock:/tbl/part1/delta_00061_61", delts.get(2).getPath().toString()); assertEquals("mock:/tbl/part1/delta_000062_62", delts.get(3).getPath().toString()); assertEquals("mock:/tbl/part1/delta_0000063_63", delts.get(4).getPath().toString()); assertEquals("mock:/tbl/part1/delete_delta_00064_64", delts.get(5).getPath().toString()); } @Test public void testMinorCompactedDeltaMakesInBetweenDelteDeltaObsolete() throws Exception { // This test checks that if we have a minor compacted delta for the txn range [40,60] // then it will make any delete delta in that range as obsolete. 
Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidUtils.AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_50_50/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false, null, false); List<Path> obsolete = dir.getObsolete(); assertEquals(1, obsolete.size()); assertEquals("mock:/tbl/part1/delete_delta_50_50", obsolete.get(0).toString()); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(1, delts.size()); assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString()); } @Test public void deltasAndDeleteDeltasWithOpenTxnsNotInCompact() throws Exception { // This tests checks that appropriate delta and delete_deltas are included when minor // compactions specifies a valid open txn range. Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidUtils.AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_2_2/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0" + AcidUtils.DELTA_SIDE_FILE_SUFFIX, 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_7_7/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_6_10/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidCompactorWriteIdList("tbl:4:" + Long.MAX_VALUE + ":"), null, false, null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(2, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delete_delta_2_2", delts.get(1).getPath().toString()); } @Test public void deleteDeltasWithOpenTxnInRead() throws Exception { Configuration conf = new Configuration(); conf.setInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, AcidUtils.AcidOperationalProperties.getDefault().toInt()); MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delete_delta_3_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_101_101_1/bucket_0", 500, new byte[0])); Path part = new MockPath(fs, "mock:/tbl/part1"); //hypothetically, txn 50 is open and writing write ID 4 conf.set(ValidTxnList.VALID_TXNS_KEY, 
new ValidReadTxnList(new long[] {50}, new BitSet(), 1000, 55).writeToString()); AcidUtils.Directory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:4:4"), null, false, null, false); List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories(); assertEquals(3, delts.size()); assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString()); assertEquals("mock:/tbl/part1/delete_delta_2_5", delts.get(1).getPath().toString()); assertEquals("mock:/tbl/part1/delta_2_5", delts.get(2).getPath().toString()); // Note that delete_delta_3_3 should not be read, when a minor compacted // [delete_]delta_2_5 is present. } @Test public void testDeleteDeltaSubdirPathGeneration() throws Exception { String deleteDeltaSubdirPath = AcidUtils.deleteDeltaSubdir(1, 10); assertEquals("delete_delta_0000001_0000010", deleteDeltaSubdirPath); deleteDeltaSubdirPath = AcidUtils.deleteDeltaSubdir(1, 10, 5); assertEquals("delete_delta_0000001_0000010_0005", deleteDeltaSubdirPath); } @Test public void testDeleteEventDeltaDirPathFilter() throws Exception { Path positivePath = new Path("delete_delta_000001_000010"); Path negativePath = new Path("delta_000001_000010"); assertEquals(true, AcidUtils.deleteEventDeltaDirFilter.accept(positivePath)); assertEquals(false, AcidUtils.deleteEventDeltaDirFilter.accept(negativePath)); } @Test public void testAcidOperationalProperties() throws Exception { AcidUtils.AcidOperationalProperties testObj = AcidUtils.AcidOperationalProperties.getDefault(); assertsForAcidOperationalProperties(testObj, "default"); testObj = AcidUtils.AcidOperationalProperties.parseInt(1); assertsForAcidOperationalProperties(testObj, "split_update"); testObj = AcidUtils.AcidOperationalProperties.parseString("default"); assertsForAcidOperationalProperties(testObj, "default"); } private void assertsForAcidOperationalProperties(AcidUtils.AcidOperationalProperties testObj, String type) throws Exception { switch(type) { case "split_update": case "default": assertEquals(true, testObj.isSplitUpdate()); assertEquals(false, testObj.isHashBasedMerge()); assertEquals(1, testObj.toInt()); assertEquals("|split_update", testObj.toString()); break; default: break; } } @Test public void testAcidOperationalPropertiesSettersAndGetters() throws Exception { AcidUtils.AcidOperationalProperties oprProps = AcidUtils.AcidOperationalProperties.getDefault(); Configuration testConf = new Configuration(); // Test setter for configuration object. AcidUtils.setAcidOperationalProperties(testConf, true, oprProps); assertEquals(1, testConf.getInt(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname, -1)); // Test getter for configuration object. assertEquals(oprProps.toString(), AcidUtils.getAcidOperationalProperties(testConf).toString()); Map<String, String> parameters = new HashMap<String, String>(); // Test setter for map object. AcidUtils.setAcidOperationalProperties(parameters, true, oprProps); assertEquals(oprProps.toString(), parameters.get(HiveConf.ConfVars.HIVE_TXN_OPERATIONAL_PROPERTIES.varname)); // Test getter for map object. assertEquals(1, AcidUtils.getAcidOperationalProperties(parameters).toInt()); parameters.put(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES, oprProps.toString()); // Set the appropriate key in the map and test that we are able to read it back correctly. 
assertEquals(1, AcidUtils.getAcidOperationalProperties(parameters).toInt()); } /** * See {@link TestOrcRawRecordMerger#testGetLogicalLength()} */ @Test public void testGetLogicalLength() throws Exception { } }
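/*
 * Illustrative sketch (not part of the test class above): the assertions above hinge on the
 * ACID directory naming scheme base_<writeId>, delta_<min>_<max>[_<stmt>] and
 * delete_delta_<min>_<max>[_<stmt>]. Under just those naming assumptions, the standalone
 * class below shows how a delta can be judged obsolete because a wider, minor-compacted
 * delta covers its write-ID range, or because it falls at or below the base. It is a
 * simplified illustration, not AcidUtils.getAcidState's actual logic.
 */
class DeltaNamingSketch {

    // Parses "delta_40_60" or "delete_delta_052_55_3" into {min, max}; returns null for other names.
    static long[] parseRange(String dirName) {
        String rest;
        if (dirName.startsWith("delete_delta_")) {
            rest = dirName.substring("delete_delta_".length());
        } else if (dirName.startsWith("delta_")) {
            rest = dirName.substring("delta_".length());
        } else {
            return null;
        }
        String[] parts = rest.split("_");
        return new long[] {Long.parseLong(parts[0]), Long.parseLong(parts[1])};
    }

    // A delta is treated as obsolete here when a strictly wider delta fully covers its range,
    // or when its max write ID does not exceed the base's write ID.
    static boolean isObsolete(String dir, long baseWriteId, String... allDirs) {
        long[] r = parseRange(dir);
        if (r == null) {
            return false;
        }
        if (r[1] <= baseWriteId) {
            return true;
        }
        for (String other : allDirs) {
            if (other.equals(dir)) {
                continue;
            }
            long[] o = parseRange(other);
            if (o != null && o[0] <= r[0] && r[1] <= o[1] && (o[1] - o[0]) > (r[1] - r[0])) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        String[] dirs = {"delta_40_60", "delta_052_55", "delta_0060_60", "delta_00061_61"};
        // Mirrors the expectations in testOverlapingDelta above: 052_55 and 0060_60 are
        // covered by the minor-compacted delta_40_60, while 00061_61 stays current.
        System.out.println(isObsolete("delta_052_55", 50, dirs));   // true
        System.out.println(isObsolete("delta_0060_60", 50, dirs));  // true
        System.out.println(isObsolete("delta_00061_61", 50, dirs)); // false
    }
}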
package think.rpgitems.power; import cat.nyaa.nyaacore.CommandReceiver; import cat.nyaa.nyaacore.LanguageRepository; import cat.nyaa.nyaacore.Message; import cat.nyaa.nyaacore.Pair; import net.md_5.bungee.api.chat.BaseComponent; import org.bukkit.Bukkit; import org.bukkit.NamespacedKey; import org.bukkit.command.Command; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; import org.librazy.nclangchecker.LangKey; import think.rpgitems.RPGItems; import think.rpgitems.item.ItemManager; import think.rpgitems.item.RPGItem; import think.rpgitems.power.impl.PowerSelector; import java.lang.reflect.Field; import java.lang.reflect.ParameterizedType; import java.util.*; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import static think.rpgitems.power.PowerManager.powers; public abstract class RPGCommandReceiver extends CommandReceiver { private final Map<String, String> subCommandAttribute = new HashMap<>(); private static final Map<String, String> commandAttributes = new HashMap<>(); private final LanguageRepository i18n; public RPGCommandReceiver(RPGItems plugin, LanguageRepository i18n) { super(plugin, i18n); this.i18n = i18n; subCommands.forEach( (s, method) -> { Attribute attr = method.getAnnotation(Attribute.class); if (attr == null) return; subCommandAttribute.put(s, attr.value()); } ); } private static List<String> resolvePropertyValueSuggestion(RPGItem item, Class<? extends Power> power, String propertyName, String last, boolean hasNamePrefix) { try { return resolvePropertyValueSuggestion(item, power, power.getField(propertyName), last, hasNamePrefix); } catch (NoSuchFieldException e) { return Collections.emptyList(); } } @SuppressWarnings("unchecked") private static List<String> resolvePropertyValueSuggestion(RPGItem item, Class<? extends Power> power, Field propertyField, String last, boolean hasNamePrefix) { BooleanChoice bc = propertyField.getAnnotation(BooleanChoice.class); if (bc != null) { return Stream.of(bc.trueChoice(), bc.falseChoice()).map(s -> (hasNamePrefix ? propertyField.getName() + ":" : "") + s).filter(s -> s.startsWith(last)).collect(Collectors.toList()); } if (Collection.class.isAssignableFrom(propertyField.getType())) { ParameterizedType listType = (ParameterizedType) propertyField.getGenericType(); Class<?> listArg = (Class<?>) listType.getActualTypeArguments()[0]; if (listArg.equals(Trigger.class)) { return resolveEnumListValue(power, propertyField, new ArrayList<>(Trigger.keySet()), last, hasNamePrefix); } if (!listArg.isEnum()) { if (propertyField.getName().equalsIgnoreCase("conditions")) { List<PowerCondition> conditions = item.getPower(PowerCondition.class, true); List<String> conditionIds = conditions.stream().map(PowerCondition::id).collect(Collectors.toList()); return resolveEnumListValue(power, propertyField, conditionIds, last, hasNamePrefix); } if (propertyField.getName().equalsIgnoreCase("selectors")) { List<PowerSelector> selectors = item.getPower(PowerSelector.class); List<String> selectorIds = selectors.stream().map(PowerSelector::id).collect(Collectors.toList()); return resolveEnumListValue(power, propertyField, selectorIds, last, hasNamePrefix); } return Collections.emptyList(); } List<String> enumValues = Stream.of(((Class<? 
extends Enum>) listArg).getEnumConstants()).map(Enum::name).collect(Collectors.toList()); return resolveEnumListValue(power, propertyField, enumValues, last, hasNamePrefix); } AcceptedValue as = propertyField.getAnnotation(AcceptedValue.class); if (as != null) { return PowerManager.getAcceptedValue(power, as).stream().map(s -> (hasNamePrefix ? propertyField.getName() + ":" : "") + s).filter(s -> s.startsWith(last)).collect(Collectors.toList()); } if (propertyField.getType().equals(boolean.class) || propertyField.getType().equals(Boolean.class)) { return Stream.of(true, false).map(s -> (hasNamePrefix ? propertyField.getName() + ":" : "") + s).filter(s -> s.startsWith(last)).collect(Collectors.toList()); } if (propertyField.getType().isEnum()) { return Stream.of(propertyField.getType().getEnumConstants()).map(s -> (hasNamePrefix ? propertyField.getName() + ":" : "") + s.toString()).filter(s -> s.startsWith(last)).collect(Collectors.toList()); } return Collections.emptyList(); } private static List<String> resolveEnumListValue(Class<? extends Power> power, Field propertyField, List<String> enumValues, String last, boolean hasNamePrefix) { String currentValuesStr; if (hasNamePrefix) { currentValuesStr = last.replace(propertyField.getName() + ":", ""); } else { currentValuesStr = last; } List<String> currentVaules = Stream.of(currentValuesStr.split(",")).collect(Collectors.toList()); int size = currentVaules.size(); String lastVaule = size > 0 ? currentVaules.get(size - 1) : ""; if (enumValues.contains(lastVaule)) { lastVaule = ""; } else { currentVaules.remove(size - 1); } AcceptedValue as = propertyField.getAnnotation(AcceptedValue.class); if (as != null) { List<String> acceptedValue = PowerManager.getAcceptedValue(power, as); enumValues.retainAll(acceptedValue); } String incompleteValue = lastVaule; if (Set.class.isAssignableFrom(propertyField.getType()) || (as != null && as.preset() == Preset.TRIGGERS)) { enumValues.removeAll(currentVaules); } String base = incompleteValue.isEmpty() ? last : last.replaceAll(incompleteValue + "$", ""); boolean next = (currentVaules.isEmpty() && !hasNamePrefix) || base.endsWith(":") || base.endsWith(","); return enumValues.stream().filter(n -> n.startsWith(incompleteValue)).map(n -> base + (next ? "" : ",") + n).collect(Collectors.toList()); } private List<String> resolvePowerOrPropertySuggestion(CommandSender sender, String[] args) { if (args.length < 4) return Collections.emptyList(); String last = args[args.length - 1]; String[] arg = Arrays.copyOf(args, args.length - 1); Arguments cmd = Arguments.parse(arg, sender); if (cmd == null) return Collections.emptyList(); Pair<RPGItem, String> itemCommand = resolveItemCommand(cmd.next(), cmd.next()); if (itemCommand == null) return Collections.emptyList(); switch (itemCommand.getValue()) { case "get": case "set": { return resolveGetSet(last, cmd, itemCommand); } case "power": { return resolvePowerProperties(sender, itemCommand.getKey(), last, cmd); } default: return Collections.emptyList(); } } private List<String> resolvePowerProperties(CommandSender sender, RPGItem item, String last, Arguments cmd) { @LangKey(skipCheck = true) String powName = cmd.next(); NamespacedKey powerKey; try { powerKey = PowerManager.parseKey(powName); } catch (UnknownExtensionException e) { return Collections.emptyList(); } Class<? 
extends Power> power = powers.get(powerKey); if (power == null) return Collections.emptyList(); Map<String, PowerProperty> argMap = PowerManager.getProperties(power); Set<Field> settled = new HashSet<>(); List<Field> required = argMap.values().stream() .filter(PowerProperty::required) .sorted(Comparator.comparing(PowerProperty::order)) .map(PowerProperty::field) .collect(Collectors.toList()); PowerMeta powerMeta = power.getAnnotation(PowerMeta.class); for (Map.Entry<String, PowerProperty> prop : argMap.entrySet()) { Field field = prop.getValue().field(); String name = prop.getKey(); String value = cmd.argString(name, null); if (value != null || isTrivialProperty(powerMeta, name) ) { required.remove(field); settled.add(field); } } if (settled.isEmpty()) { actionBarTip(sender, powerKey, null); } return resolvePropertiesSuggestions(sender, item, last, power, argMap, settled, required); } protected boolean isTrivialProperty(PowerMeta powerMeta, String name) { return (powerMeta.immutableTrigger() && name.equals("triggers")) || (powerMeta.marker() && name.equals("triggers")) || (powerMeta.marker() && name.equals("conditions") && !powerMeta.withConditions()) || (!powerMeta.withSelectors() && name.equals("selectors")) || (!powerMeta.withContext() && name.equals("requiredContext")) || name.equals("displayName"); } private List<String> resolvePropertiesSuggestions(CommandSender sender, RPGItem item, String last, Class<? extends Power> power, Map<String, PowerProperty> argMap, Set<Field> settled, List<Field> required) { if (argMap.keySet().stream().anyMatch(f -> last.startsWith(f + ":"))) {//we are suggesting a value as we have the complete property name String currentPropertyName = last.split(":")[0]; actionBarTip(sender, powers.inverse().get(power), currentPropertyName); return resolvePropertyValueSuggestion(item, power, currentPropertyName, last, true); } List<String> suggestions; suggestions = required.stream().map(s -> s.getName() + ":").filter(s -> s.startsWith(last)).collect(Collectors.toList()); if (!suggestions.isEmpty()) return suggestions; //required property suggestions = argMap.values().stream().filter(s -> !settled.contains(s.field())).map(s -> s.field().getName() + ":").filter(s -> s.startsWith(last)).collect(Collectors.toList()); return suggestions; //unsettled property } private void actionBarTip(CommandSender sender, NamespacedKey power, String property) { if (sender instanceof Player) { Bukkit.getScheduler().runTask(RPGItems.plugin, () -> { String description = PowerManager.getDescription(power, property); if (description == null) { return; } new Message(description).send((Player) sender, Message.MessageType.ACTION_BAR); }); } } private List<String> resolveGetSet(String last, Arguments cmd, Pair<RPGItem, String> itemCommand) { RPGItem item = itemCommand.getKey(); String powerName = cmd.next(); NamespacedKey key; try { key = PowerManager.parseKey(powerName); } catch (UnknownExtensionException e) { return Collections.emptyList(); } List<Power> powers = item.getPowers().stream().filter(p -> item.getPowerKey(p).equals(key)).collect(Collectors.toList()); if (powers.isEmpty()) return Collections.emptyList(); Class<? extends Power> powerClass = powers.get(0).getClass(); if (cmd.top() == null) { // rpgitem item get/set power return IntStream.rangeClosed(1, powers.size()).mapToObj(Integer::toString).collect(Collectors.toList()); } else { // rpgitem item get/set power 1 ... 
cmd.next(); } if (cmd.top() == null) { // rpgitem item get/set power 1 return PowerManager.getProperties(powerClass).keySet().stream().filter(s -> s.startsWith(last)).collect(Collectors.toList()); } if (itemCommand.getValue().equals("get")) return Collections.emptyList(); // rpgitem item set power 1 property return resolvePropertyValueSuggestion(item, powerClass, cmd.next(), last, false); } private static Pair<RPGItem, String> resolveItemCommand(String f, String s) { Optional<RPGItem> rpgItem = ItemManager.getItem(f); if (rpgItem.isPresent()) { return new Pair<>(rpgItem.get(), s); } rpgItem = ItemManager.getItem(s); return rpgItem.map(r -> new Pair<>(r, f)).orElse(null); } private static List<String> resolveSet(Set<String> values, String last) { List<String> currentVaules = Stream.of(last.split(",")).collect(Collectors.toList()); String lastVaule = currentVaules.get(currentVaules.size() - 1); if (values.contains(lastVaule)) { lastVaule = ""; } else { currentVaules.remove(currentVaules.size() - 1); } values.removeAll(currentVaules); String incompleteValue = lastVaule; String base = incompleteValue.isEmpty() ? last : last.replaceAll(incompleteValue + "$", ""); boolean next = currentVaules.isEmpty() || base.endsWith(","); return values.stream().filter(n -> n.startsWith(incompleteValue)).map(n -> base + (next ? "" : ",") + n).collect(Collectors.toList()); } @Override public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { if ( (args.length > 0 && ItemManager.getItem(args[0]).isPresent()) || (args.length > 1 && args[1].equals("create") && !ItemManager.getItem(args[0]).isPresent()) ) { if (args.length > 1) { String cmd = args[1]; args[1] = args[0]; args[0] = cmd; } else { String name = args[0]; args = new String[args.length + 1]; args[1] = name; args[0] = "print"; } } Arguments cmd = Arguments.parse(args, sender); if (cmd == null) return false; acceptCommand(sender, cmd); return true; } @Override public List<String> onTabComplete(CommandSender sender, Command command, String alias, String[] args) { boolean suggestion = args[args.length - 1].isEmpty(); CommandReceiver.Arguments cmd = CommandReceiver.Arguments.parse(args, sender); if (cmd == null) return Collections.emptyList(); switch (cmd.length()) { case 0: return subCommandAttribute.entrySet().stream().filter(entry -> entry.getValue().startsWith("command")).map(Map.Entry::getKey).collect(Collectors.toList()); case 1: { String str = cmd.next(); if (suggestion) { if (ItemManager.getItem(str).isPresent()) { // we have a `/rpgitem item` and waiting a proper command return subCommandAttribute.entrySet().stream().filter(entry -> Stream.of("item", "power", "property").anyMatch(entry.getValue()::startsWith)).map(Map.Entry::getKey).collect(Collectors.toList()); } else { // we have a `/rpgitem command` and waiting a argument String attr = subCommandAttribute.get(str); if (attr == null) return Collections.emptyList(); if (attr.startsWith("command")) { // it's a global command and we have suggestion in attr String[] att = attr.split(":", 2); if (att.length > 1) { return Arrays.asList(att[1].split(",")); } return Collections.emptyList(); } else { // it's a item command, just items return new ArrayList<>(ItemManager.itemNames()); } } } else { // trying to complete a `/rpgitem com` or `/rpgitem ite` List<String> list = subCommands.keySet().stream().filter(s -> s.startsWith(str)).collect(Collectors.toList()); if (!list.isEmpty()) return list; return ItemManager.itemNames().stream().filter(s -> 
s.startsWith(str)).collect(Collectors.toList()); } } case 2: { String first = cmd.next(); String second = cmd.next(); if (suggestion) { // may be `/rpgitem item command` or `/rpgitem command item` Pair<RPGItem, String> itemCommand = resolveItemCommand(first, second); if (itemCommand == null) return Collections.emptyList(); // neither String attr = subCommandAttribute.get(itemCommand.getValue()); if (attr == null) return Collections.emptyList(); String[] att = attr.split(":", 2); switch (att[0]) { case "property": case "power": // four case below switch (itemCommand.getValue()) { case "power": return powers.keySet().stream().map(s -> PowerManager.hasExtension() ? s : s.getKey()).map(Object::toString).collect(Collectors.toList()); // all powers case "set": case "get": case "removepower": return itemCommand.getKey().getPowers().stream().map(itemCommand.getKey()::getPowerKey).map(s -> PowerManager.hasExtension() ? s : s.getKey()).map(Object::toString).collect(Collectors.toList()); // current powers default: return Collections.emptyList(); } case "item": case "command": { return att.length > 1 ? Arrays.asList(att[1].split(",")) : null; // suggestion bundled in attr } default: return null; } } else { if (ItemManager.getItem(first).isPresent()) { // trying to complete `/rpgitem item com` return subCommandAttribute.entrySet().stream() .filter(entry -> Stream.of("item", "power", "property").anyMatch(entry.getValue()::startsWith)) .map(Map.Entry::getKey) .filter(s -> s.startsWith(second)) .collect(Collectors.toList()); } else { // trying to complete `/rpgitem commmand argu` String attr = subCommandAttribute.get(first); if (attr == null) return Collections.emptyList(); String[] att = attr.split(":", 2); switch (att[0]) { case "property": case "power": case "item": { return ItemManager.itemNames().stream().filter(s -> s.startsWith(second)).collect(Collectors.toList()); // items } case "items": { return resolveSet(ItemManager.itemNames(), second); // items } case "command": { return att.length > 1 ? Arrays.stream(att[1].split(",")).filter(s -> s.startsWith(second)).collect(Collectors.toList()) : null; // bundled in attr } default: return Collections.emptyList(); } } } } case 3: { String first = cmd.next(); String second = cmd.next(); String third = cmd.next(); Pair<RPGItem, String> itemCommand = resolveItemCommand(first, second); if (itemCommand == null) return Collections.emptyList(); String attr = subCommandAttribute.get(itemCommand.getValue()); if (attr == null) return Collections.emptyList(); String[] att = attr.split(":", 2); if (suggestion) { return resolvePowerOrPropertySuggestion(sender, args); // only case is `/rpgitem power item somepower` } else { switch (att[0]) { case "property": case "power": switch (itemCommand.getValue()) { case "power": return powers.keySet().stream() .filter(s -> s.getKey().startsWith(third) || s.toString().startsWith(third)) .map(s -> PowerManager.hasExtension() ? s : s.getKey()) .map(Object::toString).collect(Collectors.toList()); // only case is `/rpgitem power item somepow` case "set": case "get": case "removepower": return itemCommand.getKey() .getPowers() .stream() .map(itemCommand.getKey()::getPowerKey) .filter(s -> s.getKey().startsWith(third) || s.toString().startsWith(third)) .map(s -> PowerManager.hasExtension() ? s : s.getKey()) .map(Object::toString) .collect(Collectors.toList()); // complete current powers default: return Collections.emptyList(); } case "item": { return att.length > 1 ? 
Arrays.stream(att[1].split(",")).filter(s -> s.startsWith(third)).collect(Collectors.toList()) : null; // bundled } default: return Collections.emptyList(); } } } default: { return resolvePowerOrPropertySuggestion(sender, args); } } } protected void msg(CommandSender target, @LangKey(varArgsPosition = 1) String template, Map<String, BaseComponent> map, Object... args) { new Message("").append(i18n.getFormatted(template, args), map).send(target); } @Override protected boolean showCompleteMessage() { return false; } }
/* * Copyright (C) 2015 The 8-Bit Bunch. Licensed under the Apache License, Version 1.1 * (the "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at <http://www.apache.org/licenses/LICENSE-1.1>. * Unless required by applicable law or agreed to in writing, software distributed under * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF * ANY KIND, either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package a2copy; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.webcodepro.applecommander.storage.DirectoryEntry; import com.webcodepro.applecommander.storage.Disk; import com.webcodepro.applecommander.storage.DiskFullException; import com.webcodepro.applecommander.storage.FileEntry; import com.webcodepro.applecommander.storage.FormattedDisk; import com.webcodepro.applecommander.storage.os.prodos.ProdosFileEntry; /* * This class uses AppleCommander's command-line interface to extract an entire * set of files and directories from an image, or build a whole image from * files and directories. * * Has specific hard-coded addresses for Lawless Legends files; should fix this * at some point to use a metadata configuration file or something like that. */ public class A2Copy { /* * Main command-line driver */ public static void main(String[] args) throws IOException, DiskFullException { try { if (args[0].equals("-get")) { getDirFromImg(args[1], args[2], args[3]); return; } if (args[0].equals("-put")) { putDirToImg(args[1], args[2], args[3]); return; } } catch (ArrayIndexOutOfBoundsException e) { } System.err.format("Usage: A2copy [-get imgFile srcImgDir destLocalDir] | [-put imgFile dstImgDir srcLocalDir]\n"); System.err.format(" where srcImgDir/dstImgDir is a subdirectory in the image, or / for the root directory.\n"); System.exit(1); } /** Patterns used for parsing filenames and paths */ static Pattern extPat = Pattern.compile("^(.*)\\.([^.]+)$"); static Pattern hashPat = Pattern.compile("^(.*)#([^#]+$)"); static Pattern pathPat = Pattern.compile("^/?([^/]+)(.*$)"); /** * Extract all the files and subdirectories from one directory in an image file, and * write them to the local filesystem. * * @throws IOException if something goes wrong * @throws DiskFullException this actually can't happen (because we're not creating dirs) */ static void getDirFromImg(String imgFile, String srcImgDir, String dstLocalDir) throws IOException, DiskFullException { // Create the local dir if necessary. File localDirFile = new File(dstLocalDir); localDirFile.mkdirs(); // Open the image file and get the disk inside it. FormattedDisk fd = new Disk(imgFile).getFormattedDisks()[0]; // Locate the right subdirectory on the disk. DirectoryEntry imgDir = findSubDir(fd, srcImgDir, false); // Recursively extract the files from that subdirectory. getAllFiles((List<FileEntry>)imgDir.getFiles(), localDirFile); } static DirectoryEntry findSubDir(DirectoryEntry imgDir, String subDirs, boolean create) throws DiskFullException { Matcher m = pathPat.matcher(subDirs); if (m.matches()) { // Process next component of the directory path. 
String subName = m.group(1); String remaining = m.group(2); for (FileEntry e : (List<FileEntry>)imgDir.getFiles()) { if (!e.isDeleted() && e.isDirectory() && e.getFilename().equalsIgnoreCase(subName)) return findSubDir((DirectoryEntry)e, remaining, create); } // Not found. If we're not allowed to create it, error out. if (!create) { System.err.format("Error: subdirectory '%s' not found.\n", subDirs); System.exit(2); } // Create the subdirectory and continue to sub-sub-dirs. return findSubDir(imgDir.createDirectory(subName.toUpperCase()), remaining, create); } return imgDir; } /** * Helper for file/directory extraction. * @param files set of files to extract * @param dstTargetDir where to put them * @throws IOException if something goes wrong */ static void getAllFiles(List<FileEntry> files, File dstTargetDir) throws IOException { // Ensure the target directory exists dstTargetDir.mkdir(); // Make a map of the existing filesystem files so we can match them. This way, // we can retain whatever case regime the user has established on the filesystem. // HashMap<String, File> existingFiles = new HashMap<>(); HashMap<String, String> baseMap = new HashMap<>(); for (File f: dstTargetDir.listFiles()) { if (!f.isFile()) continue; String name = f.getName(); existingFiles.put(name.toLowerCase(), f); Matcher m = hashPat.matcher(name); if (m.matches()) name = m.group(1); existingFiles.put(name.toLowerCase(), f); m = extPat.matcher(name); if (m.matches()) name = m.group(1); existingFiles.put(name.toLowerCase(), f); baseMap.put(f.getName(), name); } // Process each entry in the list for (FileEntry e : files) { // Skip deleted things if (e.isDeleted()) continue; // Determine the filename we should use locally. If there's a matching // file already here, use its base. // String baseName = e.getFilename().toLowerCase(); if (existingFiles.containsKey(baseName)) { File existingFile = existingFiles.get(baseName); baseName = baseMap.get(existingFile.getName()); existingFile.delete(); } // Recursively process sub-directories if (e.isDirectory()) { File subDir = new File(dstTargetDir, baseName); getAllFiles(((DirectoryEntry)e).getFiles(), subDir); continue; } // Add a hash for the file type. String outName = baseName + "." + e.getFiletype().toLowerCase(); // Add a hash for the address if this kind of entry uses one. if (e.needsAddress()) { int auxType = ((ProdosFileEntry)e).getAuxiliaryType(); outName = outName + "#" + Integer.toHexString(auxType); } // Ready to copy the data. byte[] data = e.getFileData(); try (FileOutputStream out = new FileOutputStream(new File(dstTargetDir, outName))) { out.write(data); } } } /** * Put a whole directory of files/subdirs from the local filesystem into a * subdirectory of an image file. * * @param imgFilePath path to the image file * @param dstImgDir subdirectory in the image file, or "/" for the root * @param srcLocalDir directory containing files and subdirs * @throws DiskFullException if the image file fills up * @throws IOException if something else goes wrong */ static void putDirToImg(String imgFilePath, String dstImgDir, String srcLocalDir) throws IOException, DiskFullException { // Make sure the local dir exists. File localDirFile = new File(srcLocalDir); if (!localDirFile.isDirectory()) { System.err.format("Error: Local directory '%s' not found.\n", srcLocalDir); System.exit(2); } // Open the image file. FormattedDisk fd = new Disk(imgFilePath).getFormattedDisks()[0]; // Get to the right sub-directory. 
DirectoryEntry ent = findSubDir(fd, dstImgDir, true); // And fill it up. putAllFiles(fd, ent, localDirFile); } /** * Helper for image creation. * * @param fd disk to insert files into * @param targetDir directory within the disk * @param srcDir filesystem directory to read * @throws DiskFullException if the image file fills up * @throws IOException if something else goes wrong */ private static void putAllFiles(FormattedDisk fd, DirectoryEntry targetDir, File srcDir) throws DiskFullException, IOException { // Process each file in the source directory for (File srcFile : srcDir.listFiles()) { if (srcFile.isDirectory()) { DirectoryEntry subDir = targetDir.createDirectory(srcFile.getName().toUpperCase()); putAllFiles(fd, subDir, srcFile); continue; } // Parse and strip the hash (address) and extension if any String name = srcFile.getName(); String hash = "0"; Matcher m = hashPat.matcher(name); if (m.matches()) { name = m.group(1); hash = m.group(2); } String ext = "0"; m = extPat.matcher(name); if (m.matches()) { name = m.group(1); ext = m.group(2); } // Create a new entry in the disk image for this file. FileEntry ent = targetDir.createFile(); ent.setFilename(name.toUpperCase()); // Set the file type using the extension we parsed above. ent.setFiletype(ext); // Set the address if we have one and this kind of file wants one. if (ent.needsAddress()) { try { ent.setAddress(Integer.parseInt(hash, 16)); } catch (NumberFormatException e) { /*pass*/ } } // Copy the file data FileInputStream in = new FileInputStream(srcFile); byte[] buf = new byte[(int) srcFile.length()]; int nRead = in.read(buf); if (nRead != srcFile.length()) throw new IOException(String.format("Error reading file '%s'", srcFile.toString())); ent.setFileData(buf); // And save the new entry. fd.save(); } } }
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.security.fra; import java.util.Map; import java.util.Set; import org.joda.beans.Bean; import org.joda.beans.BeanBuilder; import org.joda.beans.BeanDefinition; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectBeanBuilder; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import org.threeten.bp.LocalDate; import com.opengamma.financial.convention.businessday.BusinessDayConvention; import com.opengamma.financial.convention.daycount.DayCount; import com.opengamma.financial.convention.frequency.Frequency; import com.opengamma.financial.security.FinancialSecurity; import com.opengamma.financial.security.FinancialSecurityVisitor; import com.opengamma.id.ExternalId; import com.opengamma.id.ExternalIdBundle; import com.opengamma.master.security.SecurityDescription; import com.opengamma.util.money.Currency; /** * A security for FRAs. */ @BeanDefinition @SecurityDescription(type = ForwardRateAgreementSecurity.SECURITY_TYPE, description = "Forward rate agreement") public class ForwardRateAgreementSecurity extends FinancialSecurity { /** Serialization version. */ private static final long serialVersionUID = 1L; /** * The security type. */ public static final String SECURITY_TYPE = "ForwardRateAgreement"; /** * The currency. */ @PropertyDefinition(validate = "notNull") private Currency _currency; /** * The fixing calendars. */ @PropertyDefinition(validate = "notNull") private Set<ExternalId> _calendars; /** * The payment calendars. */ @PropertyDefinition() private Set<ExternalId> _paymentCalendars; /** * The start date. */ @PropertyDefinition(validate = "notNull") private LocalDate _startDate; /** * The end date. */ @PropertyDefinition(validate = "notNull") private LocalDate _endDate; /** * The fixing date. * * If fixing date is null, the ForwardRateAgreementDefinition will calculate the fixing date from the start date, the * index spot lag and the fixing calendar. */ @PropertyDefinition private LocalDate _fixingDate; /** * The rate. */ @PropertyDefinition private double _rate; /** * The amount. */ @PropertyDefinition private double _amount; /** * The underlying identifier. */ @PropertyDefinition(validate = "notNull") private ExternalId _underlyingId; /** * The underlying identifier. */ @PropertyDefinition(validate = "notNull") private Frequency _indexFrequency; /** * The day count convention */ @PropertyDefinition(validate = "notNull") private DayCount _dayCount; /** * The fixing business day convention */ @PropertyDefinition(validate = "notNull") private BusinessDayConvention _fixingBusinessDayConvention; /** * The fixing lag (generally 0 or 2) */ @PropertyDefinition(validate = "notNull") private Integer _fixingLag; ForwardRateAgreementSecurity() { //For builder super(SECURITY_TYPE); } /** * Creates an instance * * @param currency the currency, not null. 
* @param underlyingId the id of the underlying index (assumed Ibor), not null * @param indexFrequency the index frequency, not null * @param startDate the start date, not null * @param endDate the end date, not null * @param rate the rate * @param amount the amount (-ve if payer) * @param fixingDate the fixing date, if null, then calculate from the start date * @param dayCount the day count convention, not null * @param fixingBusinessDayConvention the business dya convention, not null * @param calendars the calendars to be used, not null * @param fixingLag the fixing lag */ public ForwardRateAgreementSecurity(Currency currency, ExternalId underlyingId, Frequency indexFrequency, LocalDate startDate, LocalDate endDate, double rate, double amount, LocalDate fixingDate, DayCount dayCount, BusinessDayConvention fixingBusinessDayConvention, Set<ExternalId> calendars, int fixingLag) { super(SECURITY_TYPE); setExternalIdBundle(ExternalIdBundle.EMPTY); setCurrency(currency); setStartDate(startDate); setEndDate(endDate); setRate(rate); setAmount(amount); setIndexFrequency(indexFrequency); setUnderlyingId(underlyingId); setFixingDate(fixingDate); setDayCount(dayCount); setFixingBusinessDayConvention(fixingBusinessDayConvention); setCalendars(calendars); setFixingLag(fixingLag); } /** * Creates an instance * * @param currency the currency, not null. * @param underlyingId the id of the underlying index (assumed Ibor), not null * @param indexFrequency the index frequency, not null * @param startDate the start date, not null * @param endDate the end date, not null * @param rate the rate * @param amount the amount (-ve if payer) * @param fixingDate the fixing date, if null, then calculate from the start date * @param dayCount the day count convention, not null * @param fixingBusinessDayConvention the business dya convention, not null * @param fixingCalendars the calendars to be used, not null * @param paymentCalendars the payment calendars, if null the fixing calendars will be used * @param fixingLag the fixing lag */ public ForwardRateAgreementSecurity(Currency currency, ExternalId underlyingId, Frequency indexFrequency, LocalDate startDate, LocalDate endDate, double rate, double amount, LocalDate fixingDate, DayCount dayCount, BusinessDayConvention fixingBusinessDayConvention, Set<ExternalId> fixingCalendars, Set<ExternalId> paymentCalendars, int fixingLag) { super(SECURITY_TYPE); setExternalIdBundle(ExternalIdBundle.EMPTY); setCurrency(currency); setStartDate(startDate); setEndDate(endDate); setRate(rate); setAmount(amount); setIndexFrequency(indexFrequency); setUnderlyingId(underlyingId); setFixingDate(fixingDate); setDayCount(dayCount); setFixingBusinessDayConvention(fixingBusinessDayConvention); setCalendars(fixingCalendars); setPaymentCalendars(paymentCalendars); setFixingLag(fixingLag); } /** * Creates an instance * * @param currency the currency, not null. 
* @param underlyingId the id of the underlying index (assumed Ibor), not null * @param indexFrequency the index frequency, not null * @param startDate the start date, not null * @param endDate the end date, not null * @param rate the rate * @param amount the amount (-ve if payer) * @param dayCount the day count convention, not null * @param fixingBusinessDayConvention the business dya convention, not null * @param fixingCalendars the calendars to be used, not null * @param paymentCalendars the payment calendars, if null the fixing calendars will be used * @param fixingLag the fixing lag */ public ForwardRateAgreementSecurity(Currency currency, ExternalId underlyingId, Frequency indexFrequency, LocalDate startDate, LocalDate endDate, double rate, double amount, DayCount dayCount, BusinessDayConvention fixingBusinessDayConvention, Set<ExternalId> fixingCalendars, Set<ExternalId> paymentCalendars, int fixingLag) { super(SECURITY_TYPE); setExternalIdBundle(ExternalIdBundle.EMPTY); setCurrency(currency); setStartDate(startDate); setEndDate(endDate); setRate(rate); setAmount(amount); setIndexFrequency(indexFrequency); setUnderlyingId(underlyingId); setDayCount(dayCount); setFixingBusinessDayConvention(fixingBusinessDayConvention); setCalendars(fixingCalendars); setPaymentCalendars(paymentCalendars); setFixingLag(fixingLag); } //------------------------------------------------------------------------- @Override public final <T> T accept(FinancialSecurityVisitor<T> visitor) { return visitor.visitForwardRateAgreementSecurity(this); } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code ForwardRateAgreementSecurity}. * @return the meta-bean, not null */ public static ForwardRateAgreementSecurity.Meta meta() { return ForwardRateAgreementSecurity.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(ForwardRateAgreementSecurity.Meta.INSTANCE); } @Override public ForwardRateAgreementSecurity.Meta metaBean() { return ForwardRateAgreementSecurity.Meta.INSTANCE; } //----------------------------------------------------------------------- /** * Gets the currency. * @return the value of the property, not null */ public Currency getCurrency() { return _currency; } /** * Sets the currency. * @param currency the new value of the property, not null */ public void setCurrency(Currency currency) { JodaBeanUtils.notNull(currency, "currency"); this._currency = currency; } /** * Gets the the {@code currency} property. * @return the property, not null */ public final Property<Currency> currency() { return metaBean().currency().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the fixing calendars. * @return the value of the property, not null */ public Set<ExternalId> getCalendars() { return _calendars; } /** * Sets the fixing calendars. * @param calendars the new value of the property, not null */ public void setCalendars(Set<ExternalId> calendars) { JodaBeanUtils.notNull(calendars, "calendars"); this._calendars = calendars; } /** * Gets the the {@code calendars} property. * @return the property, not null */ public final Property<Set<ExternalId>> calendars() { return metaBean().calendars().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the payment calendars. * @return the value of the property */ public Set<ExternalId> getPaymentCalendars() { return _paymentCalendars; } /** * Sets the payment calendars. 
* @param paymentCalendars the new value of the property */ public void setPaymentCalendars(Set<ExternalId> paymentCalendars) { this._paymentCalendars = paymentCalendars; } /** * Gets the the {@code paymentCalendars} property. * @return the property, not null */ public final Property<Set<ExternalId>> paymentCalendars() { return metaBean().paymentCalendars().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the start date. * @return the value of the property, not null */ public LocalDate getStartDate() { return _startDate; } /** * Sets the start date. * @param startDate the new value of the property, not null */ public void setStartDate(LocalDate startDate) { JodaBeanUtils.notNull(startDate, "startDate"); this._startDate = startDate; } /** * Gets the the {@code startDate} property. * @return the property, not null */ public final Property<LocalDate> startDate() { return metaBean().startDate().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the end date. * @return the value of the property, not null */ public LocalDate getEndDate() { return _endDate; } /** * Sets the end date. * @param endDate the new value of the property, not null */ public void setEndDate(LocalDate endDate) { JodaBeanUtils.notNull(endDate, "endDate"); this._endDate = endDate; } /** * Gets the the {@code endDate} property. * @return the property, not null */ public final Property<LocalDate> endDate() { return metaBean().endDate().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the fixing date. * * If fixing date is null, the ForwardRateAgreementDefinition will calculate the fixing date from the start date, the * index spot lag and the fixing calendar. * @return the value of the property */ public LocalDate getFixingDate() { return _fixingDate; } /** * Sets the fixing date. * * If fixing date is null, the ForwardRateAgreementDefinition will calculate the fixing date from the start date, the * index spot lag and the fixing calendar. * @param fixingDate the new value of the property */ public void setFixingDate(LocalDate fixingDate) { this._fixingDate = fixingDate; } /** * Gets the the {@code fixingDate} property. * * If fixing date is null, the ForwardRateAgreementDefinition will calculate the fixing date from the start date, the * index spot lag and the fixing calendar. * @return the property, not null */ public final Property<LocalDate> fixingDate() { return metaBean().fixingDate().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the rate. * @return the value of the property */ public double getRate() { return _rate; } /** * Sets the rate. * @param rate the new value of the property */ public void setRate(double rate) { this._rate = rate; } /** * Gets the the {@code rate} property. * @return the property, not null */ public final Property<Double> rate() { return metaBean().rate().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the amount. * @return the value of the property */ public double getAmount() { return _amount; } /** * Sets the amount. * @param amount the new value of the property */ public void setAmount(double amount) { this._amount = amount; } /** * Gets the the {@code amount} property. 
* @return the property, not null */ public final Property<Double> amount() { return metaBean().amount().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the underlying identifier. * @return the value of the property, not null */ public ExternalId getUnderlyingId() { return _underlyingId; } /** * Sets the underlying identifier. * @param underlyingId the new value of the property, not null */ public void setUnderlyingId(ExternalId underlyingId) { JodaBeanUtils.notNull(underlyingId, "underlyingId"); this._underlyingId = underlyingId; } /** * Gets the the {@code underlyingId} property. * @return the property, not null */ public final Property<ExternalId> underlyingId() { return metaBean().underlyingId().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the underlying identifier. * @return the value of the property, not null */ public Frequency getIndexFrequency() { return _indexFrequency; } /** * Sets the underlying identifier. * @param indexFrequency the new value of the property, not null */ public void setIndexFrequency(Frequency indexFrequency) { JodaBeanUtils.notNull(indexFrequency, "indexFrequency"); this._indexFrequency = indexFrequency; } /** * Gets the the {@code indexFrequency} property. * @return the property, not null */ public final Property<Frequency> indexFrequency() { return metaBean().indexFrequency().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the day count convention * @return the value of the property, not null */ public DayCount getDayCount() { return _dayCount; } /** * Sets the day count convention * @param dayCount the new value of the property, not null */ public void setDayCount(DayCount dayCount) { JodaBeanUtils.notNull(dayCount, "dayCount"); this._dayCount = dayCount; } /** * Gets the the {@code dayCount} property. * @return the property, not null */ public final Property<DayCount> dayCount() { return metaBean().dayCount().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the fixing business day convention * @return the value of the property, not null */ public BusinessDayConvention getFixingBusinessDayConvention() { return _fixingBusinessDayConvention; } /** * Sets the fixing business day convention * @param fixingBusinessDayConvention the new value of the property, not null */ public void setFixingBusinessDayConvention(BusinessDayConvention fixingBusinessDayConvention) { JodaBeanUtils.notNull(fixingBusinessDayConvention, "fixingBusinessDayConvention"); this._fixingBusinessDayConvention = fixingBusinessDayConvention; } /** * Gets the the {@code fixingBusinessDayConvention} property. * @return the property, not null */ public final Property<BusinessDayConvention> fixingBusinessDayConvention() { return metaBean().fixingBusinessDayConvention().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the fixing lag (generally 0 or 2) * @return the value of the property, not null */ public Integer getFixingLag() { return _fixingLag; } /** * Sets the fixing lag (generally 0 or 2) * @param fixingLag the new value of the property, not null */ public void setFixingLag(Integer fixingLag) { JodaBeanUtils.notNull(fixingLag, "fixingLag"); this._fixingLag = fixingLag; } /** * Gets the the {@code fixingLag} property. 
* @return the property, not null */ public final Property<Integer> fixingLag() { return metaBean().fixingLag().createProperty(this); } //----------------------------------------------------------------------- @Override public ForwardRateAgreementSecurity clone() { return JodaBeanUtils.cloneAlways(this); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { ForwardRateAgreementSecurity other = (ForwardRateAgreementSecurity) obj; return JodaBeanUtils.equal(getCurrency(), other.getCurrency()) && JodaBeanUtils.equal(getCalendars(), other.getCalendars()) && JodaBeanUtils.equal(getPaymentCalendars(), other.getPaymentCalendars()) && JodaBeanUtils.equal(getStartDate(), other.getStartDate()) && JodaBeanUtils.equal(getEndDate(), other.getEndDate()) && JodaBeanUtils.equal(getFixingDate(), other.getFixingDate()) && JodaBeanUtils.equal(getRate(), other.getRate()) && JodaBeanUtils.equal(getAmount(), other.getAmount()) && JodaBeanUtils.equal(getUnderlyingId(), other.getUnderlyingId()) && JodaBeanUtils.equal(getIndexFrequency(), other.getIndexFrequency()) && JodaBeanUtils.equal(getDayCount(), other.getDayCount()) && JodaBeanUtils.equal(getFixingBusinessDayConvention(), other.getFixingBusinessDayConvention()) && JodaBeanUtils.equal(getFixingLag(), other.getFixingLag()) && super.equals(obj); } return false; } @Override public int hashCode() { int hash = 7; hash = hash * 31 + JodaBeanUtils.hashCode(getCurrency()); hash = hash * 31 + JodaBeanUtils.hashCode(getCalendars()); hash = hash * 31 + JodaBeanUtils.hashCode(getPaymentCalendars()); hash = hash * 31 + JodaBeanUtils.hashCode(getStartDate()); hash = hash * 31 + JodaBeanUtils.hashCode(getEndDate()); hash = hash * 31 + JodaBeanUtils.hashCode(getFixingDate()); hash = hash * 31 + JodaBeanUtils.hashCode(getRate()); hash = hash * 31 + JodaBeanUtils.hashCode(getAmount()); hash = hash * 31 + JodaBeanUtils.hashCode(getUnderlyingId()); hash = hash * 31 + JodaBeanUtils.hashCode(getIndexFrequency()); hash = hash * 31 + JodaBeanUtils.hashCode(getDayCount()); hash = hash * 31 + JodaBeanUtils.hashCode(getFixingBusinessDayConvention()); hash = hash * 31 + JodaBeanUtils.hashCode(getFixingLag()); return hash ^ super.hashCode(); } @Override public String toString() { StringBuilder buf = new StringBuilder(448); buf.append("ForwardRateAgreementSecurity{"); int len = buf.length(); toString(buf); if (buf.length() > len) { buf.setLength(buf.length() - 2); } buf.append('}'); return buf.toString(); } @Override protected void toString(StringBuilder buf) { super.toString(buf); buf.append("currency").append('=').append(JodaBeanUtils.toString(getCurrency())).append(',').append(' '); buf.append("calendars").append('=').append(JodaBeanUtils.toString(getCalendars())).append(',').append(' '); buf.append("paymentCalendars").append('=').append(JodaBeanUtils.toString(getPaymentCalendars())).append(',').append(' '); buf.append("startDate").append('=').append(JodaBeanUtils.toString(getStartDate())).append(',').append(' '); buf.append("endDate").append('=').append(JodaBeanUtils.toString(getEndDate())).append(',').append(' '); buf.append("fixingDate").append('=').append(JodaBeanUtils.toString(getFixingDate())).append(',').append(' '); buf.append("rate").append('=').append(JodaBeanUtils.toString(getRate())).append(',').append(' '); buf.append("amount").append('=').append(JodaBeanUtils.toString(getAmount())).append(',').append(' '); 
buf.append("underlyingId").append('=').append(JodaBeanUtils.toString(getUnderlyingId())).append(',').append(' '); buf.append("indexFrequency").append('=').append(JodaBeanUtils.toString(getIndexFrequency())).append(',').append(' '); buf.append("dayCount").append('=').append(JodaBeanUtils.toString(getDayCount())).append(',').append(' '); buf.append("fixingBusinessDayConvention").append('=').append(JodaBeanUtils.toString(getFixingBusinessDayConvention())).append(',').append(' '); buf.append("fixingLag").append('=').append(JodaBeanUtils.toString(getFixingLag())).append(',').append(' '); } //----------------------------------------------------------------------- /** * The meta-bean for {@code ForwardRateAgreementSecurity}. */ public static class Meta extends FinancialSecurity.Meta { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code currency} property. */ private final MetaProperty<Currency> _currency = DirectMetaProperty.ofReadWrite( this, "currency", ForwardRateAgreementSecurity.class, Currency.class); /** * The meta-property for the {@code calendars} property. */ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<Set<ExternalId>> _calendars = DirectMetaProperty.ofReadWrite( this, "calendars", ForwardRateAgreementSecurity.class, (Class) Set.class); /** * The meta-property for the {@code paymentCalendars} property. */ @SuppressWarnings({"unchecked", "rawtypes" }) private final MetaProperty<Set<ExternalId>> _paymentCalendars = DirectMetaProperty.ofReadWrite( this, "paymentCalendars", ForwardRateAgreementSecurity.class, (Class) Set.class); /** * The meta-property for the {@code startDate} property. */ private final MetaProperty<LocalDate> _startDate = DirectMetaProperty.ofReadWrite( this, "startDate", ForwardRateAgreementSecurity.class, LocalDate.class); /** * The meta-property for the {@code endDate} property. */ private final MetaProperty<LocalDate> _endDate = DirectMetaProperty.ofReadWrite( this, "endDate", ForwardRateAgreementSecurity.class, LocalDate.class); /** * The meta-property for the {@code fixingDate} property. */ private final MetaProperty<LocalDate> _fixingDate = DirectMetaProperty.ofReadWrite( this, "fixingDate", ForwardRateAgreementSecurity.class, LocalDate.class); /** * The meta-property for the {@code rate} property. */ private final MetaProperty<Double> _rate = DirectMetaProperty.ofReadWrite( this, "rate", ForwardRateAgreementSecurity.class, Double.TYPE); /** * The meta-property for the {@code amount} property. */ private final MetaProperty<Double> _amount = DirectMetaProperty.ofReadWrite( this, "amount", ForwardRateAgreementSecurity.class, Double.TYPE); /** * The meta-property for the {@code underlyingId} property. */ private final MetaProperty<ExternalId> _underlyingId = DirectMetaProperty.ofReadWrite( this, "underlyingId", ForwardRateAgreementSecurity.class, ExternalId.class); /** * The meta-property for the {@code indexFrequency} property. */ private final MetaProperty<Frequency> _indexFrequency = DirectMetaProperty.ofReadWrite( this, "indexFrequency", ForwardRateAgreementSecurity.class, Frequency.class); /** * The meta-property for the {@code dayCount} property. */ private final MetaProperty<DayCount> _dayCount = DirectMetaProperty.ofReadWrite( this, "dayCount", ForwardRateAgreementSecurity.class, DayCount.class); /** * The meta-property for the {@code fixingBusinessDayConvention} property. 
*/ private final MetaProperty<BusinessDayConvention> _fixingBusinessDayConvention = DirectMetaProperty.ofReadWrite( this, "fixingBusinessDayConvention", ForwardRateAgreementSecurity.class, BusinessDayConvention.class); /** * The meta-property for the {@code fixingLag} property. */ private final MetaProperty<Integer> _fixingLag = DirectMetaProperty.ofReadWrite( this, "fixingLag", ForwardRateAgreementSecurity.class, Integer.class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap( this, (DirectMetaPropertyMap) super.metaPropertyMap(), "currency", "calendars", "paymentCalendars", "startDate", "endDate", "fixingDate", "rate", "amount", "underlyingId", "indexFrequency", "dayCount", "fixingBusinessDayConvention", "fixingLag"); /** * Restricted constructor. */ protected Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case 575402001: // currency return _currency; case -1233097483: // calendars return _calendars; case -299417201: // paymentCalendars return _paymentCalendars; case -2129778896: // startDate return _startDate; case -1607727319: // endDate return _endDate; case 1255202043: // fixingDate return _fixingDate; case 3493088: // rate return _rate; case -1413853096: // amount return _amount; case -771625640: // underlyingId return _underlyingId; case -711571286: // indexFrequency return _indexFrequency; case 1905311443: // dayCount return _dayCount; case 502310560: // fixingBusinessDayConvention return _fixingBusinessDayConvention; case 871782053: // fixingLag return _fixingLag; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends ForwardRateAgreementSecurity> builder() { return new DirectBeanBuilder<ForwardRateAgreementSecurity>(new ForwardRateAgreementSecurity()); } @Override public Class<? extends ForwardRateAgreementSecurity> beanType() { return ForwardRateAgreementSecurity.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return _metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code currency} property. * @return the meta-property, not null */ public final MetaProperty<Currency> currency() { return _currency; } /** * The meta-property for the {@code calendars} property. * @return the meta-property, not null */ public final MetaProperty<Set<ExternalId>> calendars() { return _calendars; } /** * The meta-property for the {@code paymentCalendars} property. * @return the meta-property, not null */ public final MetaProperty<Set<ExternalId>> paymentCalendars() { return _paymentCalendars; } /** * The meta-property for the {@code startDate} property. * @return the meta-property, not null */ public final MetaProperty<LocalDate> startDate() { return _startDate; } /** * The meta-property for the {@code endDate} property. * @return the meta-property, not null */ public final MetaProperty<LocalDate> endDate() { return _endDate; } /** * The meta-property for the {@code fixingDate} property. * @return the meta-property, not null */ public final MetaProperty<LocalDate> fixingDate() { return _fixingDate; } /** * The meta-property for the {@code rate} property. * @return the meta-property, not null */ public final MetaProperty<Double> rate() { return _rate; } /** * The meta-property for the {@code amount} property. 
* @return the meta-property, not null */ public final MetaProperty<Double> amount() { return _amount; } /** * The meta-property for the {@code underlyingId} property. * @return the meta-property, not null */ public final MetaProperty<ExternalId> underlyingId() { return _underlyingId; } /** * The meta-property for the {@code indexFrequency} property. * @return the meta-property, not null */ public final MetaProperty<Frequency> indexFrequency() { return _indexFrequency; } /** * The meta-property for the {@code dayCount} property. * @return the meta-property, not null */ public final MetaProperty<DayCount> dayCount() { return _dayCount; } /** * The meta-property for the {@code fixingBusinessDayConvention} property. * @return the meta-property, not null */ public final MetaProperty<BusinessDayConvention> fixingBusinessDayConvention() { return _fixingBusinessDayConvention; } /** * The meta-property for the {@code fixingLag} property. * @return the meta-property, not null */ public final MetaProperty<Integer> fixingLag() { return _fixingLag; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case 575402001: // currency return ((ForwardRateAgreementSecurity) bean).getCurrency(); case -1233097483: // calendars return ((ForwardRateAgreementSecurity) bean).getCalendars(); case -299417201: // paymentCalendars return ((ForwardRateAgreementSecurity) bean).getPaymentCalendars(); case -2129778896: // startDate return ((ForwardRateAgreementSecurity) bean).getStartDate(); case -1607727319: // endDate return ((ForwardRateAgreementSecurity) bean).getEndDate(); case 1255202043: // fixingDate return ((ForwardRateAgreementSecurity) bean).getFixingDate(); case 3493088: // rate return ((ForwardRateAgreementSecurity) bean).getRate(); case -1413853096: // amount return ((ForwardRateAgreementSecurity) bean).getAmount(); case -771625640: // underlyingId return ((ForwardRateAgreementSecurity) bean).getUnderlyingId(); case -711571286: // indexFrequency return ((ForwardRateAgreementSecurity) bean).getIndexFrequency(); case 1905311443: // dayCount return ((ForwardRateAgreementSecurity) bean).getDayCount(); case 502310560: // fixingBusinessDayConvention return ((ForwardRateAgreementSecurity) bean).getFixingBusinessDayConvention(); case 871782053: // fixingLag return ((ForwardRateAgreementSecurity) bean).getFixingLag(); } return super.propertyGet(bean, propertyName, quiet); } @SuppressWarnings("unchecked") @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { switch (propertyName.hashCode()) { case 575402001: // currency ((ForwardRateAgreementSecurity) bean).setCurrency((Currency) newValue); return; case -1233097483: // calendars ((ForwardRateAgreementSecurity) bean).setCalendars((Set<ExternalId>) newValue); return; case -299417201: // paymentCalendars ((ForwardRateAgreementSecurity) bean).setPaymentCalendars((Set<ExternalId>) newValue); return; case -2129778896: // startDate ((ForwardRateAgreementSecurity) bean).setStartDate((LocalDate) newValue); return; case -1607727319: // endDate ((ForwardRateAgreementSecurity) bean).setEndDate((LocalDate) newValue); return; case 1255202043: // fixingDate ((ForwardRateAgreementSecurity) bean).setFixingDate((LocalDate) newValue); return; case 3493088: // rate ((ForwardRateAgreementSecurity) bean).setRate((Double) newValue); return; case -1413853096: // amount ((ForwardRateAgreementSecurity) 
bean).setAmount((Double) newValue); return; case -771625640: // underlyingId ((ForwardRateAgreementSecurity) bean).setUnderlyingId((ExternalId) newValue); return; case -711571286: // indexFrequency ((ForwardRateAgreementSecurity) bean).setIndexFrequency((Frequency) newValue); return; case 1905311443: // dayCount ((ForwardRateAgreementSecurity) bean).setDayCount((DayCount) newValue); return; case 502310560: // fixingBusinessDayConvention ((ForwardRateAgreementSecurity) bean).setFixingBusinessDayConvention((BusinessDayConvention) newValue); return; case 871782053: // fixingLag ((ForwardRateAgreementSecurity) bean).setFixingLag((Integer) newValue); return; } super.propertySet(bean, propertyName, newValue, quiet); } @Override protected void validate(Bean bean) { JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._currency, "currency"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._calendars, "calendars"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._startDate, "startDate"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._endDate, "endDate"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._underlyingId, "underlyingId"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._indexFrequency, "indexFrequency"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._dayCount, "dayCount"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._fixingBusinessDayConvention, "fixingBusinessDayConvention"); JodaBeanUtils.notNull(((ForwardRateAgreementSecurity) bean)._fixingLag, "fixingLag"); super.validate(bean); } } ///CLOVER:ON //-------------------------- AUTOGENERATED END -------------------------- }
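A short usage sketch follows. It is not part of the generated file above: it assumes the bean's no-argument constructor is accessible to the caller and that the file's imports (LocalDate, Property, and so on) are in scope, and the field values are invented. It only illustrates that the plain getters/setters and the Joda-Beans meta-property path reach the same state.

/*
 * Illustrative sketch only; see the assumptions stated above.
 */
class ForwardRateAgreementSecurityUsageSketch {
    public static void main(String[] args) {
        ForwardRateAgreementSecurity fra = new ForwardRateAgreementSecurity();
        fra.setStartDate(LocalDate.of(2016, 1, 15));
        fra.setEndDate(LocalDate.of(2016, 7, 15));
        fra.setRate(0.0125);
        fra.setAmount(1000000);
        fra.setFixingLag(2);
        // The plain getter and the Joda-Beans meta-property road lead to the same value;
        // the Meta.propertyGet/propertySet switches above are what back the second path.
        double viaGetter = fra.getRate();
        double viaProperty = fra.rate().get();
        System.out.println(viaGetter == viaProperty); // prints true
    }
}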
/* * ==================================================================== * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * */ package test; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.ogf.saga.context.Context; import org.ogf.saga.context.ContextFactory; import org.ogf.saga.error.NotImplementedException; import org.ogf.saga.error.IncorrectStateException; import org.ogf.saga.error.NoSuccessException; import org.ogf.saga.error.PermissionDeniedException; import org.ogf.saga.error.SagaException; import org.ogf.saga.session.Session; import org.ogf.saga.session.SessionFactory; import org.ogf.saga.job.JobDescription; import org.ogf.saga.job.JobService; import org.ogf.saga.job.JobFactory; import org.ogf.saga.job.Job; import org.ogf.saga.task.State; import org.ogf.saga.url.URL; import org.ogf.saga.url.URLFactory; import fr.in2p3.jsaga.impl.job.instance.JobImpl; import fr.in2p3.jsaga.impl.job.service.JobServiceImpl; import java.math.BigInteger; import java.util.Random; import org.apache.log4j.Logger; /* ********************************************* * *** Istituto Nazionale di Fisica Nucleare *** * *** Sezione di Catania (Italy) *** * *** http://www.ct.infn.it/ *** * ********************************************* * File: jOCCIJobControlAdaptor.java * Authors: Giuseppe LA ROCCA * Email: <giuseppe.larocca>@ct.infn.it * Ver.: 1.0.4 * Date: 27 October 2015 * *********************************************/ public class RunTest { private static String OCCI_ENDPOINT_HOST = ""; private static String OCCI_ENDPOINT_PORT = ""; private static String OCCI_OS = ""; private static String OCCI_FLAVOR = ""; private static String OCCI_ACTION = ""; //private static String OCCI_RESOURCE = ""; private static String OCCI_RESOURCE_ID = ""; private static String OCCI_VM_TITLE = ""; private static String OCCI_PROXY_PATH = ""; //private static String OCCI_PREFIX = "/usr/local/rvm/gems/ruby-1.9.3-p429/bin"; private static String OCCI_PROTOCOL = ""; // Adding FedCloud Contextualisation options here private static String OCCI_CONTEXT_PUBLICKEY = ""; private static String OCCI_CONTEXT_PUBLICKEY_NAME = ""; private static String OCCI_PUBLIC_NETWORK_ID = ""; private static Logger log = Logger.getLogger(RunTest.class); public static String getNativeJobId(String jobId) { String nativeJobId = ""; Pattern pattern = Pattern.compile("\\[(.*)\\]-\\[(.*)\\]"); Matcher matcher = pattern.matcher(jobId); try { if (matcher.find()) nativeJobId = matcher.group(2); else return null; } catch (Exception ex) { 
System.out.println(ex.toString()); return null; } return nativeJobId; } public static void main(String[] args) throws NotImplementedException { System.setProperty("saga.factory", "fr.in2p3.jsaga.impl.SagaFactoryImpl"); Session session = null; Context context = null; String ServiceURL = ""; JobService service = null; Job job = null; String jobId = ""; // Possible values: 'true' and 'false' OCCI_CONTEXT_PUBLICKEY = "true"; // Possible values: 'centos', 'ubuntu', 'root', 'cloud-user', ... OCCI_CONTEXT_PUBLICKEY_NAME = "centos"; // Public Network ID OCCI_PUBLIC_NETWORK_ID = "public"; // OCCI_PROXY_PATH (fedcloud.egi.vo) OCCI_PROXY_PATH = System.getProperty("user.home") + System.getProperty("file.separator") + "jsaga-adaptor-jocci" + System.getProperty("file.separator") + "x509up_u512"; // OCCI_PROXY_PATH (vo.chain-project.eu) /*OCCI_PROXY_PATH = System.getProperty("user.home") + System.getProperty("file.separator") + "jsaga-adaptor-jocci" + System.getProperty("file.separator") + "x509up_u501";*/ // OCCI_PROXY_PATH (trainig.egi.eu) /*OCCI_PROXY_PATH = System.getProperty("user.home") + System.getProperty("file.separator") + "jsaga-adaptor-jocci" + System.getProperty("file.separator") + "x509up_u500";*/ try { //Create an empty SAGA session log.info("\nInitialize the security context for the jOCCI JSAGA adaptor"); session = SessionFactory.createSession(false); //Modifiy this section according to the A&A schema of your middleware //In this example the jocci A&A schema is used context = ContextFactory.createContext("jocci"); // Set the user proxy context.setAttribute(Context.USERPROXY, OCCI_PROXY_PATH); //Set the public key for SSH connections context.setAttribute(Context.USERCERT, System.getProperty("user.home") + System.getProperty("file.separator") + ".ssh/id_rsa.pub"); //Set the private key for SSH connections context.setAttribute(Context.USERKEY, System.getProperty("user.home") + System.getProperty("file.separator") + ".ssh/id_rsa"); // Set the userID for SSH connections //context.setAttribute(Context.USERID, "root"); context.setAttribute(Context.USERID, OCCI_CONTEXT_PUBLICKEY_NAME); session.addContext(context); if (Integer.parseInt (context.getAttribute(Context.LIFETIME))/3600 > 0) { log.info(""); log.info("Initializing the security context for the jOCCI JSAGA adaptor [ SUCCESS ] "); log.info("See below security context details... "); log.info("User DN = " + context.getAttribute(Context.USERID)); log.info("Proxy = " + context.getAttribute(Context.USERPROXY)); log.info("Lifetime = " + Integer.parseInt (context.getAttribute(Context.LIFETIME)) / 3600 + "h."); log.info("CA Repos = " + context.getAttribute(Context.CERTREPOSITORY)); log.info("Type = " + context.getAttribute(Context.TYPE)); log.info("VO name = " + context.getAttribute(Context.USERVO)); } else throw new RuntimeException ("Your credentials have expired!"); } catch (Exception ex) { log.error(""); log.error("Initialize the Security context [ FAILED ] "); log.error("See below the stack trace... 
"); ex.printStackTrace(System.out); System.exit(-1); } // === OCCI SETTINGS for the CESNET CLOUD RESOURCE === // //OCCI_ENDPOINT_HOST = "jocci://carach5.ics.muni.cz"; //OCCI_ENDPOINT_PORT = "11443"; //OCCI_PROTOCOL = "https://"; // vo.chain-project.eu // os_tpl#uuid_chain_reds_tthreader_fedcloud_dukan_104 // os_tpl#uuid_chain_reds_aleph2000_fedcloud_dukan_105 // os_tpl#uuid_chain_reds_generic_vm_fedcloud_dukan_100 // os_tpl#uuid_chain_reds_octave_fedcloud_dukan_101 // os_tpl#uuid_chain_reds_r_fedcloud_dukan_102 // os_tpl#uuid_chain_reds_generic_www_fedcloud_dukan_110 // os_tpl#uuid_chain_reds_wrf_fedcloud_dukan_103 //OCCI_OS = "uuid_chain_reds_octave_fedcloud_dukan_101"; // fedcloud.egi.eu //OCCI_OS = "uuid_egi_centos_6_fedcloud_warg_130"; //OCCI_FLAVOR = "small"; // === OCCI SETTINGS for the CATANIA CLOUD RESOURCE === // //OCCI_ENDPOINT_HOST = "jocci://nebula-server-01.ct.infn.it"; //OCCI_ENDPOINT_PORT = "9000"; //OCCI_PROTOCOL = "https://"; // vo.chain-project.eu // os_tpl#uuid_chain_reds_generic_vm_centos_6_6_kvm_103 // os_tpl#uuid_chain_reds_octave_centos_6_6_kvm_102 // os_tpl#uuid_chain_reds_octave_centos_6_6_kvm_102 // os_tpl#uuid_chain_reds_wrf_centos_6_6_kvm_105 // os_tpl#uuid_chain_reds_tthreader_scientific_linux_6_5_kvm_108 // os_tpl#uuid_chain_reds_generic_www_centos_6_6_kvm_106 // os_tpl#uuid_chain_reds_aleph2000_scientific_linux_slc5_11_kvm_104 //OCCI_OS = "uuid_chain_reds_generic_vm_centos_6_6_kvm_103"; // fedcloud.egi.eu // os_tpl#uuid_centos6_minimal_centos_6_x_kvm_130 // os_tpl#uuid_cernvm_scientificlinux_6_0_kvm_119 //OCCI_OS = "uuid_cernvm_scientificlinux_6_0_kvm_119"; //OCCI_OS = "uuid_centos6_minimal_centos_6_x_kvm_130"; //OCCI_FLAVOR = "medium"; // === OCCI SETTINGS for the INFN-BARI CLOUD RESOURCE === // //OCCI_ENDPOINT_HOST = "jocci://prisma-cloud.ba.infn.it"; //OCCI_ENDPOINT_PORT = "8787"; //OCCI_PROTOCOL = "http://"; // Possible OCCI_OS values: 'generic_vm', 'octave', 'r', 'WRF', 'treethreader', 'aleph2000' // 623a86f7-f5f9-4bc7-816a-80e7bd6603ed => 'generic-vm' // 4aca9ee4-8638-4f95-824f-5128e8b0e90f => 'octave' // 217535d6-7315-4cb7-bc40-2aa20cfef60b => 'r' // a82fb047-5932-4b70-9099-67865e8b88f0 -> 'generic_www' // 30d841c8-fbf5-44d2-bdc9-f49df1bba2dd => 'treethreader' // b0df0319-5b5b-41fb-9453-2b578ee875fd => 'WRF' // 5f29ab3e-61f3-4f94-815f-3d6bf7a90704 => 'aleph2000' // 56c11ccb-c696-4fe6-b061-b5df24913580 => 'generic_www' //OCCI_OS = "38e758ec-0f2c-4cd2-8f2c-40e48c3ed62e"; //OCCI_OS = "623a86f7-f5f9-4bc7-816a-80e7bd6603ed"; //OCCI_FLAVOR = "small"; // === OCCI SETTINGS for the CIEMAT CLOUD RESOURCE === // OCCI_ENDPOINT_HOST = "jocci://cloud.cesga.es"; OCCI_ENDPOINT_PORT = "3202"; OCCI_PROTOCOL = "https://"; //OCCI_OS = "uuid_basic_centos_6_minimal_271"; OCCI_OS = "uuid_centos5_7_90"; OCCI_FLAVOR = "small"; // === OCCI SETTINGS for INFN-PADOVA-STACK CLOUD RESOURCE === // /*OCCI_ENDPOINT_HOST = "jocci://egi-cloud.pd.infn.it"; OCCI_ENDPOINT_PORT = "8787"; OCCI_PROTOCOL = "https://"; //OCCI_OS = "55f18599-e863-491a-83d4-28823b0345c0"; // [Debian/7/KVM]_fctf OCCI_OS = "556b231f-1daf-4bbf-b172-fb950da9c330"; //[CentOS/6/KVM]_fctf OCCI_FLAVOR = "m1-small";*/ /*OCCI_ENDPOINT_HOST = "jocci://controller.ceta-ciemat.es"; OCCI_ENDPOINT_PORT = "8787"; OCCI_PROTOCOL = "https://"; OCCI_OS = "d8145afb-f820-44d6-96a9-f491939868da"; OCCI_FLAVOR = "m1-small";*/ /* [ controller.ceta-ciemat.es ] * CHAIN-REDS * 225b8e1b-7403-402c-a63f-1ecbbb747db0 => generic_vm * e0ec1fce-7ff0-410d-b923-bafe90818fe4 => r * 6c4328f9-71db-457f-81d1-1b775c721a23 => octave * FEDCLOUD * 
d8145afb-f820-44d6-96a9-f491939868da => Ubuntu/12.04 */ /*OCCI_ENDPOINT_HOST = "jocci://stack-server-02.ct.infn.it"; OCCI_ENDPOINT_PORT = "8787"; OCCI_PROTOCOL = "http://"; // - [CentOS/6/KVM]_EGI_fedcloud (OK), OCCI_PUBLICKEY_NAME=centos OCCI_OS = "2a612491-d544-4b9c-af3f-994ac7f61d2a"; // - [Ubuntu/14.04/KVM]_EGI_fedcloud (OK), OCCI_PUBLICKEY_NAME=ubuntu //OCCI_OS = "74f88b3c-a70f-4c2d-9df6-8b2ba766701f"; OCCI_FLAVOR = "m1-medium";*/ OCCI_VM_TITLE = "jOCCI_"; OCCI_ACTION = "create"; BigInteger result = new BigInteger(30, new Random()); OCCI_VM_TITLE += result; try { log.info(""); log.info("Initialize the JobService context... "); // Start OCCI Actions ... if (OCCI_ACTION.equals("list")) { ServiceURL = OCCI_ENDPOINT_HOST + ":" + OCCI_ENDPOINT_PORT + System.getProperty("file.separator") + "?" + "protocol=" + OCCI_PROTOCOL + "&network=" + OCCI_PUBLIC_NETWORK_ID + "&credentials_publickey=" + OCCI_CONTEXT_PUBLICKEY + "&credentials_publickey_name=" + OCCI_CONTEXT_PUBLICKEY_NAME + "&proxy_path=" + OCCI_PROXY_PATH; } else if (OCCI_ACTION.equals("create")) { ServiceURL = OCCI_ENDPOINT_HOST + ":" + OCCI_ENDPOINT_PORT + System.getProperty("file.separator") + "?" + "protocol=" + OCCI_PROTOCOL + "&network=" + OCCI_PUBLIC_NETWORK_ID + "&attributes_title=" + OCCI_VM_TITLE + "&mixin_os_tpl=" + OCCI_OS + "&mixin_resource_tpl=" + OCCI_FLAVOR + "&credentials_publickey=" + OCCI_CONTEXT_PUBLICKEY + "&credentials_publickey_name=" + OCCI_CONTEXT_PUBLICKEY_NAME + "&proxy_path=" + OCCI_PROXY_PATH; } else if (OCCI_ACTION.equals("describe")) { ServiceURL = OCCI_ENDPOINT_HOST + ":" + OCCI_ENDPOINT_PORT + System.getProperty("file.separator") + "?" + "protocol=" + OCCI_PROTOCOL + "&network=" + OCCI_PUBLIC_NETWORK_ID + "&resourceID=" + OCCI_RESOURCE_ID + "&credentials_publickey=" + OCCI_CONTEXT_PUBLICKEY + "&credentials_publickey_name=" + OCCI_CONTEXT_PUBLICKEY_NAME + "&proxy_path=" + OCCI_PROXY_PATH; } else if (OCCI_ACTION.equals("delete")) { ServiceURL = OCCI_ENDPOINT_HOST + ":" + OCCI_ENDPOINT_PORT + System.getProperty("file.separator") + "?" 
+ "protocol=" + OCCI_PROTOCOL + "&network=" + OCCI_PUBLIC_NETWORK_ID + "&resourceID=" + OCCI_RESOURCE_ID + "&credentials_publickey=" + OCCI_CONTEXT_PUBLICKEY + "&credentials_publickey_name=" + OCCI_CONTEXT_PUBLICKEY_NAME + "&proxy_path=" + OCCI_PROXY_PATH; } URL serviceURL = URLFactory.createURL(ServiceURL); log.info("serviceURL = " + serviceURL); service = JobFactory.createJobService(session, serviceURL); // ========================================== // // === SUBMITTING VM using jOCCI standard === // // ========================================== // if (OCCI_ACTION.equals("create")) { // Create the job description JobDescription desc = JobFactory.createJobDescription(); desc.setAttribute(JobDescription.EXECUTABLE, "/bin/bash"); desc.setAttribute(JobDescription.OUTPUT, "output.txt"); desc.setAttribute(JobDescription.ERROR, "error.txt"); desc.setVectorAttribute(JobDescription.ARGUMENTS, new String[]{"job-generic.sh"}); desc.setVectorAttribute( JobDescription.FILETRANSFER, new String[]{ System.getProperty("user.home") + System.getProperty("file.separator") + "jsaga-adaptor-jocci" + System.getProperty("file.separator") + "job-generic.sh>job-generic.sh", System.getProperty("user.home") + System.getProperty("file.separator") + "jsaga-adaptor-jocci" + System.getProperty("file.separator") + "output.txt<output.txt", System.getProperty("user.home") + System.getProperty("file.separator") + "jsaga-adaptor-jocci" + System.getProperty("file.separator") + "error.txt<error.txt"} ); // ================================= // // === CREATE a new job instance === // // ================================= // job = service.createJob(desc); job.run(); // Getting the jobId jobId = job.getAttribute(Job.JOBID); log.info(""); log.info("Job instance created: "); log.info(jobId); try { ((JobServiceImpl)service).disconnect(); } catch (NoSuccessException ex) { log.error("See below the stack trace... "); ex.printStackTrace(System.out); } log.info(""); log.info("Closing session..."); session.close(); // =========================== // // === CHECKING job status === // // =========================== // //Create an empty SAGA session log.info(""); log.info("Re-initialize the security context for the jOCCI JSAGA adaptor"); session = SessionFactory.createSession(false); //Modifiy this section according to the A&A schema of your middleware //In this example the jocci A&A schema is used context = ContextFactory.createContext("jocci"); // Set the user proxy context.setAttribute(Context.USERPROXY, OCCI_PROXY_PATH); //Set the public key for SSH connections context.setAttribute(Context.USERCERT, System.getProperty("user.home") + System.getProperty("file.separator") + ".ssh/id_rsa.pub"); //Set the private key for SSH connections context.setAttribute(Context.USERKEY, System.getProperty("user.home") + System.getProperty("file.separator") + ".ssh/id_rsa"); // Set the userID for SSH connections //context.setAttribute(Context.USERID, "root"); context.setAttribute(Context.USERID, OCCI_CONTEXT_PUBLICKEY_NAME); session.addContext(context); ServiceURL = OCCI_ENDPOINT_HOST + ":" + OCCI_ENDPOINT_PORT + System.getProperty("file.separator") + "?" 
+ "protocol=" + OCCI_PROTOCOL + "&network=" + OCCI_PUBLIC_NETWORK_ID + "&action=" + OCCI_ACTION + "&attributes_title=" + OCCI_VM_TITLE + "&mixin_os_tpl=" + OCCI_OS + "&mixin_resource_tpl=" + OCCI_FLAVOR + "&credentials_publickey=" + OCCI_CONTEXT_PUBLICKEY + "&credentials_publickey_name=" + OCCI_CONTEXT_PUBLICKEY_NAME + "&proxy_path=" + OCCI_PROXY_PATH; serviceURL = URLFactory.createURL(ServiceURL); JobService service1 = JobFactory.createJobService(session, serviceURL); Job job1 = service1.getJob(getNativeJobId(jobId)); log.info(""); log.info("Fetching the status of the job "); log.info ("[ " + getNativeJobId(jobId) + " ] "); log.info(""); log.info("JobID [ " + jobId + " ] "); boolean jobIsDone = false; //String nativeJobId = ""; while(!jobIsDone) { // display final state State state = null; try { state = job1.getState(); log.info("Current Status = " + state.name()); String executionHosts[]; executionHosts = job1.getVectorAttribute(Job.EXECUTIONHOSTS); log.info("Execution Host = " + executionHosts[0]); } catch (Exception ex) { log.error(""); log.error("Error in getting job status... [ FAILED ] "); log.error(ex.toString()); log.error("Cause :" + ex.getCause()); } if (State.CANCELED.compareTo(state) == 0) { log.info(""); log.info("Job Status = CANCELED "); } else { if (State.DONE.compareTo(state) == 0) { jobIsDone = true; String exitCode = job1.getAttribute(Job.EXITCODE); log.info(""); log.info("Final Job Status = DONE"); if (Integer.parseInt (exitCode) == 0) log.info("Exit Code (0) [ SUCCESS ] "); else log.info("Exit Code [ " + exitCode + " ] "); log.info(""); log.info("Retrieving job results."); log.info("This operation may take a few minutes to complete..."); // ========================================== // // === EXECUTING post-staging and cleanup === // // ========================================== // try { ((JobImpl)job1).postStagingAndCleanup(); } catch (NotImplementedException ex) { ex.printStackTrace(System.out); } catch (PermissionDeniedException ex) { ex.printStackTrace(System.out); } catch (IncorrectStateException ex) { ex.printStackTrace(System.out); } catch (NoSuccessException ex) { ex.printStackTrace(System.out); } log.info("Job outputs retrieved [ SUCCESS ] "); try { ((JobServiceImpl)service1).disconnect(); } catch (NoSuccessException ex) { log.error("Job outputs retrieved [ FAILED ] "); log.error("See below the stack trace... "); ex.printStackTrace(System.out); } session.close(); break; } // end Sate.DONE else if (State.FAILED.compareTo(state) == 0) { try { String exitCode = job1.getAttribute(Job.EXITCODE); log.info(""); log.info("Job Status = FAILED"); log.info("Exit Code [ " + exitCode + " ] "); } catch (SagaException e) { log.error("Job failed."); } } else { log.info(""); log.info("Unexpected job status: " + state); } } try { Thread.sleep(10000); } catch (InterruptedException ex) { ex.printStackTrace(System.out); } } // end while } // end if (OCCI_ACTION.equals("create")) log.info(""); log.info("Initialize the JobService context [ SUCCESS ] "); } catch (Exception ex) { log.error(""); log.error("Initialize the JobService context [ FAILED ] "); log.error("See below the stack trace... "); ex.printStackTrace(System.out); } } }
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.progress.impl; import com.intellij.codeInsight.daemon.impl.DaemonProgressIndicator; import com.intellij.concurrency.JobScheduler; import com.intellij.concurrency.SensitiveProgressWrapper; import com.intellij.ide.util.DelegatingProgressIndicator; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.application.ex.ApplicationManagerEx; import com.intellij.openapi.progress.*; import com.intellij.openapi.progress.util.ProgressIndicatorBase; import com.intellij.openapi.progress.util.ProgressIndicatorUtils; import com.intellij.openapi.progress.util.ProgressWrapper; import com.intellij.openapi.progress.util.ReadTask; import com.intellij.openapi.util.EmptyRunnable; import com.intellij.openapi.wm.ex.ProgressIndicatorEx; import com.intellij.testFramework.BombedProgressIndicator; import com.intellij.testFramework.LightPlatformTestCase; import com.intellij.testFramework.PlatformTestUtil; import com.intellij.util.Alarm; import com.intellij.util.ArrayUtil; import com.intellij.util.TimeoutUtil; import com.intellij.util.concurrency.Semaphore; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.DoubleArrayList; import com.intellij.util.containers.Stack; import com.intellij.util.ui.UIUtil; import gnu.trove.TLongArrayList; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collections; import java.util.List; import java.util.Random; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; /** * @author yole */ public class ProgressIndicatorTest extends LightPlatformTestCase { public void testCheckCanceledHasStackFrame() { ProgressIndicator pib = new ProgressIndicatorBase(); pib.cancel(); try { pib.checkCanceled(); fail("Please restore ProgressIndicatorBase.checkCanceled() check!"); } catch(ProcessCanceledException ex) { boolean hasStackFrame = ex.getStackTrace().length != 0; assertTrue("Should have stackframe", hasStackFrame); } } public void testProgressManagerCheckCanceledWorksRightAfterIndicatorBeenCanceled() { for (int i=0; i<1000;i++) { final ProgressIndicatorBase indicator = new ProgressIndicatorBase(); ProgressManager.getInstance().runProcess(() -> { ProgressManager.checkCanceled(); try { indicator.cancel(); ProgressManager.checkCanceled(); fail("checkCanceled() must have caught just canceled indicator"); } catch (ProcessCanceledException ignored) { } }, indicator); } } private volatile long prevTime; private volatile long now; public void testCheckCanceledGranularity() throws InterruptedException { prevTime = now = 0; final long warmupEnd = System.currentTimeMillis() + 1000; final TLongArrayList times = new TLongArrayList(); final long end = warmupEnd + 1000; ApplicationManagerEx.getApplicationEx().runProcessWithProgressSynchronously(() -> { final Alarm 
alarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, getTestRootDisposable()); ProgressIndicatorEx indicator = (ProgressIndicatorEx)ProgressIndicatorProvider.getGlobalProgressIndicator(); prevTime = System.currentTimeMillis(); assert indicator != null; indicator.addStateDelegate(new ProgressIndicatorStub() { @Override public void checkCanceled() throws ProcessCanceledException { now = System.currentTimeMillis(); if (now > warmupEnd) { int delta = (int)(now - prevTime); times.add(delta); } prevTime = now; } }); while (System.currentTimeMillis() < end) { ProgressManager.checkCanceled(); } alarm.cancelAllRequests(); }, "", false, getProject(), null, ""); long averageDelay = ArrayUtil.averageAmongMedians(times.toNativeArray(), 5); System.out.println("averageDelay = " + averageDelay); assertTrue(averageDelay < CoreProgressManager.CHECK_CANCELED_DELAY_MILLIS *3); } public void testProgressIndicatorUtilsScheduleWithWriteActionPriority() throws Throwable { final AtomicBoolean insideReadAction = new AtomicBoolean(); final ProgressIndicatorBase indicator = new ProgressIndicatorBase(); ProgressIndicatorUtils.scheduleWithWriteActionPriority(indicator, new ReadTask() { @Override public void computeInReadAction(@NotNull ProgressIndicator indicator) { insideReadAction.set(true); while (true) { ProgressManager.checkCanceled(); } } @Override public void onCanceled(@NotNull ProgressIndicator indicator) { } }); UIUtil.dispatchAllInvocationEvents(); while (!insideReadAction.get()) { } ApplicationManager.getApplication().runWriteAction(() -> assertTrue(indicator.isCanceled())); assertTrue(indicator.isCanceled()); } public void testReadTaskCanceledShouldNotHappenAfterEdtContinuation() { for (int i = 0; i < 1000; i++) { final AtomicBoolean afterContinuation = new AtomicBoolean(); final ProgressIndicatorBase indicator = new ProgressIndicatorBase(); ProgressIndicatorUtils.scheduleWithWriteActionPriority(indicator, new ReadTask() { @Nullable @Override public Continuation performInReadAction(@NotNull ProgressIndicator indicator) throws ProcessCanceledException { return new Continuation(() -> afterContinuation.set(true)); } @Override public void onCanceled(@NotNull ProgressIndicator indicator) { assertFalse(afterContinuation.get()); } }); UIUtil.dispatchAllInvocationEvents(); ApplicationManager.getApplication().runWriteAction(() -> { if (!afterContinuation.get()) { assertTrue(indicator.isCanceled()); } }); UIUtil.dispatchAllInvocationEvents(); } } public void testThereIsNoDelayBetweenIndicatorCancelAndProgressManagerCheckCanceled() throws Throwable { for (int i=0; i<100;i++) { final ProgressIndicatorBase indicator = new ProgressIndicatorBase(); List<Thread> threads = ContainerUtil.map(Collections.nCopies(10, ""), s -> new Thread(() -> ProgressManager.getInstance().executeProcessUnderProgress(() -> { try { Thread.sleep(new Random().nextInt(100)); indicator.cancel(); ProgressManager.checkCanceled(); fail("checkCanceled() must know about canceled indicator even from different thread"); } catch (ProcessCanceledException ignored) { } catch (Throwable e) { exception = e; } }, indicator), "indicator test")); threads.forEach(Thread::start); for (Thread thread : threads) { thread.join(); } } if (exception != null) throw exception; } private volatile boolean checkCanceledCalled; private volatile boolean taskCanceled; private volatile boolean taskSucceeded; private volatile Throwable exception; public void testProgressManagerCheckCanceledDoesNotDelegateToProgressIndicatorIfThereAreNoCanceledIndicators() throws Throwable { final 
long warmupEnd = System.currentTimeMillis() + 1000; final long end = warmupEnd + 10000; checkCanceledCalled = false; final ProgressIndicatorBase myIndicator = new ProgressIndicatorBase(); taskCanceled = taskSucceeded = false; exception = null; Future<?> future = ((ProgressManagerImpl)ProgressManager.getInstance()).runProcessWithProgressAsynchronously( new Task.Backgroundable(getProject(), "Xxx") { @Override public void run(@NotNull ProgressIndicator indicator) { try { assertFalse(ApplicationManager.getApplication().isDispatchThread()); assertSame(indicator, myIndicator); while (System.currentTimeMillis() < end) { ProgressManager.checkCanceled(); } } catch (ProcessCanceledException e) { exception = e; checkCanceledCalled = true; throw e; } catch (RuntimeException | Error e) { exception = e; throw e; } } @Override public void onCancel() { taskCanceled = true; } @Override public void onSuccess() { taskSucceeded = true; } }, myIndicator, null); ApplicationManager.getApplication().assertIsDispatchThread(); while (!future.isDone()) { if (System.currentTimeMillis() < warmupEnd) { assertFalse(checkCanceledCalled); } else { myIndicator.cancel(); } } // invokeLater in runProcessWithProgressAsynchronously UIUtil.dispatchAllInvocationEvents(); assertTrue(checkCanceledCalled); assertFalse(taskSucceeded); assertTrue(taskCanceled); assertTrue(String.valueOf(exception), exception instanceof ProcessCanceledException); } private volatile boolean myFlag; public void testPerverseIndicator() { checkCanceledCalled = false; ProgressIndicator indicator = new ProgressIndicatorStub() { @Override public void checkCanceled() throws ProcessCanceledException { checkCanceledCalled = true; if (myFlag) throw new ProcessCanceledException(); } }; ensureCheckCanceledCalled(indicator); } private void ensureCheckCanceledCalled(@NotNull ProgressIndicator indicator) { myFlag = false; JobScheduler.getScheduler().schedule(() -> myFlag = true, 100, TimeUnit.MILLISECONDS); final long start = System.currentTimeMillis(); try { ProgressManager.getInstance().executeProcessUnderProgress(() -> { while (System.currentTimeMillis() - start < 10000) { ProgressManager.checkCanceled(); } }, indicator); fail("must have thrown PCE"); } catch (ProcessCanceledException e) { assertTrue(checkCanceledCalled); } } public void testExtremelyPerverseIndicatorWhichCancelMethodIsNoop() { checkCanceledCalled = false; ProgressIndicator indicator = new ProgressIndicatorStub() { @Override public void checkCanceled() throws ProcessCanceledException { checkCanceledCalled = true; if (myFlag) throw new ProcessCanceledException(); } @Override public void cancel() { } }; ensureCheckCanceledCalled(indicator); } public void testNestedIndicatorsAreCanceledRight() { checkCanceledCalled = false; ProgressManager.getInstance().executeProcessUnderProgress(() -> { assertFalse(CoreProgressManager.threadsUnderCanceledIndicator.contains(Thread.currentThread())); ProgressIndicator indicator = ProgressIndicatorProvider.getGlobalProgressIndicator(); assertTrue(indicator != null && !indicator.isCanceled()); indicator.cancel(); assertTrue(CoreProgressManager.threadsUnderCanceledIndicator.contains(Thread.currentThread())); assertTrue(indicator.isCanceled()); final ProgressIndicatorEx nested = new ProgressIndicatorBase(); nested.addStateDelegate(new ProgressIndicatorStub() { @Override public void checkCanceled() throws ProcessCanceledException { checkCanceledCalled = true; } }); ProgressManager.getInstance().executeProcessUnderProgress(() -> { 
assertFalse(CoreProgressManager.threadsUnderCanceledIndicator.contains(Thread.currentThread())); ProgressIndicator indicator2 = ProgressIndicatorProvider.getGlobalProgressIndicator(); assertTrue(indicator2 != null && !indicator2.isCanceled()); assertSame(indicator2, nested); ProgressManager.checkCanceled(); }, nested); ProgressIndicator indicator3 = ProgressIndicatorProvider.getGlobalProgressIndicator(); assertSame(indicator, indicator3); assertTrue(CoreProgressManager.threadsUnderCanceledIndicator.contains(Thread.currentThread())); }, new EmptyProgressIndicator()); assertFalse(checkCanceledCalled); } public void testWrappedIndicatorsAreSortedRight() { EmptyProgressIndicator indicator1 = new EmptyProgressIndicator(); DelegatingProgressIndicator indicator2 = new DelegatingProgressIndicator(indicator1); final DelegatingProgressIndicator indicator3 = new DelegatingProgressIndicator(indicator2); ProgressManager.getInstance().executeProcessUnderProgress(() -> { ProgressIndicator current = ProgressIndicatorProvider.getGlobalProgressIndicator(); assertSame(indicator3, current); }, indicator3); assertFalse(checkCanceledCalled); } public void testProgressPerformance() { PlatformTestUtil.startPerformanceTest("progress", 100, () -> { EmptyProgressIndicator indicator = new EmptyProgressIndicator(); for (int i=0;i<100000;i++) { ProgressManager.getInstance().executeProcessUnderProgress(EmptyRunnable.getInstance(), indicator); } }).useLegacyScaling().assertTiming(); } public void testWrapperIndicatorGotCanceledTooWhenInnerIndicatorHas() { final ProgressIndicator progress = new ProgressIndicatorBase(){ @Override protected boolean isCancelable() { return true; } }; try { ProgressManager.getInstance().executeProcessUnderProgress(() -> { assertFalse(CoreProgressManager.threadsUnderCanceledIndicator.contains(Thread.currentThread())); assertTrue(!progress.isCanceled()); progress.cancel(); assertTrue(CoreProgressManager.threadsUnderCanceledIndicator.contains(Thread.currentThread())); assertTrue(progress.isCanceled()); while (true) { // wait for PCE ProgressManager.checkCanceled(); } }, ProgressWrapper.wrap(progress)); fail("PCE must have been thrown"); } catch (ProcessCanceledException ignored) { } } public void testSOEUnderExtremelyNestedWrappedIndicator() { ProgressIndicator indicator = new DaemonProgressIndicator(); for (int i=0;i<10000;i++) { indicator = new SensitiveProgressWrapper(indicator); } ProgressManager.getInstance().executeProcessUnderProgress(() -> { ProgressIndicator progressIndicator = ProgressIndicatorProvider.getGlobalProgressIndicator(); assertTrue(progressIndicator instanceof SensitiveProgressWrapper); progressIndicator.checkCanceled(); progressIndicator.isCanceled(); }, indicator); } public void testBombedIndicator() { final int count = 10; new BombedProgressIndicator(count).runBombed(() -> { for (int i = 0; i < count * 2; i++) { TimeoutUtil.sleep(10); try { ProgressManager.checkCanceled(); if (i >= count) { ProgressManager.checkCanceled(); fail("PCE expected on " + i + "th check"); } } catch (ProcessCanceledException e) { if (i < count) { fail("Too early PCE"); } } } }); } private static class ProgressIndicatorStub implements ProgressIndicatorEx { private volatile boolean myCanceled; @Override public void addStateDelegate(@NotNull ProgressIndicatorEx delegate) { throw new RuntimeException(); } @Override public boolean isModalityEntered() { throw new RuntimeException(); } @Override public void finish(@NotNull TaskInfo task) { } @Override public boolean isFinished(@NotNull TaskInfo task) { 
throw new RuntimeException(); } @Override public boolean wasStarted() { throw new RuntimeException(); } @Override public void processFinish() { throw new RuntimeException(); } @Override public void initStateFrom(@NotNull ProgressIndicator indicator) { } @NotNull @Override public Stack<String> getTextStack() { throw new RuntimeException(); } @NotNull @Override public DoubleArrayList getFractionStack() { throw new RuntimeException(); } @NotNull @Override public Stack<String> getText2Stack() { throw new RuntimeException(); } @Override public int getNonCancelableCount() { throw new RuntimeException(); } @Override public void start() { } @Override public void stop() { } @Override public void setText(String text) { throw new RuntimeException(); } @Override public String getText() { throw new RuntimeException(); } @Override public String getText2() { throw new RuntimeException(); } @Override public void setText2(String text) { throw new RuntimeException(); } @Override public double getFraction() { throw new RuntimeException(); } @Override public void setFraction(double fraction) { throw new RuntimeException(); } @Override public void pushState() { throw new RuntimeException(); } @Override public void popState() { throw new RuntimeException(); } @Override public void startNonCancelableSection() { throw new RuntimeException(); } @Override public void finishNonCancelableSection() { throw new RuntimeException(); } @Override public boolean isModal() { return false; } @NotNull @Override public ModalityState getModalityState() { throw new RuntimeException(); } @Override public void setModalityProgress(ProgressIndicator modalityProgress) { throw new RuntimeException(); } @Override public boolean isIndeterminate() { throw new RuntimeException(); } @Override public void setIndeterminate(boolean indeterminate) { throw new RuntimeException(); } @Override public boolean isPopupWasShown() { throw new RuntimeException(); } @Override public boolean isShowing() { throw new RuntimeException(); } @Override public boolean isRunning() { return true; } @Override public void cancel() { myCanceled = true; ProgressManager.canceled(this); } @Override public boolean isCanceled() { return myCanceled; } @Override public void checkCanceled() throws ProcessCanceledException { if (myCanceled) throw new ProcessCanceledException(); } } public void testDefaultModalityWithNestedProgress() { assertEquals(ModalityState.NON_MODAL, ModalityState.defaultModalityState()); ProgressManager.getInstance().run(new Task.Modal(getProject(), "", false) { @Override public void run(@NotNull ProgressIndicator indicator) { try { assertFalse(ModalityState.NON_MODAL.equals(ModalityState.defaultModalityState())); assertEquals(ProgressManager.getInstance().getProgressIndicator().getModalityState(), ModalityState.defaultModalityState()); ProgressManager.getInstance().runProcess(() -> { assertSame(indicator.getModalityState(), ModalityState.defaultModalityState()); assertInvokeAndWaitWorks(); }, new ProgressIndicatorBase()); } catch (Throwable e) { throw new RuntimeException(e); // ProgressManager doesn't handle errors } } }); } public void testProgressWrapperModality() { ProgressManager.getInstance().run(new Task.Modal(getProject(), "", false) { @Override public void run(@NotNull ProgressIndicator indicator) { try { Future<?> future = ApplicationManager.getApplication().executeOnPooledThread( () -> ProgressManager.getInstance().runProcess( () -> assertInvokeAndWaitWorks(), ProgressWrapper.wrap(indicator))); future.get(2000, TimeUnit.MILLISECONDS); } 
catch (Throwable e) { throw new RuntimeException(e); // ProgressManager doesn't handle errors } } }); } private static void assertInvokeAndWaitWorks() { Semaphore semaphore = new Semaphore(); semaphore.down(); ApplicationManager.getApplication().invokeLater(() -> semaphore.up()); assertTrue("invokeAndWait would deadlock", semaphore.waitFor(1000)); } }
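Many of the tests above rest on the same contract: once the indicator that the current thread runs under has been cancelled, ProgressManager.checkCanceled() throws ProcessCanceledException. The sketch below restates that contract in isolation; it assumes it would be added as another test method of the class above (fail() being the inherited JUnit assertion).

public void testCheckCanceledThrowsRightAfterCancelSketch() {
    ProgressIndicatorBase indicator = new ProgressIndicatorBase();
    try {
        ProgressManager.getInstance().runProcess(() -> {
            indicator.cancel();
            ProgressManager.checkCanceled(); // expected to throw once the current indicator is canceled
        }, indicator);
        fail("checkCanceled() should have thrown after cancel()");
    } catch (ProcessCanceledException expected) {
        // cancellation is observed immediately, without any polling delay
    }
}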
/** * Copyright Pravega Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.pravega.controller.mocks; import io.netty.buffer.Unpooled; import io.pravega.client.connection.impl.ConnectionPool; import io.pravega.client.tables.impl.HashTableIteratorItem; import io.pravega.client.tables.impl.TableSegmentEntry; import io.pravega.client.tables.impl.TableSegmentKey; import io.pravega.client.tables.impl.TableSegmentKeyVersion; import io.pravega.common.concurrent.Futures; import io.pravega.common.util.BitConverter; import io.pravega.common.util.ByteArraySegment; import io.pravega.controller.server.SegmentHelper; import io.pravega.controller.server.WireCommandFailedException; import io.pravega.controller.store.host.HostControllerStore; import io.pravega.controller.store.host.HostStoreFactory; import io.pravega.controller.store.host.impl.HostMonitorConfigImpl; import io.pravega.controller.stream.api.grpc.v1.Controller.NodeUri; import io.pravega.controller.stream.api.grpc.v1.Controller.TxnStatus; import io.pravega.shared.protocol.netty.WireCommandType; import io.pravega.shared.protocol.netty.WireCommands; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; public class SegmentHelperMock { private static final int SERVICE_PORT = 12345; public static SegmentHelper getSegmentHelperMock() { SegmentHelper helper = spy(new SegmentHelper(mock(ConnectionPool.class), HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig()), mock(ScheduledExecutorService.class))); doReturn(NodeUri.newBuilder().setEndpoint("localhost").setPort(SERVICE_PORT).build()).when(helper).getSegmentUri( anyString(), anyString(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).sealSegment( anyString(), anyString(), anyLong(), any(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).createSegment( anyString(), anyString(), anyLong(), any(), any(), anyLong(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).deleteSegment( anyString(), anyString(), anyLong(), any(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).createTransaction( anyString(), anyString(), anyLong(), any(), any(), anyLong(), anyLong()); TxnStatus txnStatus = 
TxnStatus.newBuilder().setStatus(TxnStatus.Status.SUCCESS).build(); doReturn(CompletableFuture.completedFuture(txnStatus)).when(helper).abortTransaction( anyString(), anyString(), anyLong(), any(), any(), anyLong()); doReturn(CompletableFuture.completedFuture(0L)).when(helper).mergeTxnSegments( anyString(), anyString(), anyLong(), anyLong(), any(), any(), anyLong()); doAnswer(x -> { List<Long> list = ((List<UUID>) x.getArgument(4)).stream().map(z -> 0L).collect(Collectors.toList()); return CompletableFuture.completedFuture(list); }).when(helper).mergeTxnSegments( anyString(), anyString(), anyLong(), anyLong(), any(), any(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).updatePolicy( anyString(), anyString(), any(), anyLong(), any(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).truncateSegment( anyString(), anyString(), anyLong(), anyLong(), any(), anyLong()); doReturn(CompletableFuture.completedFuture(new WireCommands.StreamSegmentInfo( 0L, "", true, true, false, 0L, 0L, 0L))) .when(helper).getSegmentInfo(anyString(), anyString(), anyLong(), anyString(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).createTableSegment( anyString(), anyString(), anyLong(), anyBoolean(), anyInt(), anyLong()); doReturn(CompletableFuture.completedFuture(null)).when(helper).deleteTableSegment( anyString(), anyBoolean(), anyString(), anyLong()); return helper; } public static SegmentHelper getFailingSegmentHelperMock() { SegmentHelper helper = spy(new SegmentHelper(mock(ConnectionPool.class), mock(HostControllerStore.class), mock(ScheduledExecutorService.class))); doReturn(NodeUri.newBuilder().setEndpoint("localhost").setPort(SERVICE_PORT).build()).when(helper).getSegmentUri( anyString(), anyString(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).sealSegment( anyString(), anyString(), anyLong(), any(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).createSegment( anyString(), anyString(), anyLong(), any(), any(), anyLong(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).deleteSegment( anyString(), anyString(), anyLong(), any(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).createTransaction( anyString(), anyString(), anyLong(), any(), any(), anyLong(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).abortTransaction( anyString(), anyString(), anyLong(), any(), any(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).mergeTxnSegments( anyString(), anyString(), anyLong(), anyLong(), any(), anyString(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).updatePolicy( anyString(), anyString(), any(), anyLong(), any(), anyLong()); doReturn(Futures.failedFuture(new RuntimeException())).when(helper).createTableSegment( anyString(), anyString(), anyLong(), anyBoolean(), anyInt(), anyLong()); return helper; } public static SegmentHelper getSegmentHelperMockForTables(ScheduledExecutorService executor) { SegmentHelper helper = getSegmentHelperMock(); final Object lock = new Object(); final Map<String, Map<ByteBuffer, TableSegmentEntry>> mapOfTables = new HashMap<>(); final Map<String, Map<ByteBuffer, Long>> mapOfTablesPosition = new HashMap<>(); // region create table doAnswer(x -> { String tableName = x.getArgument(0); return CompletableFuture.runAsync(() -> { synchronized (lock) { 
mapOfTables.putIfAbsent(tableName, new HashMap<>()); mapOfTablesPosition.putIfAbsent(tableName, new HashMap<>()); } }, executor); }).when(helper).createTableSegment(anyString(), anyString(), anyLong(), anyBoolean(), anyInt(), anyLong()); // endregion // region delete table doAnswer(x -> { String tableName = x.getArgument(0); Boolean mustBeEmpty = x.getArgument(1); final WireCommandType type = WireCommandType.DELETE_TABLE_SEGMENT; return CompletableFuture.supplyAsync(() -> { synchronized (lock) { if (!mapOfTables.containsKey(tableName)) { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist); } boolean empty = Optional.ofNullable(mapOfTables.get(tableName)).orElse(Collections.emptyMap()).isEmpty(); if (!mustBeEmpty || empty) { mapOfTables.remove(tableName); mapOfTablesPosition.remove(tableName); return null; } else { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableSegmentNotEmpty); } } }, executor); }).when(helper).deleteTableSegment(anyString(), anyBoolean(), anyString(), anyLong()); // endregion // region update keys doAnswer(x -> { final WireCommandType type = WireCommandType.UPDATE_TABLE_ENTRIES; String tableName = x.getArgument(0); List<TableSegmentEntry> entries = x.getArgument(1); return CompletableFuture.supplyAsync(() -> { synchronized (lock) { Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName); Map<ByteBuffer, Long> tablePos = mapOfTablesPosition.get(tableName); if (table == null) { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist); } else { List<TableSegmentKeyVersion> resultList = new LinkedList<>(); entries.forEach(entry -> { ByteBuffer key = entry.getKey().getKey().copy().nioBuffer(); byte[] value = entry.getValue().copy().array(); TableSegmentEntry existingEntry = table.get(key); if (existingEntry == null) { if (entry.getKey().getVersion().equals(TableSegmentKeyVersion.NOT_EXISTS)) { TableSegmentEntry newEntry = TableSegmentEntry.versioned(key.array(), value, 0); table.put(key, newEntry); tablePos.put(key, System.nanoTime()); resultList.add(newEntry.getKey().getVersion()); } else { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyDoesNotExist); } } else if (existingEntry.getKey().getVersion().equals(entry.getKey().getVersion())) { TableSegmentKeyVersion newVersion = TableSegmentKeyVersion.from( existingEntry.getKey().getVersion().getSegmentVersion() + 1); TableSegmentEntry newEntry = TableSegmentEntry.versioned(key.array(), value, newVersion.getSegmentVersion()); table.put(key, newEntry); tablePos.put(key, System.nanoTime()); resultList.add(newVersion); } else { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyBadVersion); } }); return resultList; } } }, executor); }).when(helper).updateTableEntries(anyString(), any(), anyString(), anyLong()); // endregion // region remove keys doAnswer(x -> { final WireCommandType type = WireCommandType.REMOVE_TABLE_KEYS; String tableName = x.getArgument(0); List<TableSegmentKey> keys = x.getArgument(1); return CompletableFuture.runAsync(() -> { synchronized (lock) { Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName); Map<ByteBuffer, Long> tablePos = mapOfTablesPosition.get(tableName); if (table == null) { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist); } else { keys.forEach(rawKey -> { ByteBuffer key = rawKey.getKey().copy().nioBuffer(); TableSegmentEntry 
existingEntry = table.get(key); if (existingEntry != null) { if (existingEntry.getKey().getVersion().equals(rawKey.getVersion()) || rawKey.getVersion() == null || rawKey.getVersion().equals(TableSegmentKeyVersion.NO_VERSION)) { table.remove(key); tablePos.remove(key); } else { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyBadVersion); } } }); } } }, executor); }).when(helper).removeTableKeys(anyString(), any(), anyString(), anyLong()); // endregion // region read keys doAnswer(x -> { final WireCommandType type = WireCommandType.READ_TABLE; String tableName = x.getArgument(0); List<TableSegmentKey> requestKeys = x.getArgument(1); return CompletableFuture.supplyAsync(() -> { synchronized (lock) { Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName); if (table == null) { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist); } else { List<TableSegmentEntry> resultList = new LinkedList<>(); requestKeys.forEach(requestKey -> { ByteBuffer key = requestKey.getKey().copy().nioBuffer(); TableSegmentEntry existingEntry = table.get(key); if (existingEntry == null) { resultList.add(TableSegmentEntry.notExists(key.array(), new byte[0])); } else if (existingEntry.getKey().getVersion().equals(requestKey.getVersion()) || requestKey.getVersion() == null || requestKey.getVersion().equals(TableSegmentKeyVersion.NO_VERSION)) { resultList.add(duplicate(existingEntry)); } else { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyBadVersion); } }); return resultList; } } }, executor); }).when(helper).readTable(anyString(), any(), anyString(), anyLong()); // endregion // region readTableKeys doAnswer(x -> { String tableName = x.getArgument(0); int limit = x.getArgument(1); HashTableIteratorItem.State state = x.getArgument(2); final WireCommandType type = WireCommandType.READ_TABLE; return CompletableFuture.supplyAsync(() -> { synchronized (lock) { Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName); Map<ByteBuffer, Long> tablePos = mapOfTablesPosition.get(tableName); if (table == null) { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist); } else { long floor; if (state.equals(HashTableIteratorItem.State.EMPTY)) { floor = 0L; } else { floor = new ByteArraySegment(state.toBytes()).getLong(0); } AtomicLong token = new AtomicLong(floor); List<TableSegmentKey> list = tablePos.entrySet().stream() .sorted(Comparator.comparingLong(Map.Entry::getValue)) .filter(c -> c.getValue() > floor) .map(r -> { token.set(r.getValue()); return duplicate(table.get(r.getKey()).getKey()); }) .limit(limit).collect(Collectors.toList()); byte[] continuationToken = new byte[Long.BYTES]; BitConverter.writeLong(continuationToken, 0, token.get()); HashTableIteratorItem.State newState = HashTableIteratorItem.State.fromBytes(Unpooled.wrappedBuffer(continuationToken)); return new HashTableIteratorItem<>(newState, list); } } }, executor); }).when(helper).readTableKeys(anyString(), anyInt(), any(), anyString(), anyLong()); // endregion // region readTableEntries doAnswer(x -> { String tableName = x.getArgument(0); int limit = x.getArgument(1); HashTableIteratorItem.State state = x.getArgument(2); final WireCommandType type = WireCommandType.READ_TABLE; return CompletableFuture.supplyAsync(() -> { synchronized (lock) { Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName); Map<ByteBuffer, Long> tablePos = 
mapOfTablesPosition.get(tableName); if (table == null) { throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist); } else { long floor; if (state.equals(HashTableIteratorItem.State.EMPTY)) { floor = 0L; } else { floor = new ByteArraySegment(state.toBytes()).getLong(0); } AtomicLong token = new AtomicLong(floor); List<TableSegmentEntry> list = tablePos.entrySet().stream() .sorted(Comparator.comparingLong(Map.Entry::getValue)) .filter(c -> c.getValue() > floor) .map(r -> { token.set(r.getValue()); return duplicate(table.get(r.getKey())); }) .limit(limit).collect(Collectors.toList()); byte[] continuationToken = new byte[Long.BYTES]; BitConverter.writeLong(continuationToken, 0, token.get()); HashTableIteratorItem.State newState = HashTableIteratorItem.State.fromBytes(Unpooled.wrappedBuffer(continuationToken)); return new HashTableIteratorItem<>(newState, list); } } }, executor); }).when(helper).readTableEntries(anyString(), anyInt(), any(), anyString(), anyLong()); // endregion return helper; } private static TableSegmentKey duplicate(TableSegmentKey key) { return key.getVersion().equals(TableSegmentKeyVersion.NOT_EXISTS) ? TableSegmentKey.notExists(key.getKey().copy()) : TableSegmentKey.versioned(key.getKey().copy(), key.getVersion().getSegmentVersion()); } private static TableSegmentEntry duplicate(TableSegmentEntry entry) { return entry.getKey().getVersion().equals(TableSegmentKeyVersion.NOT_EXISTS) ? TableSegmentEntry.notExists(entry.getKey().getKey().copy(), entry.getValue().copy()) : TableSegmentEntry.versioned(entry.getKey().getKey().copy(), entry.getValue().copy(), entry.getKey().getVersion().getSegmentVersion()); } }
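/*
 * Usage sketch (hypothetical, illustrative only): shows how a controller test might drive the
 * in-memory table-segment mock defined above through SegmentHelper's table API. The empty-string
 * and trailing numeric arguments passed to createTableSegment/updateTableEntries are placeholders
 * inferred solely from the argument matchers in getSegmentHelperMockForTables; the class and
 * method names of this sketch itself are made up.
 */
package io.pravega.controller.mocks;

import io.pravega.client.tables.impl.TableSegmentEntry;
import io.pravega.controller.server.SegmentHelper;

import java.util.Collections;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

class SegmentHelperMockUsageSketch {
    static void example() {
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        SegmentHelper helper = SegmentHelperMock.getSegmentHelperMockForTables(executor);

        // Create an in-memory table, then insert one key with the NOT_EXISTS version,
        // which the mock treats as a conditional "create" of that key.
        helper.createTableSegment("scope/_table", "", 0L, false, 0, 0L).join();
        helper.updateTableEntries("scope/_table",
                Collections.singletonList(TableSegmentEntry.notExists("key".getBytes(), "value".getBytes())),
                "", 0L).join();

        executor.shutdown();
    }
}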
/* * Copyright (c) 2015 Spotify AB. * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.helios.cli.command; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Supplier; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.spotify.helios.client.HeliosClient; import com.spotify.helios.common.Json; import com.spotify.helios.common.descriptors.JobId; import com.spotify.helios.common.descriptors.RolloutOptions; import com.spotify.helios.common.descriptors.TaskStatus; import com.spotify.helios.common.protocol.DeploymentGroupStatusResponse; import com.spotify.helios.common.protocol.RollingUpdateResponse; import net.sourceforge.argparse4j.inf.Argument; import net.sourceforge.argparse4j.inf.Namespace; import net.sourceforge.argparse4j.inf.Subparser; import java.io.BufferedReader; import java.io.IOException; import java.io.PrintStream; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static com.google.common.base.Preconditions.checkArgument; import static java.lang.String.format; import static net.sourceforge.argparse4j.impl.Arguments.storeTrue; public class RollingUpdateCommand extends WildcardJobCommand { private static final long POLL_INTERVAL_MILLIS = 1000; private final SleepFunction sleepFunction; private final Supplier<Long> timeSupplier; private final Argument nameArg; private final Argument timeoutArg; private final Argument parallelismArg; private final Argument asyncArg; private final Argument rolloutTimeoutArg; private final Argument migrateArg; public RollingUpdateCommand(final Subparser parser) { this(parser, new SleepFunction() { @Override public void sleep(final long millis) throws InterruptedException { Thread.sleep(millis); } }, new Supplier<Long>() { @Override public Long get() { return System.currentTimeMillis(); } }); } @VisibleForTesting RollingUpdateCommand(final Subparser parser, final SleepFunction sleepFunction, final Supplier<Long> timeSupplier) { super(parser); this.sleepFunction = sleepFunction; this.timeSupplier = timeSupplier; parser.help("Initiate a rolling update"); nameArg = parser.addArgument("deployment-group-name") .required(true) .help("Deployment group name"); timeoutArg = parser.addArgument("-t", "--timeout") .setDefault(RolloutOptions.DEFAULT_TIMEOUT) .type(Long.class) .help("Fail rollout if a job takes longer than this to reach RUNNING (seconds)"); parallelismArg = parser.addArgument("-p", "--par") .dest("parallelism") .setDefault(RolloutOptions.DEFAULT_PARALLELISM) .type(Integer.class) .help("Number of hosts to deploy to concurrently"); asyncArg = parser.addArgument("--async") .action(storeTrue()) .help("Don't block until rolling-update is complete"); 
rolloutTimeoutArg = parser.addArgument("-T", "--rollout-timeout") .setDefault(60L) .type(Long.class) .help("Exit if rolling-update takes longer than the given value (minutes). Note that " + "this will NOT abort the rolling update, it will just cause this command to exit."); migrateArg = parser.addArgument("--migrate") .setDefault(false) .action(storeTrue()) .help("When specified a rolling-update will undeploy not only jobs previously deployed " + "by the deployment-group but also jobs with the same job id. Use it ONCE when " + "migrating a service to using deployment-groups"); } @Override protected int runWithJobId(final Namespace options, final HeliosClient client, final PrintStream out, final boolean json, final JobId jobId, final BufferedReader stdin) throws ExecutionException, InterruptedException, IOException { final String name = options.getString(nameArg.getDest()); final long timeout = options.getLong(timeoutArg.getDest()); final int parallelism = options.getInt(parallelismArg.getDest()); final boolean async = options.getBoolean(asyncArg.getDest()); final long rolloutTimeout = options.getLong(rolloutTimeoutArg.getDest()); final boolean migrate = options.getBoolean(migrateArg.getDest()); checkArgument(timeout > 0, "Timeout must be greater than 0"); checkArgument(parallelism > 0, "Parallelism must be greater than 0"); checkArgument(rolloutTimeout > 0, "Rollout timeout must be greater than 0"); final long startTime = timeSupplier.get(); final RolloutOptions rolloutOptions = RolloutOptions.newBuilder() .setTimeout(timeout) .setParallelism(parallelism) .setMigrate(migrate) .build(); final RollingUpdateResponse response = client.rollingUpdate(name, jobId, rolloutOptions).get(); if (response.getStatus() != RollingUpdateResponse.Status.OK) { if (!json) { out.println("Failed: " + response); } else { out.println(response.toJsonString()); } return 1; } if (!json) { out.println(format("Rolling update%s started: %s -> %s (parallelism=%d, timeout=%d)%s", async ? " (async)" : "", name, jobId.toShortString(), parallelism, timeout, async ? 
"" : "\n")); } final Map<String, Object> jsonOutput = Maps.newHashMap(); jsonOutput.put("parallelism", parallelism); jsonOutput.put("timeout", timeout); if (async) { if (json) { jsonOutput.put("status", response.getStatus()); out.println(Json.asStringUnchecked(jsonOutput)); } return 0; } String error = ""; boolean failed = false; boolean timedOut = false; final Set<String> reported = Sets.newHashSet(); while (true) { final DeploymentGroupStatusResponse status = client.deploymentGroupStatus(name).get(); if (status == null) { failed = true; error = "Failed to fetch deployment-group status"; break; } if (!jobId.equals(status.getJobId())) { // Another rolling-update was started, overriding this one -- exit failed = true; error = "Deployment-group job id changed during rolling-update"; break; } if (!json) { for (DeploymentGroupStatusResponse.HostStatus hostStatus : status.getHostStatuses()) { final JobId hostJobId = hostStatus.getJobId(); final String host = hostStatus.getHost(); final TaskStatus.State state = hostStatus.getState(); final boolean done = hostJobId != null && hostJobId.equals(jobId) && state == TaskStatus.State.RUNNING; if (done && reported.add(host)) { out.println(format("%s -> %s (%d/%d)", host, state, reported.size(), status.getHostStatuses().size())); } } } if (status.getStatus() != DeploymentGroupStatusResponse.Status.ROLLING_OUT) { if (status.getStatus() == DeploymentGroupStatusResponse.Status.FAILED) { failed = true; error = status.getError(); } break; } if (timeSupplier.get() - startTime > TimeUnit.MINUTES.toMillis(rolloutTimeout)) { // Rollout timed out timedOut = true; break; } sleepFunction.sleep(POLL_INTERVAL_MILLIS); } final double duration = (timeSupplier.get() - startTime) / 1000.0; if (json) { if (failed) { jsonOutput.put("status", "FAILED"); jsonOutput.put("error", error); } else if (timedOut) { jsonOutput.put("status", "TIMEOUT"); } else { jsonOutput.put("status", "DONE"); } jsonOutput.put("duration", duration); out.println(Json.asStringUnchecked(jsonOutput)); } else { out.println(); if (failed) { out.println(format("Failed: %s", error)); } else if (timedOut) { out.println("Timed out! (rolling-update still in progress)"); } else { out.println("Done."); } out.println(format("Duration: %.2f s", duration)); } return (failed || timedOut) ? 1 : 0; } interface SleepFunction { void sleep(long millis) throws InterruptedException; } }
/* * Copyright (c) 2014. Real Time Genomics Limited. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the * distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.rtg.sam; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import com.rtg.bed.BedRangeLoader; import com.rtg.bed.BedRecord; import com.rtg.bed.SimpleBedRangeLoader; import com.rtg.reader.SequencesReader; import com.rtg.util.diagnostic.Diagnostic; import com.rtg.util.diagnostic.NoTalkbackSlimException; import com.rtg.util.intervals.Interval; import com.rtg.util.intervals.LongRange; import com.rtg.util.intervals.Range; import com.rtg.util.intervals.RangeList; import com.rtg.util.intervals.SimpleRangeMeta; import com.rtg.util.intervals.RangeMeta; import com.rtg.util.intervals.ReferenceRanges; import com.rtg.util.intervals.RegionRestriction; import com.rtg.util.intervals.SequenceNameLocus; import com.rtg.util.intervals.SequenceNameLocusSimple; import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.SAMSequenceDictionary; import htsjdk.samtools.SAMSequenceRecord; /** * Utilities for dealing with lists of ranges / regions to be applied when loading SAM. */ public final class SamRangeUtils { private SamRangeUtils() { } /** * Make a reference range list from whatever the params say * @param header the SAM header containing sequence information * @param params supplies restriction range configuration * @return the ReferenceRanges lookup * @throws java.io.IOException if we could not load a required BED file. 
*/ public static ReferenceRanges<String> createReferenceRanges(SAMFileHeader header, SamFilterParams params) throws IOException { final ReferenceRanges<String> nameRangeMap; if (params.bedRegionsFile() != null) { nameRangeMap = createBedReferenceRanges(params.bedRegionsFile(), header); } else if (params.restriction() != null) { // Single restriction region final SamRegionRestriction regionRestriction = params.restriction(); nameRangeMap = createSingleReferenceRange(header, regionRestriction); } else { // no restriction, add full ranges for each sequence nameRangeMap = createFullReferenceRanges(header); } // In theory the above range loading methods have already performed appropriate validation of sequence names and boundaries // Let's validate here just in case. validateRanges(header, nameRangeMap); return nameRangeMap; } /** * Make a reference range list from all regions contained in a BED file. These have had the ranges checked against * the provided sequence dictionary, and a reference sequence id map set. * @param bedFile the BED file to load * @param header the SAM header containing sequence information * @return the ReferenceRanges lookup * @throws java.io.IOException if there was a problem loading the BED file */ public static ReferenceRanges<String> createBedReferenceRanges(File bedFile, SAMFileHeader header) throws IOException { return BedRangeLoader.getReferenceRanges(new ResolvedBedRangeLoader(header), bedFile); } /** * Make a reference range list from all regions contained in a BED file. These have not had the ranges checked against * any sequence dictionary, or any id map set. * @param bedFile the BED file to load * @return the ReferenceRanges lookup * @throws java.io.IOException if there was a problem loading the BED file */ public static ReferenceRanges<String> createBedReferenceRanges(File bedFile) throws IOException { return BedRangeLoader.getReferenceRanges(new SimpleBedRangeLoader(), bedFile); } /** * Make a reference range list corresponding to the full length of all reference sequences * @param header the SAM header containing sequence information * @return the ReferenceRanges lookup */ public static ReferenceRanges<String> createFullReferenceRanges(SAMFileHeader header) { final ReferenceRanges<String> rangeMap = new ReferenceRanges<>(true); for (final SAMSequenceRecord r : header.getSequenceDictionary().getSequences()) { final int rlen = r.getSequenceLength(); if (rlen > 0) { rangeMap.put(r.getSequenceName(), new RangeList<>(new SimpleRangeMeta<>(0, rlen, r.getSequenceName()))); } } rangeMap.setIdMap(SamUtils.getSequenceIdLookup(header.getSequenceDictionary())); return rangeMap; } /** * Make a reference range list corresponding to the full length of all reference sequences * @param sequencesReader sequences reader used to determine sequence names and lengths * @return the ReferenceRanges lookup * @throws IOException if an I/O error occurs */ public static ReferenceRanges<String> createFullReferenceRanges(SequencesReader sequencesReader) throws IOException { final ReferenceRanges<String> rangeMap = new ReferenceRanges<>(false); final Map<String, Integer> idMap = new HashMap<>(); for (int k = 0; k < sequencesReader.numberSequences(); ++k) { final int rlen = sequencesReader.length(k); if (rlen > 0) { final String name = sequencesReader.names().name(k); rangeMap.put(name, new RangeList<>(new SimpleRangeMeta<>(0, rlen, name))); idMap.put(name, k); } } rangeMap.setIdMap(idMap); return rangeMap; } /** * Make a reference range list from a single SamRegionRestriction * @param 
header the SAM header containing sequence information * @param regionRestriction the region * @return the ReferenceRanges lookup */ public static ReferenceRanges<String> createSingleReferenceRange(SAMFileHeader header, SamRegionRestriction regionRestriction) { final ReferenceRanges<String> rangeMap = new ReferenceRanges<>(false); final SequenceNameLocus resolved = resolveRestriction(header.getSequenceDictionary(), regionRestriction); rangeMap.put(resolved.getSequenceName(), new RangeList<>(new SimpleRangeMeta<>(resolved.getStart(), resolved.getEnd(), regionRestriction.toString()))); rangeMap.setIdMap(SamUtils.getSequenceIdLookup(header.getSequenceDictionary())); return rangeMap; } /** * Make a reference range list from a multiple SamRegionRestrictions * @param header the SAM header containing sequence information * @param regions the region * @return the ReferenceRanges lookup */ public static ReferenceRanges<String> createExplicitReferenceRange(SAMFileHeader header, SamRegionRestriction... regions) { final ReferenceRanges.Accumulator<String> acc = new ReferenceRanges.Accumulator<>(); for (SamRegionRestriction region : regions) { final SequenceNameLocus resolved = resolveRestriction(header.getSequenceDictionary(), region); acc.addRangeData(resolved.getSequenceName(), new SimpleRangeMeta<>(resolved.getStart(), resolved.getEnd(), region.toString())); } final ReferenceRanges<String> ranges = acc.getReferenceRanges(); ranges.setIdMap(SamUtils.getSequenceIdLookup(header.getSequenceDictionary())); return ranges; } /** * This is a somewhat dodgy method of making a ReferenceRanges corresponding to a single region restriction where no * sequence length information is available. Sets missing start to Integer.MIN_VALUE and missing end to Integer.MAX_VALUE. Note * that this has not had the sequence id mapping information supplied either. * @param regions the region restrictions * @return the ReferenceRanges * @throws java.lang.NullPointerException if any of the regions is null */ public static ReferenceRanges<String> createExplicitReferenceRange(SequenceNameLocus... regions) { final ReferenceRanges.Accumulator<String> acc = new ReferenceRanges.Accumulator<>(); for (SequenceNameLocus region : regions) { if (region == null || region.getSequenceName() == null) { throw new NullPointerException(); } final Range wideRange = new Range(region.getStart() == RegionRestriction.MISSING ? Integer.MIN_VALUE : region.getStart(), region.getEnd() == RegionRestriction.MISSING ? Integer.MAX_VALUE : region.getEnd()); acc.addRangeData(region.getSequenceName(), new SimpleRangeMeta<>(wideRange.getStart(), wideRange.getEnd(), region.toString())); } return acc.getReferenceRanges(); } /** * This is a somewhat dodgy method of making a ReferenceRanges corresponding to set of whole chromosomes. * Note that this has not had the sequence id mapping information supplied either. 
* @param chroms the chromosome names * @return the ReferenceRanges */ public static ReferenceRanges<String> createExplicitReferenceRange(Collection<String> chroms) { final ReferenceRanges.Accumulator<String> acc = new ReferenceRanges.Accumulator<>(); for (String chrom : chroms) { final Range wideRange = new Range(Integer.MIN_VALUE, Integer.MAX_VALUE); acc.addRangeData(chrom, new SimpleRangeMeta<>(wideRange.getStart(), wideRange.getEnd(), chrom)); } return acc.getReferenceRanges(); } // Validation of the supplied ranges against names and lengths in SequenceDictionary static <T> void validateRanges(SAMFileHeader header, ReferenceRanges<T> rangeMap) { for (final String seq : rangeMap.sequenceNames()) { final SAMSequenceRecord r = header.getSequenceDictionary().getSequence(seq); if (r == null) { throw new NoTalkbackSlimException("Sequence \"" + seq + "\" referenced in regions not found in the SAM sequence dictionary."); } if (r.getSequenceLength() > 0) { final RangeList<T> rs = rangeMap.get(seq); if (rs != null) { final List<? extends Interval> ranges = rs.getRangeList(); final Interval last = ranges.get(ranges.size() - 1); if (last.getEnd() > r.getSequenceLength()) { throw new NoTalkbackSlimException("Specified sequence range (" + r.getSequenceName() + ":" + last + ") is outside the length of the sequence (" + r.getSequenceLength() + ")"); } } } } } /** * Resolves an inital range (supplied by the user, and may have unbounded ends) to the available sequences. * If end is greater than number of sequences it sets end to number of sequences. * @param range the range * @param dictionary the dictionary with which to validate/resolve the range * @return the resolved range. * @throws NoTalkbackSlimException if the start is out of range. */ public static SequenceNameLocus resolveRestriction(SAMSequenceDictionary dictionary, SequenceNameLocus range) { final SAMSequenceRecord sequence = dictionary.getSequence(range.getSequenceName()); if (sequence == null) { throw new NoTalkbackSlimException("Sequence \"" + range.getSequenceName() + "\" referenced in region was not found in the SAM sequence dictionary."); } final int start = range.getStart() == SamRegionRestriction.MISSING ? 0 : range.getStart(); final int length = sequence.getSequenceLength(); if (start > length || (length != 0 && start == length)) { // Allow start == 0 if empty sequence throw new NoTalkbackSlimException("The start position \"" + start + "\" must be less than than length of the sequence \"" + length + "\"."); } int end = range.getEnd() == LongRange.MISSING ? length : range.getEnd(); if (end > length) { Diagnostic.warning("The end position \"" + range.getEnd() + "\" is outside the length of the sequence (" + length + "). 
Defaulting end to \"" + length + "\""); end = length; } return new SequenceNameLocusSimple(range.getSequenceName(), start, end); } /** Adds checking that referenced sequences and coordinates are valid, and sets the ID map */ private static class ResolvedBedRangeLoader extends SimpleBedRangeLoader { private final SAMSequenceDictionary mDictionary; ResolvedBedRangeLoader(SAMFileHeader header) { super(); mDictionary = header.getSequenceDictionary(); } @Override protected RangeMeta<String> getRangeData(BedRecord rec) { SequenceNameLocus region = rec; if (region.getEnd() == region.getStart()) { region = new SequenceNameLocusSimple(rec.getSequenceName(), rec.getStart(), rec.getEnd() + 1); } final SequenceNameLocus r = resolveRestriction(mDictionary, region); return new SimpleRangeMeta<>(r.getStart(), r.getEnd(), getMeta(rec)); } @Override public ReferenceRanges<String> getReferenceRanges() { final ReferenceRanges<String> ranges = super.getReferenceRanges(); ranges.setIdMap(SamUtils.getSequenceIdLookup(mDictionary)); return ranges; } } }
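/*
 * Usage sketch for the utilities above, using a made-up sequence dictionary. It builds
 * full-length ranges for every sequence in a header plus a single explicit region, then
 * validates the ranges against the dictionary. The SamRangeUtils calls are the methods defined
 * above; the htsjdk header/record construction is standard htsjdk usage assumed for the example.
 */
package com.rtg.sam;

import com.rtg.util.intervals.ReferenceRanges;
import com.rtg.util.intervals.SequenceNameLocusSimple;

import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMSequenceRecord;

class SamRangeUtilsUsageSketch {
    static void example() {
        final SAMFileHeader header = new SAMFileHeader();
        header.getSequenceDictionary().addSequence(new SAMSequenceRecord("chr1", 1000));

        // Whole-genome ranges: one full-length range per sequence with length > 0.
        final ReferenceRanges<String> all = SamRangeUtils.createFullReferenceRanges(header);

        // A single explicit region; MISSING ends would widen to Integer.MIN_VALUE/MAX_VALUE.
        final ReferenceRanges<String> one = SamRangeUtils.createExplicitReferenceRange(
            new SequenceNameLocusSimple("chr1", 100, 200));

        // Throws NoTalkbackSlimException if a region names an unknown sequence or overruns its length.
        SamRangeUtils.validateRanges(header, all);
        SamRangeUtils.validateRanges(header, one);
    }
}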
package org.apache.commons.jcs.auxiliary.lateral.xmlrpc; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.commons.jcs.auxiliary.lateral.LateralCacheAttributes; import org.apache.commons.jcs.auxiliary.lateral.LateralCacheInfo; import org.apache.commons.jcs.auxiliary.lateral.LateralElementDescriptor; import org.apache.commons.jcs.auxiliary.lateral.behavior.ICacheServiceNonLocal; import org.apache.commons.jcs.auxiliary.lateral.behavior.ILateralCacheAttributes; import org.apache.commons.jcs.auxiliary.lateral.behavior.ILateralCacheObserver; import org.apache.commons.jcs.engine.CacheElement; import org.apache.commons.jcs.engine.behavior.ICacheElement; import org.apache.commons.jcs.engine.behavior.ICacheListener; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.Serializable; import java.util.Set; /** * A lateral cache service implementation. * * @version $Id: LateralXMLRPCService.java,v 1.8 2002/02/17 07:16:24 asmuts Exp * $ */ public class LateralXMLRPCService implements ICacheServiceNonLocal, ILateralCacheObserver { private static final Log log = LogFactory.getLog( LateralXMLRPCService.class ); private ILateralCacheAttributes ilca; private LateralXMLRPCSender sender; /** * Constructor for the LateralXMLRPCService object * * @param lca * @throws IOException */ public LateralXMLRPCService( ILateralCacheAttributes lca ) throws IOException { this.ilca = lca; try { log.debug( "creating sender" ); sender = new LateralXMLRPCSender( lca ); log.debug( "created sender" ); } catch ( IOException e ) { //log.error( "Could not create sender", e ); // This gets thrown over and over in recovery mode. // The stack trace isn't useful here. 
log.error( "Could not create sender to [" + lca.getTcpServer() + "] -- " + e.getMessage() ); throw e; } } // -------------------------------------------------------- Service Methods /** * @param item * @throws IOException */ public void update( ICacheElement<K, V> item ) throws IOException { update( item, LateralCacheInfo.listenerId ); } /** * @param item * @param requesterId * @throws IOException */ public void update( ICacheElement<K, V> item, long requesterId ) throws IOException { LateralElementDescriptor led = new LateralElementDescriptor( item ); led.requesterId = requesterId; led.command = led.UPDATE; sender.send( led ); } /** * @param cacheName * @param key * @throws IOException */ public void remove( String cacheName, K key ) throws IOException { remove( cacheName, key, LateralCacheInfo.listenerId ); } /** * @param cacheName * @param key * @param requesterId * @throws IOException */ public void remove( String cacheName, K key, long requesterId ) throws IOException { CacheElement ce = new CacheElement( cacheName, key, null ); LateralElementDescriptor led = new LateralElementDescriptor( ce ); led.requesterId = requesterId; led.command = led.REMOVE; sender.send( led ); } /** * @throws IOException */ public void release() throws IOException { // nothing needs to be done } /** * Will close the connection. * * @param cache * @throws IOException */ public void dispose( String cache ) throws IOException { sender.dispose( cache ); } /** * @return * @param key * @throws IOException */ public Serializable get( String key ) throws IOException { //p( "junk get" ); //return get( cattr.cacheName, key, true ); return null; // nothing needs to be done } /** * @return * @param cacheName * @param key * @throws IOException */ public ICacheElement<K, V> get( String cacheName, K key ) throws IOException { //p( "get(cacheName,key,container)" ); CacheElement ce = new CacheElement( cacheName, key, null ); LateralElementDescriptor led = new LateralElementDescriptor( ce ); //led.requesterId = requesterId; // later led.command = led.GET; return sender.sendAndReceive( led ); //return null; // nothing needs to be done } /** * Gets the set of keys of objects currently in the group * throws UnsupportedOperationException */ public Set<K> getGroupKeys(String cacheName, String group) { if (true) { throw new UnsupportedOperationException("Groups not implemented."); } return null; } /** * @param cacheName * @throws IOException */ public void removeAll( String cacheName ) throws IOException { removeAll( cacheName, LateralCacheInfo.listenerId ); } /** * @param cacheName * @param requesterId * @throws IOException */ public void removeAll( String cacheName, long requesterId ) throws IOException { CacheElement ce = new CacheElement( cacheName, "ALL", null ); LateralElementDescriptor led = new LateralElementDescriptor( ce ); led.requesterId = requesterId; led.command = led.REMOVEALL; sender.send( led ); } /** * @param args */ public static void main( String args[] ) { try { LateralXMLRPCSender sender = new LateralXMLRPCSender( new LateralCacheAttributes() ); // process user input till done boolean notDone = true; String message = null; // wait to dispose BufferedReader br = new BufferedReader( new InputStreamReader( System.in ) ); while ( notDone ) { System.out.println( "enter mesage:" ); message = br.readLine(); CacheElement ce = new CacheElement( "test", "test", message ); LateralElementDescriptor led = new LateralElementDescriptor( ce ); sender.send( led ); } } catch ( Exception e ) { System.out.println( e.toString() ); 
} } // ILateralCacheObserver methods, do nothing here since // the connection is not registered, the udp service is // is not registered. /** * @param cacheName The feature to be added to the CacheListener attribute * @param obj The feature to be added to the CacheListener attribute * @throws IOException */ public void addCacheListener( String cacheName, ICacheListener obj ) throws IOException { // Empty } /** * @param obj The feature to be added to the CacheListener attribute * @throws IOException */ public void addCacheListener( ICacheListener obj ) throws IOException { // Empty } /** * @param cacheName * @param obj * @throws IOException */ public void removeCacheListener( String cacheName, ICacheListener obj ) throws IOException { // Empty } /** * @param obj * @throws IOException */ public void removeCacheListener( ICacheListener obj ) throws IOException { // Empty } }
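/*
 * Usage sketch mirroring the main() method above: build the sender-backed service from a
 * LateralCacheAttributes instance and push a single element to the lateral peer. It assumes the
 * attributes' host/port have been configured elsewhere (e.g. via the cache.ccf properties), and
 * the cache/key/value strings are placeholders; the sketch class name is made up.
 */
package org.apache.commons.jcs.auxiliary.lateral.xmlrpc;

import java.io.IOException;

import org.apache.commons.jcs.auxiliary.lateral.LateralCacheAttributes;
import org.apache.commons.jcs.engine.CacheElement;

class LateralXMLRPCServiceUsageSketch {
    static void example() throws IOException {
        LateralCacheAttributes attrs = new LateralCacheAttributes();
        LateralXMLRPCService service = new LateralXMLRPCService(attrs);

        // update() wraps the element in a LateralElementDescriptor and sends it to the peer.
        service.update(new CacheElement("testCache", "testKey", "testValue"));

        // remove() follows the same send-a-descriptor pattern with a null value.
        service.remove("testCache", "testKey");

        // Close the underlying connection for this cache.
        service.dispose("testCache");
    }
}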
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Autogenerated by Thrift Compiler (0.12.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.storm.generated; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)") public class DRPCRequest implements org.apache.storm.thrift.TBase<DRPCRequest, DRPCRequest._Fields>, java.io.Serializable, Cloneable, Comparable<DRPCRequest> { private static final org.apache.storm.thrift.protocol.TStruct STRUCT_DESC = new org.apache.storm.thrift.protocol.TStruct("DRPCRequest"); private static final org.apache.storm.thrift.protocol.TField FUNC_ARGS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("func_args", org.apache.storm.thrift.protocol.TType.STRING, (short)1); private static final org.apache.storm.thrift.protocol.TField REQUEST_ID_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("request_id", org.apache.storm.thrift.protocol.TType.STRING, (short)2); private static final org.apache.storm.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new DRPCRequestStandardSchemeFactory(); private static final org.apache.storm.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new DRPCRequestTupleSchemeFactory(); private @org.apache.storm.thrift.annotation.Nullable java.lang.String func_args; // required private @org.apache.storm.thrift.annotation.Nullable java.lang.String request_id; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.storm.thrift.TFieldIdEnum { FUNC_ARGS((short)1, "func_args"), REQUEST_ID((short)2, "request_id"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.storm.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // FUNC_ARGS return FUNC_ARGS; case 2: // REQUEST_ID return REQUEST_ID; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. 
*/ @org.apache.storm.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments public static final java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.FUNC_ARGS, new org.apache.storm.thrift.meta_data.FieldMetaData("func_args", org.apache.storm.thrift.TFieldRequirementType.REQUIRED, new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.REQUEST_ID, new org.apache.storm.thrift.meta_data.FieldMetaData("request_id", org.apache.storm.thrift.TFieldRequirementType.REQUIRED, new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.storm.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DRPCRequest.class, metaDataMap); } public DRPCRequest() { } public DRPCRequest( java.lang.String func_args, java.lang.String request_id) { this(); this.func_args = func_args; this.request_id = request_id; } /** * Performs a deep copy on <i>other</i>. */ public DRPCRequest(DRPCRequest other) { if (other.is_set_func_args()) { this.func_args = other.func_args; } if (other.is_set_request_id()) { this.request_id = other.request_id; } } public DRPCRequest deepCopy() { return new DRPCRequest(this); } @Override public void clear() { this.func_args = null; this.request_id = null; } @org.apache.storm.thrift.annotation.Nullable public java.lang.String get_func_args() { return this.func_args; } public void set_func_args(@org.apache.storm.thrift.annotation.Nullable java.lang.String func_args) { this.func_args = func_args; } public void unset_func_args() { this.func_args = null; } /** Returns true if field func_args is set (has been assigned a value) and false otherwise */ public boolean is_set_func_args() { return this.func_args != null; } public void set_func_args_isSet(boolean value) { if (!value) { this.func_args = null; } } @org.apache.storm.thrift.annotation.Nullable public java.lang.String get_request_id() { return this.request_id; } public void set_request_id(@org.apache.storm.thrift.annotation.Nullable java.lang.String request_id) { this.request_id = request_id; } public void unset_request_id() { this.request_id = null; } /** Returns true if field request_id is set (has been assigned a value) and false otherwise */ public boolean is_set_request_id() { return this.request_id != null; } public void set_request_id_isSet(boolean value) { if (!value) { this.request_id = null; } } public void setFieldValue(_Fields field, @org.apache.storm.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case FUNC_ARGS: if (value == null) { unset_func_args(); } else { set_func_args((java.lang.String)value); } break; case REQUEST_ID: if (value == null) { unset_request_id(); } else { set_request_id((java.lang.String)value); } break; } } @org.apache.storm.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { 
switch (field) { case FUNC_ARGS: return get_func_args(); case REQUEST_ID: return get_request_id(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case FUNC_ARGS: return is_set_func_args(); case REQUEST_ID: return is_set_request_id(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that == null) return false; if (that instanceof DRPCRequest) return this.equals((DRPCRequest)that); return false; } public boolean equals(DRPCRequest that) { if (that == null) return false; if (this == that) return true; boolean this_present_func_args = true && this.is_set_func_args(); boolean that_present_func_args = true && that.is_set_func_args(); if (this_present_func_args || that_present_func_args) { if (!(this_present_func_args && that_present_func_args)) return false; if (!this.func_args.equals(that.func_args)) return false; } boolean this_present_request_id = true && this.is_set_request_id(); boolean that_present_request_id = true && that.is_set_request_id(); if (this_present_request_id || that_present_request_id) { if (!(this_present_request_id && that_present_request_id)) return false; if (!this.request_id.equals(that.request_id)) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((is_set_func_args()) ? 131071 : 524287); if (is_set_func_args()) hashCode = hashCode * 8191 + func_args.hashCode(); hashCode = hashCode * 8191 + ((is_set_request_id()) ? 131071 : 524287); if (is_set_request_id()) hashCode = hashCode * 8191 + request_id.hashCode(); return hashCode; } @Override public int compareTo(DRPCRequest other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.valueOf(is_set_func_args()).compareTo(other.is_set_func_args()); if (lastComparison != 0) { return lastComparison; } if (is_set_func_args()) { lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.func_args, other.func_args); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.valueOf(is_set_request_id()).compareTo(other.is_set_request_id()); if (lastComparison != 0) { return lastComparison; } if (is_set_request_id()) { lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.request_id, other.request_id); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.storm.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.storm.thrift.protocol.TProtocol iprot) throws org.apache.storm.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.storm.thrift.protocol.TProtocol oprot) throws org.apache.storm.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("DRPCRequest("); boolean first = true; sb.append("func_args:"); if (this.func_args == null) { sb.append("null"); } else { sb.append(this.func_args); } first = false; if (!first) sb.append(", "); sb.append("request_id:"); if (this.request_id == null) { sb.append("null"); } else { sb.append(this.request_id); } first = false; 
sb.append(")"); return sb.toString(); } public void validate() throws org.apache.storm.thrift.TException { // check for required fields if (!is_set_func_args()) { throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'func_args' is unset! Struct:" + toString()); } if (!is_set_request_id()) { throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'request_id' is unset! Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.storm.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { read(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.storm.thrift.TException te) { throw new java.io.IOException(te); } } private static class DRPCRequestStandardSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory { public DRPCRequestStandardScheme getScheme() { return new DRPCRequestStandardScheme(); } } private static class DRPCRequestStandardScheme extends org.apache.storm.thrift.scheme.StandardScheme<DRPCRequest> { public void read(org.apache.storm.thrift.protocol.TProtocol iprot, DRPCRequest struct) throws org.apache.storm.thrift.TException { org.apache.storm.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.storm.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // FUNC_ARGS if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRING) { struct.func_args = iprot.readString(); struct.set_func_args_isSet(true); } else { org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // REQUEST_ID if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRING) { struct.request_id = iprot.readString(); struct.set_request_id_isSet(true); } else { org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.storm.thrift.protocol.TProtocol oprot, DRPCRequest struct) throws org.apache.storm.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.func_args != null) { oprot.writeFieldBegin(FUNC_ARGS_FIELD_DESC); oprot.writeString(struct.func_args); oprot.writeFieldEnd(); } if (struct.request_id != null) { oprot.writeFieldBegin(REQUEST_ID_FIELD_DESC); oprot.writeString(struct.request_id); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class DRPCRequestTupleSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory { public DRPCRequestTupleScheme getScheme() { return new DRPCRequestTupleScheme(); } } private static class DRPCRequestTupleScheme extends org.apache.storm.thrift.scheme.TupleScheme<DRPCRequest> { @Override public void write(org.apache.storm.thrift.protocol.TProtocol prot, DRPCRequest struct) throws org.apache.storm.thrift.TException { org.apache.storm.thrift.protocol.TTupleProtocol oprot = 
(org.apache.storm.thrift.protocol.TTupleProtocol) prot; oprot.writeString(struct.func_args); oprot.writeString(struct.request_id); } @Override public void read(org.apache.storm.thrift.protocol.TProtocol prot, DRPCRequest struct) throws org.apache.storm.thrift.TException { org.apache.storm.thrift.protocol.TTupleProtocol iprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot; struct.func_args = iprot.readString(); struct.set_func_args_isSet(true); struct.request_id = iprot.readString(); struct.set_request_id_isSet(true); } } private static <S extends org.apache.storm.thrift.scheme.IScheme> S scheme(org.apache.storm.thrift.protocol.TProtocol proto) { return (org.apache.storm.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
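/*
 * Sketch of typical use of the generated struct above: both fields are REQUIRED, so validate()
 * throws if either is unset, while deepCopy/equals/compareTo give plain value semantics over the
 * two strings. The argument strings below are placeholders; the sketch class name is made up.
 */
package org.apache.storm.generated;

import org.apache.storm.thrift.TException;

class DRPCRequestUsageSketch {
    static void example() throws TException {
        DRPCRequest request = new DRPCRequest("[\"some-args\"]", "request-42");
        request.validate(); // passes: func_args and request_id are both set

        DRPCRequest copy = request.deepCopy();
        assert copy.equals(request) && copy.compareTo(request) == 0;

        DRPCRequest empty = new DRPCRequest();
        try {
            empty.validate(); // throws: required field 'func_args' is unset
        } catch (TException expected) {
            // expected for an incomplete request
        }
    }
}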
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.sql.presto; import org.apache.pulsar.shade.org.apache.bookkeeper.stats.StatsProvider; import org.apache.pulsar.shade.org.apache.bookkeeper.stats.NullStatsProvider; import org.apache.pulsar.shade.org.apache.bookkeeper.stats.StatsLogger; import java.util.concurrent.TimeUnit; public class PulsarConnectorMetricsTracker implements AutoCloseable{ private StatsLogger statsLogger; private static final String SCOPE = "split"; /** metric names **/ // time spend waiting to get entry from entry queue because it is empty private static final String ENTRY_QUEUE_DEQUEUE_WAIT_TIME = "entry-queue-dequeue-wait-time"; // total time spend waiting to get entry from entry queue per query private static final String ENTRY_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY = "entry-queue-dequeue-wait-time-per-query"; // number of bytes read from bookkeeper private static final String BYTES_READ = "bytes-read"; // total number of bytes read per query private static final String BYTES_READ_PER_QUERY = "bytes-read-per-query"; // time spent derserializing entries private static final String ENTRY_DESERIALIZE_TIME = "entry-deserialize-time"; // time spent derserializing entries per query private static final String ENTRY_DESERIALIZE_TIME_PER_QUERY = "entry-deserialize-time_per_query"; // time spent waiting for message queue enqueue because message queue is full private static final String MESSAGE_QUEUE_ENQUEUE_WAIT_TIME = "message-queue-enqueue-wait-time"; // time spent waiting for message queue enqueue because message queue is full per query private static final String MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_PER_QUERY = "message-queue-enqueue-wait-time-per-query"; private static final String NUM_MESSAGES_DERSERIALIZED = "num-messages-deserialized"; // number of messages deserialized public static final String NUM_MESSAGES_DERSERIALIZED_PER_ENTRY = "num-messages-deserialized-per-entry"; // number of messages deserialized per query public static final String NUM_MESSAGES_DERSERIALIZED_PER_QUERY = "num-messages-deserialized-per-query"; // number of read attempts. 
Will fail if queues are full public static final String READ_ATTEMTPS = "read-attempts"; // number of read attempts per query public static final String READ_ATTEMTPS_PER_QUERY= "read-attempts-per-query"; // latency of reads per batch public static final String READ_LATENCY_PER_BATCH = "read-latency-per-batch"; // total read latency per query public static final String READ_LATENCY_PER_QUERY = "read-latency-per-query"; // number of entries per batch public static final String NUM_ENTRIES_PER_BATCH = "num-entries-per-batch"; // number of entries per query public static final String NUM_ENTRIES_PER_QUERY = "num-entries-per-query"; // time spent waiting to dequeue from message queue because its empty public static final String MESSAGE_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY = "message-queue-dequeue-wait-time-per-query"; // time spent deserializing message to record e.g. avro, json, etc public static final String RECORD_DESERIALIZE_TIME = "record-deserialize-time"; // time spent deserializing message to record per query private static final String RECORD_DESERIALIZE_TIME_PER_QUERY = "record-deserialize-time-per-query"; private static final String NUM_RECORD_DESERIALIZED = "num-record-deserialized"; private static final String TOTAL_EXECUTION_TIME = "total-execution-time"; /** internal tracking variables **/ private long ENTRY_QUEUE_DEQUEUE_WAIT_TIME_startTime; private long ENTRY_QUEUE_DEQUEUE_WAIT_TIME_sum = 0L; private long BYTES_READ_sum = 0L; private long ENTRY_DESERIALIZE_TIME_startTime; private long ENTRY_DESERIALIZE_TIME_sum = 0L; private long MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_startTime; private long MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_sum = 0L; private long NUM_MESSAGES_DERSERIALIZED_sum = 0L; private long NUM_MESSAGED_DERSERIALIZED_PER_BATCH = 0L; private long READ_ATTEMTPS_SUCCESS_sum = 0L; private long READ_ATTEMTPS_FAIL_sum = 0L; private long READ_LATENCY_SUCCESS_sum = 0L; private long READ_LATENCY_FAIL_sum = 0L; private long NUM_ENTRIES_PER_BATCH_sum = 0L; private long MESSAGE_QUEUE_DEQUEUE_WAIT_TIME_sum = 0L; private long RECORD_DESERIALIZE_TIME_startTime; private long RECORD_DESERIALIZE_TIME_sum = 0L; public PulsarConnectorMetricsTracker(StatsProvider statsProvider) { this.statsLogger = statsProvider instanceof NullStatsProvider ? 
null : statsProvider.getStatsLogger(SCOPE); } public void start_ENTRY_QUEUE_DEQUEUE_WAIT_TIME() { if (statsLogger != null) { ENTRY_QUEUE_DEQUEUE_WAIT_TIME_startTime = System.nanoTime(); } } public void end_ENTRY_QUEUE_DEQUEUE_WAIT_TIME() { if (statsLogger != null) { long time = System.nanoTime() - ENTRY_QUEUE_DEQUEUE_WAIT_TIME_startTime; ENTRY_QUEUE_DEQUEUE_WAIT_TIME_sum += time; statsLogger.getOpStatsLogger(ENTRY_QUEUE_DEQUEUE_WAIT_TIME) .registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public void register_BYTES_READ(long bytes) { if (statsLogger != null) { BYTES_READ_sum += bytes; statsLogger.getCounter(BYTES_READ).add(bytes); } } public void start_ENTRY_DESERIALIZE_TIME() { if (statsLogger != null) { ENTRY_DESERIALIZE_TIME_startTime = System.nanoTime(); } } public void end_ENTRY_DESERIALIZE_TIME() { if (statsLogger != null) { long time = System.nanoTime() - ENTRY_DESERIALIZE_TIME_startTime; ENTRY_DESERIALIZE_TIME_sum += time; statsLogger.getOpStatsLogger(ENTRY_DESERIALIZE_TIME) .registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public void start_MESSAGE_QUEUE_ENQUEUE_WAIT_TIME() { if (statsLogger != null) { MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_startTime = System.nanoTime(); } } public void end_MESSAGE_QUEUE_ENQUEUE_WAIT_TIME() { if (statsLogger != null) { long time = System.nanoTime() - MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_startTime; MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_sum += time; statsLogger.getOpStatsLogger(MESSAGE_QUEUE_ENQUEUE_WAIT_TIME) .registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public void incr_NUM_MESSAGES_DESERIALIZED_PER_ENTRY() { if (statsLogger != null) { NUM_MESSAGED_DERSERIALIZED_PER_BATCH++; statsLogger.getCounter(NUM_MESSAGES_DERSERIALIZED).add(1); } } public void end_NUM_MESSAGES_DESERIALIZED_PER_ENTRY() { if (statsLogger != null) { NUM_MESSAGES_DERSERIALIZED_sum += NUM_MESSAGED_DERSERIALIZED_PER_BATCH; statsLogger.getOpStatsLogger(NUM_MESSAGES_DERSERIALIZED_PER_ENTRY) .registerSuccessfulValue(NUM_MESSAGED_DERSERIALIZED_PER_BATCH); NUM_MESSAGED_DERSERIALIZED_PER_BATCH = 0L; } } public void incr_READ_ATTEMPTS_SUCCESS() { if (statsLogger != null) { READ_ATTEMTPS_SUCCESS_sum++; statsLogger.getOpStatsLogger(READ_ATTEMTPS) .registerSuccessfulValue(1L); } } public void incr_READ_ATTEMPTS_FAIL() { if (statsLogger != null) { READ_ATTEMTPS_FAIL_sum++; statsLogger.getOpStatsLogger(READ_ATTEMTPS) .registerFailedValue(1L); } } public void register_READ_LATENCY_PER_BATCH_SUCCESS(long latency) { if (statsLogger != null) { READ_LATENCY_SUCCESS_sum += latency; statsLogger.getOpStatsLogger(READ_LATENCY_PER_BATCH) .registerSuccessfulEvent(latency, TimeUnit.NANOSECONDS); } } public void register_READ_LATENCY_PER_BATCH_FAIL(long latency) { if (statsLogger != null) { READ_LATENCY_FAIL_sum += latency; statsLogger.getOpStatsLogger(READ_LATENCY_PER_BATCH) .registerFailedEvent(latency, TimeUnit.NANOSECONDS); } } public void incr_NUM_ENTRIES_PER_BATCH_SUCCESS(long delta) { if (statsLogger != null) { NUM_ENTRIES_PER_BATCH_sum += delta; statsLogger.getOpStatsLogger(NUM_ENTRIES_PER_BATCH) .registerSuccessfulValue(delta); } } public void incr_NUM_ENTRIES_PER_BATCH_FAIL(long delta) { if (statsLogger != null) { statsLogger.getOpStatsLogger(NUM_ENTRIES_PER_BATCH) .registerFailedValue(delta); } } public void register_MESSAGE_QUEUE_DEQUEUE_WAIT_TIME(long latency) { if (statsLogger != null) { MESSAGE_QUEUE_DEQUEUE_WAIT_TIME_sum += latency; } } public void start_RECORD_DESERIALIZE_TIME() { if (statsLogger != null) { RECORD_DESERIALIZE_TIME_startTime = System.nanoTime(); } } public void 
end_RECORD_DESERIALIZE_TIME() { if (statsLogger != null) { long time = System.nanoTime() - RECORD_DESERIALIZE_TIME_startTime; RECORD_DESERIALIZE_TIME_sum += time; statsLogger.getOpStatsLogger(RECORD_DESERIALIZE_TIME) .registerSuccessfulEvent(time, TimeUnit.NANOSECONDS); } } public void incr_NUM_RECORD_DESERIALIZED() { if (statsLogger != null) { statsLogger.getCounter(NUM_RECORD_DESERIALIZED).add(1); } } public void register_TOTAL_EXECUTION_TIME(long latency) { if (statsLogger != null) { statsLogger.getOpStatsLogger(TOTAL_EXECUTION_TIME) .registerSuccessfulEvent(latency, TimeUnit.NANOSECONDS); } } @Override public void close() { if (statsLogger != null) { // register total entry dequeue wait time for query statsLogger.getOpStatsLogger(ENTRY_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY) .registerSuccessfulEvent(ENTRY_QUEUE_DEQUEUE_WAIT_TIME_sum, TimeUnit.NANOSECONDS); //register bytes read per query statsLogger.getOpStatsLogger(BYTES_READ_PER_QUERY) .registerSuccessfulValue(BYTES_READ_sum); // register total time spent deserializing entries for query statsLogger.getOpStatsLogger(ENTRY_DESERIALIZE_TIME_PER_QUERY) .registerSuccessfulEvent(ENTRY_DESERIALIZE_TIME_sum, TimeUnit.NANOSECONDS); // register time spent waiting for message queue enqueue because message queue is full per query statsLogger.getOpStatsLogger(MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_PER_QUERY) .registerSuccessfulEvent(MESSAGE_QUEUE_ENQUEUE_WAIT_TIME_sum, TimeUnit.NANOSECONDS); // register number of messages deserialized per query statsLogger.getOpStatsLogger(NUM_MESSAGES_DERSERIALIZED_PER_QUERY) .registerSuccessfulValue(NUM_MESSAGES_DERSERIALIZED_sum); // register number of read attempts per query statsLogger.getOpStatsLogger(READ_ATTEMTPS_PER_QUERY) .registerSuccessfulValue(READ_ATTEMTPS_SUCCESS_sum); statsLogger.getOpStatsLogger(READ_ATTEMTPS_PER_QUERY) .registerFailedValue(READ_ATTEMTPS_FAIL_sum); // register total read latency for query statsLogger.getOpStatsLogger(READ_LATENCY_PER_QUERY) .registerSuccessfulEvent(READ_LATENCY_SUCCESS_sum, TimeUnit.NANOSECONDS); statsLogger.getOpStatsLogger(READ_LATENCY_PER_QUERY) .registerFailedEvent(READ_LATENCY_FAIL_sum, TimeUnit.NANOSECONDS); // register number of entries per query statsLogger.getOpStatsLogger(NUM_ENTRIES_PER_QUERY) .registerSuccessfulValue(NUM_ENTRIES_PER_BATCH_sum); // register time spent waiting to read for message queue per query statsLogger.getOpStatsLogger(MESSAGE_QUEUE_DEQUEUE_WAIT_TIME_PER_QUERY) .registerSuccessfulEvent(MESSAGE_QUEUE_DEQUEUE_WAIT_TIME_sum, TimeUnit.MILLISECONDS); // register time spent deserializing records per query statsLogger.getOpStatsLogger(RECORD_DESERIALIZE_TIME_PER_QUERY) .registerSuccessfulEvent(RECORD_DESERIALIZE_TIME_sum, TimeUnit.NANOSECONDS); } } }
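/*
 * Illustrative usage sketch, not part of the connector sources above. It shows the
 * start/end bracketing pattern of the tracker methods and the final close() call that
 * publishes the per-query aggregates. Only the PulsarConnectorMetricsTracker calls come
 * from the class above; the readEntry/finish methods and the entryQueue parameter are
 * hypothetical stand-ins for the connector's real read loop, and the sketch assumes it
 * sits in the same package as the tracker.
 */
class PulsarConnectorMetricsTrackerUsageSketch {

    /** Hypothetical per-entry processing loop illustrating the metric call pattern. */
    void readEntry(PulsarConnectorMetricsTracker tracker, java.util.Queue<byte[]> entryQueue) {
        // time spent waiting on the entry queue is bracketed by start/end calls
        tracker.start_ENTRY_QUEUE_DEQUEUE_WAIT_TIME();
        byte[] entry = entryQueue.poll();
        tracker.end_ENTRY_QUEUE_DEQUEUE_WAIT_TIME();
        if (entry == null) {
            return;
        }
        tracker.register_BYTES_READ(entry.length);

        // entry -> messages deserialization is bracketed the same way
        tracker.start_ENTRY_DESERIALIZE_TIME();
        // ... deserialize the entry into messages (omitted); call once per message produced:
        tracker.incr_NUM_MESSAGES_DESERIALIZED_PER_ENTRY();
        tracker.end_ENTRY_DESERIALIZE_TIME();
        // closes the per-entry message count and resets it for the next entry
        tracker.end_NUM_MESSAGES_DESERIALIZED_PER_ENTRY();
    }

    /** At the end of the split, close() flushes the accumulated per-query statistics. */
    void finish(PulsarConnectorMetricsTracker tracker, long totalExecutionNanos) {
        tracker.register_TOTAL_EXECUTION_TIME(totalExecutionNanos);
        tracker.close();
    }
}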
/** * */ package org.socraticgrid.hl7.services.uc.model; import static org.junit.Assert.*; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.net.URL; import java.sql.Connection; import java.sql.Statement; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.apache.ibatis.session.SqlSession; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.FixMethodOrder; import org.junit.Ignore; import org.junit.Test; import org.junit.runners.MethodSorters; import org.socraticgrid.hl7.services.uc.db.MessageHeaderSelectType; import org.socraticgrid.hl7.services.uc.db.UCSDataConnection; import org.socraticgrid.hl7.services.uc.db.UCSDataService; import org.socraticgrid.hl7.services.uc.db.dto.DTOMessageType; import org.socraticgrid.hl7.services.uc.db.dto.UCSDto; import org.socraticgrid.hl7.services.uc.internal.idgenerators.TimeBasedIdGenerator; /** * @author steven * @created Jan 23, 2014 * */ // So the MessageHeader can be inserted then selected then updated...ISUD w/o the D @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class UCSDataService_SimpleMessage_TestIT { static SimpleMessage simpleMessage; static List<Recipient> recipients = new ArrayList<>(); //Because Recipient references DeliveryAddress static List<DeliveryAddress> deliveryAddresses = new ArrayList<>(); //Because DeliveryAddress references Address static List<BaseAddress> addresses = new ArrayList<>(); //Reply to UserContactInfo static UserContactInfo userContactInfo; static Properties userContactInfoProps = new Properties(); static{ userContactInfoProps.setProperty("name","testUser ContactInfo"); userContactInfoProps.setProperty("endpoint","testUser@some.endpoint.com"); } static PhysicalAddress prefAddress; static StringBuffer addressesToBeDeleted = new StringBuffer(); static StringBuffer deliveryAddressesToBeDeleted = new StringBuffer(); static StringBuffer recipientsToBeDeleted = new StringBuffer(); static StringBuffer exceptionsToBeDeleted = new StringBuffer(); static StringBuffer messageBodyToBeDeleted = new StringBuffer(); static String comma = ""; static String subject = "Testing MessageHeader 1 2 3"; static DeliveryAddress sender; static TimeBasedIdGenerator timebasedIdGenerator = new TimeBasedIdGenerator(); @BeforeClass public static void init() throws FileNotFoundException, IOException { System.out.println("\n\n****************************"); System.out.println("************* RUNNING - UCSDataService_SimpleMessage_TestIT"); System.out.println("****************************\n\n"); //We need an actual message impl to set the MessageId and MessageHeaderId simpleMessage = new SimpleMessage(timebasedIdGenerator.getNewId()); SimpleMessageHeader testMessageHeader = simpleMessage.getHeader(); //Set an initial subject testMessageHeader.setSubject(subject); //Set an initial sender sender = TestUtil.buildDeliveryAddressList(1,deliveryAddressesToBeDeleted, addressesToBeDeleted).iterator().next(); testMessageHeader.setSender(sender); //Set some Properties URL url = Thread.currentThread().getContextClassLoader().getResource("Test-Properties.properties"); Properties props = new Properties(); props.load(new FileReader(url.getFile())); testMessageHeader.setProperties(props); //Get some initial recipients - 5 testMessageHeader.setRecipientsList(TestUtil.buildRecipientList(5 ,recipientsToBeDeleted ,deliveryAddressesToBeDeleted ,addressesToBeDeleted)); //Initialize the UserContactInfo userContactInfo = new UserContactInfo(); 
userContactInfo.setUserContactInfoId(timebasedIdGenerator.getNewId()); userContactInfo.setName(userContactInfoProps.getProperty("name")); userContactInfo.setEndpoint(userContactInfoProps.getProperty("endpoint")); prefAddress = new PhysicalAddress(); prefAddress.setAddressId(timebasedIdGenerator.getNewId()); prefAddress.setAddress("http://someserver.com:8080/someservice/~selliott/sendmessge"); userContactInfo.setPreferredAddress(prefAddress); //Add in the availAddresses userContactInfo.setAddressesByType(TestUtil.buildPhysicalAddressByTypeList(5,addressesToBeDeleted)); testMessageHeader.setReplyTo(userContactInfo); //Add in MessageBody simpleMessage.setParts(TestUtil.buildMessageBodys(2,messageBodyToBeDeleted)); //Add in MessageBody simpleMessage.setExceptions(TestUtil.buildProcessingExceptionList(3,exceptionsToBeDeleted)); } @AfterClass public static void removeElementsFromDB() { SqlSession sqlSession = null; try{ SimpleMessageHeader testMessageHeader = simpleMessage.getHeader(); sqlSession = UCSDataConnection.getSqlSession(); Connection con = sqlSession.getConnection(); Statement stmt = con.createStatement(); stmt.execute("DELETE FROM message_header_join_sender WHERE message_header_id = '"+testMessageHeader.getMessageHeaderId()+"'"); stmt.execute("DELETE FROM message_join_message_body WHERE message_id = '"+testMessageHeader.getMessageId()+"'"); stmt.execute("DELETE FROM message_join_processing_exception WHERE message_id = '"+testMessageHeader.getMessageId()+"'"); stmt.execute("DELETE FROM message_header_join_recipient WHERE message_header_id = '"+testMessageHeader.getMessageHeaderId()+"'"); stmt.execute("DELETE FROM message_header WHERE message_header_id = '"+testMessageHeader.getMessageHeaderId()+"'"); stmt.execute("DELETE FROM recipient WHERE recipient_id IN ( "+recipientsToBeDeleted+" )"); stmt.execute("DELETE FROM delivery_address WHERE delivery_address_id IN ("+deliveryAddressesToBeDeleted+")"); //Clean up addresses stmt.execute("DELETE FROM user_contact_info_join_address WHERE user_contact_info_id = '"+userContactInfo.getUserContactInfoId()+"'"); stmt.execute("DELETE FROM address WHERE address_id IN ("+addressesToBeDeleted+")"); stmt.execute("DELETE FROM user_contact_info WHERE user_contact_info_id = '"+userContactInfo.getUserContactInfoId()+"'"); stmt.execute("DELETE FROM address WHERE address_id = '"+userContactInfo.getPreferredAddress().getAddressId()+"'"); stmt.execute("DELETE FROM message_body WHERE message_body_id IN ("+messageBodyToBeDeleted+")"); stmt.execute("DELETE FROM processing_exception WHERE processing_exception_id IN ("+exceptionsToBeDeleted+")"); } catch(Exception e){ e.printStackTrace(System.err); } finally{ if(sqlSession!=null){ sqlSession.close(); } } } @Test //Insert the MessageHeader public void test1() { UCSDto<SimpleMessage> dto = new UCSDto<>(); dto.getListT().add(simpleMessage); UCSDataService.insertMessage(dto); StringBuffer failures = new StringBuffer(); for(DTOMessageType msgType : dto.getMsgs().keySet()){ if(msgType.equals(DTOMessageType.ERROR) || msgType.equals(DTOMessageType.WARN)){ failures.append(dto.getMsgs().get(msgType)); } } if(failures.length()!=0){ fail(failures.toString()); } } @Ignore("Issues with recipients - And general issues with Ids") @Test //Select the MessageHeader public void test2() { UCSDto<SimpleMessage> dto = new UCSDto<>(); dto.getAdHocParams().put(MessageHeaderSelectType.BYMSGID.getQueryId(),simpleMessage.getHeader().getMessageHeaderId()); UCSDataService.selectMessage(dto); StringBuffer failures = new StringBuffer(); 
for(DTOMessageType msgType : dto.getMsgs().keySet()){ if(msgType.equals(DTOMessageType.ERROR) || msgType.equals(DTOMessageType.WARN)){ failures.append(dto.getMsgs().get(msgType)); } } if(failures.length()!=0){ fail(failures.toString()); } else{ assertNotNull(dto.getListT().get(0)); SimpleMessage savedMessage = dto.getListT().get(0); SimpleMessageHeader messageHeader = savedMessage.getHeader(); assertTrue(messageHeader.getMessageType().equals(MessageType.SimpleMessage)); assertTrue(messageHeader.getSubject().equals(simpleMessage.getHeader().getSubject())); assertTrue(messageHeader.getSender().equals(simpleMessage.getHeader().getSender())); assertTrue(messageHeader.getProperties().equals(simpleMessage.getHeader().getProperties())); //Equality by Map.equals(Object o) //Check the recipients list assertTrue( (messageHeader.getRecipientsList().size()==simpleMessage.getHeader().getRecipientsList().size()) ); //Check the UserContactInfo UserContactInfo uci = messageHeader.getReplyTo(); assertTrue( uci.getName().equals(userContactInfo.getName()) ); assertTrue( uci.getEndpoint().equals(userContactInfo.getEndpoint()) ); PhysicalAddress userContactPreferredAddress = uci.getPreferredAddress(); assertTrue( userContactPreferredAddress.getAddressId().equals(prefAddress.getAddressId()) ); assertTrue( userContactPreferredAddress.getAddress().equals(prefAddress.getAddress()) ); assertTrue( userContactPreferredAddress.getAddressType().equals(prefAddress.getAddressType()) ); //Check the MessageBodys TODO: do more in depth MessageBody testing assertTrue( savedMessage.getParts().length == simpleMessage.getParts().length ); //Check the ProcessingExceptions TODO: do more in depth ProcessingException testing assertTrue( savedMessage.getExceptions().size() == simpleMessage.getExceptions().size()); } } @Ignore("Issues with recipients - And general issues with Ids") @Test //Update the MessageHeader public void test3() throws FileNotFoundException, IOException { //Get a new subject String nuSubject = "Surprise - this is not the original subject"; simpleMessage.getHeader().setSubject(nuSubject); //Update the sender DeliveryAddress nuSender = TestUtil.buildDeliveryAddressList(1,deliveryAddressesToBeDeleted, addressesToBeDeleted).iterator().next(); simpleMessage.getHeader().setSender(nuSender); //Get a new list of recipients - 2 int numRecipients = 2; simpleMessage.getHeader().setRecipientsList(TestUtil.buildRecipientList(numRecipients ,recipientsToBeDeleted ,deliveryAddressesToBeDeleted ,addressesToBeDeleted)); //Update the properties URL url = Thread.currentThread().getContextClassLoader().getResource("Test-Properties2.properties"); Properties props = new Properties(); props.load(new FileReader(url.getFile())); simpleMessage.getHeader().setProperties(props); //Do the update UCSDto<SimpleMessage> dto = new UCSDto<>(); dto.getListT().add(simpleMessage); UCSDataService.updateMessage(dto); StringBuffer failures = new StringBuffer(); for(DTOMessageType msgType : dto.getMsgs().keySet()){ if(msgType.equals(DTOMessageType.ERROR) || msgType.equals(DTOMessageType.WARN)){ failures.append(dto.getMsgs().get(msgType)); } } if(failures.length()!=0){ fail(failures.toString()); } //Retrieve the updated MessageHeader dto = new UCSDto<>(); dto.getAdHocParams().put(MessageHeaderSelectType.BYMSGID.getQueryId(),simpleMessage.getHeader().getMessageHeaderId()); UCSDataService.selectMessage(dto); failures = new StringBuffer(); for(DTOMessageType msgType : dto.getMsgs().keySet()){ if(msgType.equals(DTOMessageType.ERROR) || 
msgType.equals(DTOMessageType.WARN)){ failures.append(dto.getMsgs().get(msgType)); } } if(failures.length()!=0){ fail(failures.toString()); } SimpleMessage savedMessage = dto.getListT().get(0); SimpleMessageHeader messageHeader = savedMessage.getHeader(); assertTrue(messageHeader.getMessageType().equals(MessageType.SimpleMessage)); assertTrue(messageHeader.getSubject().equals(nuSubject)); assertTrue(messageHeader.getSender().getDeliveryAddressId().equals(nuSender.getDeliveryAddressId())); assertTrue(messageHeader.getProperties().equals(props)); //Equality by Map.equals(Object o) //Check the recipients list assertTrue( (messageHeader.getRecipientsList().size()==numRecipients) ); //Check the UserContactInfo UserContactInfo uci = messageHeader.getReplyTo(); assertTrue( uci.getName().equals(userContactInfo.getName()) ); assertTrue( uci.getEndpoint().equals(userContactInfo.getEndpoint()) ); PhysicalAddress userContactPreferredAddress = uci.getPreferredAddress(); assertTrue( userContactPreferredAddress.getAddressId().equals(prefAddress.getAddressId()) ); assertTrue( userContactPreferredAddress.getAddress().equals(prefAddress.getAddress()) ); assertTrue( userContactPreferredAddress.getAddressType().equals(prefAddress.getAddressType()) ); //Check the MessageBodys TODO: do more in depth MessageBody testing assertTrue( savedMessage.getParts().length == simpleMessage.getParts().length ); //Check the ProcessingExceptions TODO: do more in depth ProcessingException testing assertTrue( savedMessage.getExceptions().size() == simpleMessage.getExceptions().size()); } }
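/*
 * Illustrative helper, not part of the test class above. test1, test2 and test3 all repeat
 * the same loop that concatenates ERROR and WARN messages from the UCSDto before failing;
 * this is a minimal sketch of how that loop could be factored out. It assumes only what the
 * tests already use: getMsgs() is a map keyed by DTOMessageType whose values can be appended
 * to a StringBuffer. The class and method names here are hypothetical.
 */
import org.socraticgrid.hl7.services.uc.db.dto.DTOMessageType;
import org.socraticgrid.hl7.services.uc.db.dto.UCSDto;

final class UCSDtoAssertSketch {

    private UCSDtoAssertSketch() {
    }

    /** Returns the concatenated ERROR and WARN messages, or an empty string when there are none. */
    static String collectFailures(UCSDto<?> dto) {
        StringBuffer failures = new StringBuffer();
        for (DTOMessageType msgType : dto.getMsgs().keySet()) {
            if (msgType.equals(DTOMessageType.ERROR) || msgType.equals(DTOMessageType.WARN)) {
                failures.append(dto.getMsgs().get(msgType));
            }
        }
        return failures.toString();
    }
}
// Each test could then reduce its boilerplate to:
//   String failures = UCSDtoAssertSketch.collectFailures(dto);
//   if (!failures.isEmpty()) { fail(failures); }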
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.masterdb.security; import static org.testng.AssertJUnit.assertNotNull; import java.lang.reflect.Constructor; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import org.apache.commons.lang.RandomStringUtils; import org.joda.beans.BeanBuilder; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaBean; import org.joda.beans.MetaProperty; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.annotations.Test; import org.threeten.bp.Clock; import org.threeten.bp.LocalDate; import org.threeten.bp.LocalDateTime; import org.threeten.bp.LocalTime; import org.threeten.bp.ZoneId; import org.threeten.bp.ZoneOffset; import org.threeten.bp.ZonedDateTime; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.core.id.ExternalSchemes; import com.opengamma.core.region.Region; import com.opengamma.core.region.RegionSource; import com.opengamma.financial.convention.businessday.BusinessDayConvention; import com.opengamma.financial.convention.businessday.BusinessDayConventions; import com.opengamma.financial.convention.daycount.DayCount; import com.opengamma.financial.convention.daycount.DayCountFactory; import com.opengamma.financial.convention.daycount.DayCounts; import com.opengamma.financial.convention.frequency.Frequency; import com.opengamma.financial.convention.frequency.SimpleFrequency; import com.opengamma.financial.convention.yield.SimpleYieldConvention; import com.opengamma.financial.convention.yield.YieldConvention; import com.opengamma.financial.security.bond.CorporateBondSecurity; import com.opengamma.financial.security.bond.GovernmentBondSecurity; import com.opengamma.financial.security.bond.MunicipalBondSecurity; import com.opengamma.financial.security.capfloor.CapFloorCMSSpreadSecurity; import com.opengamma.financial.security.capfloor.CapFloorSecurity; import com.opengamma.financial.security.cash.CashSecurity; import com.opengamma.financial.security.cashflow.CashFlowSecurity; import com.opengamma.financial.security.cds.CDSIndexComponentBundle; import com.opengamma.financial.security.cds.CDSIndexTerms; import com.opengamma.financial.security.cds.CDSSecurity; import com.opengamma.financial.security.cds.CreditDefaultSwapIndexComponent; import com.opengamma.financial.security.cds.CreditDefaultSwapIndexDefinitionSecurity; import com.opengamma.financial.security.cds.CreditDefaultSwapIndexSecurity; import com.opengamma.financial.security.cds.LegacyFixedRecoveryCDSSecurity; import com.opengamma.financial.security.cds.LegacyRecoveryLockCDSSecurity; import com.opengamma.financial.security.cds.LegacyVanillaCDSSecurity; import com.opengamma.financial.security.cds.StandardFixedRecoveryCDSSecurity; import com.opengamma.financial.security.cds.StandardRecoveryLockCDSSecurity; import com.opengamma.financial.security.cds.StandardVanillaCDSSecurity; import com.opengamma.financial.security.equity.EquitySecurity; import com.opengamma.financial.security.equity.EquityVarianceSwapSecurity; import com.opengamma.financial.security.equity.GICSCode; import 
com.opengamma.financial.security.fra.FRASecurity; import com.opengamma.financial.security.future.AgricultureFutureSecurity; import com.opengamma.financial.security.future.BondFutureDeliverable; import com.opengamma.financial.security.future.BondFutureSecurity; import com.opengamma.financial.security.future.EnergyFutureSecurity; import com.opengamma.financial.security.future.FXFutureSecurity; import com.opengamma.financial.security.future.IndexFutureSecurity; import com.opengamma.financial.security.future.InterestRateFutureSecurity; import com.opengamma.financial.security.future.MetalFutureSecurity; import com.opengamma.financial.security.future.StockFutureSecurity; import com.opengamma.financial.security.fx.FXForwardSecurity; import com.opengamma.financial.security.fx.NonDeliverableFXForwardSecurity; import com.opengamma.financial.security.index.BondIndex; import com.opengamma.financial.security.index.BondIndexComponent; import com.opengamma.financial.security.index.EquityIndex; import com.opengamma.financial.security.index.EquityIndexComponent; import com.opengamma.financial.security.index.IborIndex; import com.opengamma.financial.security.index.IndexFamily; import com.opengamma.financial.security.index.OvernightIndex; import com.opengamma.financial.security.option.AmericanExerciseType; import com.opengamma.financial.security.option.AsianExerciseType; import com.opengamma.financial.security.option.AssetOrNothingPayoffStyle; import com.opengamma.financial.security.option.AsymmetricPoweredPayoffStyle; import com.opengamma.financial.security.option.BarrierPayoffStyle; import com.opengamma.financial.security.option.BermudanExerciseType; import com.opengamma.financial.security.option.CappedPoweredPayoffStyle; import com.opengamma.financial.security.option.CashOrNothingPayoffStyle; import com.opengamma.financial.security.option.CreditDefaultSwapOptionSecurity; import com.opengamma.financial.security.option.EquityBarrierOptionSecurity; import com.opengamma.financial.security.option.EquityIndexDividendFutureOptionSecurity; import com.opengamma.financial.security.option.EquityIndexOptionSecurity; import com.opengamma.financial.security.option.EquityOptionSecurity; import com.opengamma.financial.security.option.EuropeanExerciseType; import com.opengamma.financial.security.option.ExerciseType; import com.opengamma.financial.security.option.ExtremeSpreadPayoffStyle; import com.opengamma.financial.security.option.FXBarrierOptionSecurity; import com.opengamma.financial.security.option.FXDigitalOptionSecurity; import com.opengamma.financial.security.option.FXOptionSecurity; import com.opengamma.financial.security.option.FadeInPayoffStyle; import com.opengamma.financial.security.option.FixedStrikeLookbackPayoffStyle; import com.opengamma.financial.security.option.FloatingStrikeLookbackPayoffStyle; import com.opengamma.financial.security.option.GapPayoffStyle; import com.opengamma.financial.security.option.IRFutureOptionSecurity; import com.opengamma.financial.security.option.NonDeliverableFXOptionSecurity; import com.opengamma.financial.security.option.PayoffStyle; import com.opengamma.financial.security.option.PoweredPayoffStyle; import com.opengamma.financial.security.option.SimpleChooserPayoffStyle; import com.opengamma.financial.security.option.SupersharePayoffStyle; import com.opengamma.financial.security.option.SwaptionSecurity; import com.opengamma.financial.security.option.VanillaPayoffStyle; import com.opengamma.financial.security.swap.CommodityNotional; import 
com.opengamma.financial.security.swap.FixedInterestRateLeg; import com.opengamma.financial.security.swap.FixedVarianceSwapLeg; import com.opengamma.financial.security.swap.FloatingGearingIRLeg; import com.opengamma.financial.security.swap.FloatingInterestRateLeg; import com.opengamma.financial.security.swap.FloatingSpreadIRLeg; import com.opengamma.financial.security.swap.FloatingVarianceSwapLeg; import com.opengamma.financial.security.swap.ForwardSwapSecurity; import com.opengamma.financial.security.swap.InterestRateNotional; import com.opengamma.financial.security.swap.Notional; import com.opengamma.financial.security.swap.SecurityNotional; import com.opengamma.financial.security.swap.SwapLeg; import com.opengamma.financial.security.swap.SwapSecurity; import com.opengamma.financial.security.test.AbstractSecurityTestCaseAdapter; import com.opengamma.id.ExternalId; import com.opengamma.id.ExternalIdBundle; import com.opengamma.id.UniqueId; import com.opengamma.master.region.RegionMaster; import com.opengamma.master.region.impl.InMemoryRegionMaster; import com.opengamma.master.region.impl.MasterRegionSource; import com.opengamma.master.region.impl.RegionFileReader; import com.opengamma.master.security.ManageableSecurity; import com.opengamma.master.security.RawSecurity; import com.opengamma.util.i18n.Country; import com.opengamma.util.money.Currency; import com.opengamma.util.test.TestGroup; import com.opengamma.util.time.Expiry; import com.opengamma.util.time.ExpiryAccuracy; import com.opengamma.util.time.Tenor; import com.opengamma.util.tuple.Pairs; /** * Creates random securities. */ @SuppressWarnings("unchecked") @Test(groups = TestGroup.UNIT) public abstract class SecurityTestCase extends AbstractSecurityTestCaseAdapter { private static final Logger LOGGER = LoggerFactory.getLogger(SecurityTestCase.class); private interface TestDataProvider<T> { void getValues(Collection<T> values); } private static final class DefaultObjectPermute<T> implements TestDataProvider<T> { private final Class<T> _clazz; private DefaultObjectPermute(final Class<T> clazz) { _clazz = clazz; } public static <T> DefaultObjectPermute<T> of(final Class<T> clazz) { return new DefaultObjectPermute<>(clazz); } @Override public void getValues(final Collection<T> values) { values.addAll(permuteTestObjects(_clazz)); } } private final static class DefaultCollection<T, C extends Collection<T>> implements TestDataProvider<C> { private final Class<C> _collection; private final Class<T> _clazz; private DefaultCollection(final Class<C> collection, final Class<T> clazz) { _collection = collection; _clazz = clazz; } public static <T, C extends Collection<T>> DefaultCollection<T, C> of(final Class<C> collection, final Class<T> clazz) { return new DefaultCollection<>(collection, clazz); } @Override public void getValues(final Collection<C> values) { try { final C collection = _collection.newInstance(); collection.addAll(permuteTestObjects(_clazz)); if (collection.size() > 0) { values.add(_collection.newInstance()); if (collection.size() > 1) { final C value = _collection.newInstance(); value.add(collection.iterator().next()); values.add(value); } } values.add(collection); } catch (final InstantiationException ex) { // TODO Auto-generated catch block ex.printStackTrace(); } catch (final IllegalAccessException ex) { // TODO Auto-generated catch block ex.printStackTrace(); } } } private static final class DefaultList<T, C extends List<T>> implements TestDataProvider<C> { private final Class<C> _collection; private final Class<T> 
_clazz; private DefaultList(final Class<C> collection, final Class<T> clazz) { _collection = collection; _clazz = clazz; } public static <T, C extends List<T>> DefaultList<T, C> of(final Class<C> collection, final Class<T> clazz) { return new DefaultList<>(collection, clazz); } @Override public void getValues(final Collection<C> values) { try { final C collection = _collection.newInstance(); collection.addAll(permuteTestObjects(_clazz)); if (collection.size() > 0) { values.add(_collection.newInstance()); if (collection.size() > 1) { final C value = _collection.newInstance(); value.add(collection.iterator().next()); values.add(value); } } values.add(collection); } catch (final InstantiationException ex) { // TODO Auto-generated catch block ex.printStackTrace(); } catch (final IllegalAccessException ex) { // TODO Auto-generated catch block ex.printStackTrace(); } } } private static Map<Object, TestDataProvider<?>> s_dataProviders = new HashMap<>(); private static Random s_random = new Random(); private static RegionSource s_regionSource; static { final RegionMaster regionMaster = new InMemoryRegionMaster(); RegionFileReader.createPopulated(regionMaster); s_regionSource = new MasterRegionSource(regionMaster); } protected static RegionSource getRegionSource() { return s_regionSource; } static { final long seed = s_random.nextLong(); LOGGER.info("Random seed = {}", seed); s_random.setSeed(seed); TestDataProvider<?> provider; s_dataProviders.put(String.class, new TestDataProvider<String>() { @Override public void getValues(final Collection<String> values) { values.add(""); values.add(RandomStringUtils.randomAlphabetic(16)); values.add(RandomStringUtils.randomNumeric(16)); values.add(RandomStringUtils.randomAlphanumeric(16)); } }); s_dataProviders.put(Map.class, new TestDataProvider<Map<?, ?>>() { private Map<?, ?> generateRandomMap(int count) { final Map<String, String> map = new HashMap<>(count); while (count > 0) { map.put(RandomStringUtils.randomAlphanumeric(16), RandomStringUtils.randomAlphanumeric(16)); count--; } return map; } @Override public void getValues(final Collection<Map<?, ?>> values) { double qty = 1 + s_random.nextInt(9); while (qty > 0) { values.add(generateRandomMap(1 + s_random.nextInt(9))); qty--; } values.add(new HashMap<>()); } }); s_dataProviders.put(SortedMap.class, new TestDataProvider<SortedMap<Tenor, ExternalId>>() { private SortedMap<Tenor, ExternalId> generateRandomMap(int count) { final SortedMap<Tenor, ExternalId> map = new TreeMap<>(); while (count > 0) { Tenor tenor; switch (s_random.nextInt(3)) { case 0: tenor = Tenor.ofDays(s_random.nextInt(28) + 1); break; case 1: tenor = Tenor.ofMonths(s_random.nextInt(12) + 1); break; case 2: tenor = Tenor.ofYears(s_random.nextInt(20) + 2000); break; default: throw new OpenGammaRuntimeException("Should never happen"); } map.put(tenor, ExternalId.of(RandomStringUtils.randomAlphanumeric(16), RandomStringUtils.randomAlphanumeric(16))); count--; } return map; } @Override public void getValues(final Collection<SortedMap<Tenor, ExternalId>> values) { double qty = 1 + s_random.nextInt(9); while (qty > 0) { values.add(generateRandomMap(1 + s_random.nextInt(9))); qty--; } values.add(new TreeMap<Tenor, ExternalId>()); } }); s_dataProviders.put(Set.class, new TestDataProvider<Set<String>>() { @Override public void getValues(final Collection<Set<String>> values) { values.add(Sets.newHashSet(getRandomPermissions())); values.add(Sets.newHashSet(getRandomPermissions())); values.add(Sets.newHashSet(getRandomPermissions())); } }); 
s_dataProviders.put(Double.class, provider = new TestDataProvider<Double>() { @Override public void getValues(final Collection<Double> values) { values.add(0.0); double d; do { d = s_random.nextDouble(); } while (d == 0); values.add(d * 100.0); values.add(d * -100.0); } }); s_dataProviders.put(Double.TYPE, provider); s_dataProviders.put(UniqueId.class, new TestDataProvider<UniqueId>() { @Override public void getValues(final Collection<UniqueId> values) { values.add(UniqueId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16))); } }); s_dataProviders.put(ExternalId.class, new TestDataProvider<ExternalId>() { @Override public void getValues(final Collection<ExternalId> values) { values.add(ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16))); values.add(ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16))); values.add(ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16))); } }); s_dataProviders.put(ExternalIdBundle.class, new TestDataProvider<ExternalIdBundle>() { @Override public void getValues(final Collection<ExternalIdBundle> values) { values.add( ExternalIdBundle.of( ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16)))); values.add( ExternalIdBundle.of( ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16)), ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16)))); values.add( ExternalIdBundle.of( ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16)), ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16)), ExternalId.of(RandomStringUtils.randomAlphanumeric(8), RandomStringUtils.randomAlphanumeric(16)))); } }); s_dataProviders.put(Currency.class, new TestDataProvider<Currency>() { @Override public void getValues(final Collection<Currency> values) { values.add(Currency.of(RandomStringUtils.randomAlphabetic(3).toUpperCase(Locale.ENGLISH))); } }); s_dataProviders.put(YieldConvention.class, new TestDataProvider<YieldConvention>() { @Override public void getValues(final Collection<YieldConvention> values) { values.add(SimpleYieldConvention.US_STREET); values.add(SimpleYieldConvention.US_TREASURY_EQUIVALANT); values.add(SimpleYieldConvention.TRUE); } }); s_dataProviders.put(Expiry.class, DefaultObjectPermute.of(Expiry.class)); s_dataProviders.put(ZonedDateTime.class, new TestDataProvider<ZonedDateTime>() { private final ZoneId[] _timezones = new ZoneId[] {ZoneOffset.UTC, ZoneId.of("UTC-01:00"), ZoneId.of("UTC+01:00") }; @Override public void getValues(final Collection<ZonedDateTime> values) { for (final ZoneId timezone : _timezones) { values.add(ZonedDateTime.now(Clock.system(timezone)).withNano(0)); // TODO: random date in the past // TODO: random date in the future } } }); s_dataProviders.put(LocalDate.class, new TestDataProvider<LocalDate>() { @Override public void getValues(final Collection<LocalDate> values) { values.add(LocalDate.now()); // TODO: random date in the past // TODO: random date in the future } }); s_dataProviders.put(LocalTime.class, new TestDataProvider<LocalTime>() { @Override public void getValues(final Collection<LocalTime> values) { values.add(LocalTime.now().withNano(0)); // TODO: random time in the past // TODO: random time in the future } }); s_dataProviders.put(LocalDateTime.class, new 
TestDataProvider<LocalDateTime>() { @Override public void getValues(final Collection<LocalDateTime> values) { final Collection<LocalDate> dates = getTestObjects(LocalDate.class, null); final Collection<LocalTime> times = getTestObjects(LocalTime.class, null); for (final LocalDate date : dates) { for (final LocalTime time : times) { values.add(LocalDateTime.of(date, time)); } } } }); s_dataProviders.put(Frequency.class, new TestDataProvider<Frequency>() { @Override public void getValues(final Collection<Frequency> values) { values.add(SimpleFrequency.ANNUAL); values.add(SimpleFrequency.SEMI_ANNUAL); values.add(SimpleFrequency.CONTINUOUS); } }); s_dataProviders.put(Tenor.class, new TestDataProvider<Tenor>() { @Override public void getValues(final Collection<Tenor> values) { values.add(Tenor.ONE_DAY); values.add(Tenor.ONE_WEEK); values.add(Tenor.ONE_MONTH); values.add(Tenor.ONE_YEAR); } }); s_dataProviders.put(DayCount.class, new TestDataProvider<DayCount>() { @Override public void getValues(final Collection<DayCount> values) { values.add(DayCounts.ACT_ACT_ISDA); values.add(DayCountFactory.of("1/1")); values.add(DayCountFactory.of("Bond Basis")); } }); s_dataProviders.put(BusinessDayConvention.class, new TestDataProvider<BusinessDayConvention>() { @Override public void getValues(final Collection<BusinessDayConvention> values) { values.add(BusinessDayConventions.FOLLOWING); values.add(BusinessDayConventions.MODIFIED_FOLLOWING); values.add(BusinessDayConventions.PRECEDING); } }); s_dataProviders.put(GICSCode.class, new TestDataProvider<GICSCode>() { @Override public void getValues(final Collection<GICSCode> values) { final int code = (((s_random.nextInt(90) + 10) * 100 + s_random.nextInt(90) + 10) * 100 + s_random.nextInt(90) + 10) * 100 + s_random.nextInt(90) + 10; values.add(GICSCode.of(Integer.toString(code))); } }); s_dataProviders.put(Pairs.of(BondFutureSecurity.class, Collection.class), DefaultCollection.of(ArrayList.class, BondFutureDeliverable.class)); s_dataProviders.put(Pairs.of(BondFutureSecurity.class, List.class), DefaultList.of(ArrayList.class, BondFutureDeliverable.class)); s_dataProviders.put(ExerciseType.class, new TestDataProvider<ExerciseType>() { @Override public void getValues(final Collection<ExerciseType> values) { values.add(new AmericanExerciseType()); values.add(new AsianExerciseType()); values.add(new BermudanExerciseType()); values.add(new EuropeanExerciseType()); } }); s_dataProviders.put(PayoffStyle.class, new TestDataProvider<PayoffStyle>() { @Override public void getValues(final Collection<PayoffStyle> values) { values.add(new AssetOrNothingPayoffStyle()); values.add(new AsymmetricPoweredPayoffStyle(s_random.nextDouble())); values.add(new BarrierPayoffStyle()); values.add(new CappedPoweredPayoffStyle(s_random.nextDouble(), s_random.nextDouble())); values.add(new CashOrNothingPayoffStyle(s_random.nextDouble())); values.add(new FadeInPayoffStyle(s_random.nextDouble(), s_random.nextDouble())); values.add(new FixedStrikeLookbackPayoffStyle()); values.add(new FloatingStrikeLookbackPayoffStyle()); values.add(new GapPayoffStyle(s_random.nextDouble())); values.add(new PoweredPayoffStyle(s_random.nextDouble())); values.add(new SupersharePayoffStyle(s_random.nextDouble(), s_random.nextDouble())); values.add(new VanillaPayoffStyle()); values.add(new ExtremeSpreadPayoffStyle(ZonedDateTime.now().withNano(0), s_random.nextBoolean())); values.add(new SimpleChooserPayoffStyle(ZonedDateTime.now().withNano(0), s_random.nextDouble(), new 
Expiry(ZonedDateTime.now(Clock.systemDefaultZone()), ExpiryAccuracy.MONTH_YEAR))); } }); s_dataProviders.put(Boolean.class, provider = new TestDataProvider<Boolean>() { @Override public void getValues(final Collection<Boolean> values) { values.add(true); values.add(false); } }); s_dataProviders.put(Boolean.TYPE, provider); s_dataProviders.put(SwapLeg.class, new TestDataProvider<SwapLeg>() { @Override public void getValues(final Collection<SwapLeg> values) { values.addAll(permuteTestObjects(FloatingSpreadIRLeg.class)); values.addAll(permuteTestObjects(FloatingGearingIRLeg.class)); values.addAll(permuteTestObjects(FixedInterestRateLeg.class)); values.addAll(permuteTestObjects(FloatingInterestRateLeg.class)); values.addAll(permuteTestObjects(FixedVarianceSwapLeg.class)); values.addAll(permuteTestObjects(FloatingVarianceSwapLeg.class)); } }); s_dataProviders.put(Region.class, new TestDataProvider<Region>() { @Override public void getValues(final Collection<Region> values) { values.add(getRegionSource().getHighestLevelRegion(ExternalSchemes.countryRegionId(Country.US))); values.add(getRegionSource().getHighestLevelRegion(ExternalSchemes.countryRegionId(Country.GB))); } }); s_dataProviders.put(Notional.class, new TestDataProvider<Notional>() { @Override public void getValues(final Collection<Notional> values) { values.add(new CommodityNotional()); values.addAll(permuteTestObjects(InterestRateNotional.class)); values.addAll(permuteTestObjects(SecurityNotional.class)); } }); s_dataProviders.put(BigDecimal.class, new TestDataProvider<BigDecimal>() { @Override public void getValues(final Collection<BigDecimal> values) { values.add(BigDecimal.ONE); } }); s_dataProviders.put(InterestRateNotional.class, new TestDataProvider<Notional>() { @Override public void getValues(final Collection<Notional> values) { values.addAll(permuteTestObjects(InterestRateNotional.class)); } }); s_dataProviders.put(byte[].class, new TestDataProvider<byte[]>() { @Override public void getValues(final Collection<byte[]> values) { values.add(getRandomBytes()); } private byte[] getRandomBytes() { final byte[] randomBytes = new byte[s_random.nextInt(100) + 10]; s_random.nextBytes(randomBytes); return randomBytes; } }); s_dataProviders.put(int.class, new TestDataProvider<Integer>() { @Override public void getValues(final Collection<Integer> values) { values.add(0); int i; do { i = s_random.nextInt(); } while (i == 0); values.add(i * 100); values.add(i * -100); } }); s_dataProviders.put(CDSIndexTerms.class, new TestDataProvider<CDSIndexTerms>() { @Override public void getValues(final Collection<CDSIndexTerms> values) { values.add(CDSIndexTerms.EMPTY); final List<Tenor> tenors = getTestObjects(Tenor.class, null); if (!tenors.isEmpty()) { values.add(CDSIndexTerms.of(tenors.iterator().next())); } values.add(CDSIndexTerms.of(getTestObjects(Tenor.class, null))); } }); s_dataProviders.put(CreditDefaultSwapIndexComponent.class, new TestDataProvider<CreditDefaultSwapIndexComponent>() { @Override public void getValues(final Collection<CreditDefaultSwapIndexComponent> values) { values.add(new CreditDefaultSwapIndexComponent(null, null, null, null)); } }); s_dataProviders.put(CDSIndexComponentBundle.class, new TestDataProvider<CDSIndexComponentBundle>() { @Override public void getValues(final Collection<CDSIndexComponentBundle> values) { final Collection<CreditDefaultSwapIndexComponent> components = permuteTestObjects(CreditDefaultSwapIndexComponent.class); if (!components.isEmpty()) { 
values.add(CDSIndexComponentBundle.of(components.iterator().next())); } values.add(CDSIndexComponentBundle.of(permuteTestObjects(CreditDefaultSwapIndexComponent.class))); } }); s_dataProviders.put(Pairs.of(BondIndex.class, Collection.class), DefaultCollection.of(ArrayList.class, BondIndexComponent.class)); s_dataProviders.put(Pairs.of(BondIndex.class, List.class), DefaultList.of(ArrayList.class, BondIndexComponent.class)); s_dataProviders.put(Pairs.of(EquityIndex.class, Collection.class), DefaultCollection.of(ArrayList.class, EquityIndexComponent.class)); s_dataProviders.put(Pairs.of(EquityIndex.class, List.class), DefaultList.of(ArrayList.class, EquityIndexComponent.class)); s_dataProviders.put(Pairs.of(IndexFamily.class, Collection.class), DefaultCollection.of(ArrayList.class, IndexFamily.class)); s_dataProviders.put(Pairs.of(IndexFamily.class, List.class), DefaultList.of(ArrayList.class, IndexFamily.class)); } protected static <T> List<T> getTestObjects(final Class<T> clazz, final Class<?> parent) { final List<T> objects = new ArrayList<>(); if (clazz.isEnum()) { for (final T value : clazz.getEnumConstants()) { objects.add(value); } } else { final Object key; if (Collection.class.equals(clazz)) { key = Pairs.of(parent, clazz); } else if (List.class.equals(clazz)) { key = Pairs.of(parent, clazz); } else { key = clazz; } final TestDataProvider<T> provider = (TestDataProvider<T>) s_dataProviders.get(key); if (provider == null) { throw new IllegalArgumentException("No random provider for " + clazz); } provider.getValues(objects); } Collections.shuffle(objects); return objects; } @SuppressWarnings("rawtypes") private static List getRandomPermissions() { final List permissions = Lists.newArrayList(); s_dataProviders.get(String.class).getValues(permissions); return permissions; } private static <T> Constructor<T> getBiggestConstructor(final Class<T> clazz) { final Constructor<T>[] constructors = (Constructor<T>[]) clazz.getConstructors(); int max = -1, bestIndex = -1; for (int i = 0; i < constructors.length; i++) { final Class<?>[] parameters = constructors[i].getParameterTypes(); if (parameters.length > max) { max = parameters.length; bestIndex = i; } } return constructors[bestIndex]; } private static <T> Collection<T> permuteTestObjects(final Class<T> clazz, final Constructor<T> constructor) { final Collection<T> objects = new LinkedList<>(); final Class<?>[] parameters = constructor.getParameterTypes(); final List<?>[] parameterValues = new List<?>[parameters.length]; final int[] parameterIndex = new int[parameters.length]; int longest = 0; for (int i = 0; i < parameters.length; i++) { parameterValues[i] = getTestObjects(parameters[i], clazz); if (parameterValues[i].size() > longest) { longest = parameterValues[i].size(); } } final Object[] construct = new Object[parameters.length]; final List<Throwable> exceptions = new LinkedList<>(); // TODO: what about nulls ? 
for (int i = 0; i < longest; i++) { for (int j = 0; j < parameters.length; j++) { construct[j] = parameterValues[j].get(parameterIndex[j]); parameterIndex[j] = (parameterIndex[j] + 1) % parameterValues[j].size(); } try { objects.add(constructor.newInstance(construct)); } catch (final Throwable t) { exceptions.add(t); } } if (objects.size() == 0) { for (final Throwable t : exceptions) { t.printStackTrace(); } throw new IllegalArgumentException("couldn't create test objects"); } LOGGER.info("{} objects created for {}", objects.size(), clazz); for (final Object o : objects) { LOGGER.debug("{}", o); } return objects; } @SuppressWarnings("rawtypes") private static <T> Collection<T> permuteTestObjects(final Class<T> clazz) { if (ManageableSecurity.class.isAssignableFrom(clazz)) { return permuteTestSecurities((Class) clazz); } return permuteTestObjects(clazz, getBiggestConstructor(clazz)); } private static <T extends ManageableSecurity> Collection<T> permuteTestSecurities(final Class<T> clazz) { intializeClass(clazz); final MetaBean mb = JodaBeanUtils.metaBean(clazz); final List<MetaProperty<?>> mps = new ArrayList<>(mb.metaPropertyMap().values()); // find the longest set of available data final List<?>[] parameterValues = new List<?>[mps.size()]; int longest = 0; for (int i = 0; i < mps.size(); i++) { final MetaProperty<?> metaProperty = mps.get(i); if (metaProperty.style().isSerializable() && "permissions".equals(metaProperty.name())) { parameterValues[i] = getTestObjects(metaProperty.propertyType(), null); } else { parameterValues[i] = getTestObjects(metaProperty.propertyType(), clazz); } if (parameterValues[i].size() > longest) { longest = parameterValues[i].size(); } } // prepare final List<Throwable> exceptions = new ArrayList<>(); final Collection<T> objects = new ArrayList<>(); final int[] parameterIndex = new int[mps.size()]; for (int i = 0; i < longest; i++) { try { final BeanBuilder<?> builder = mb.builder(); for (int j = 0; j < mps.size(); j++) { final Object value = parameterValues[j].get(parameterIndex[j]); parameterIndex[j] = (parameterIndex[j] + 1) % parameterValues[j].size(); final MetaProperty<?> metaProperty = mps.get(j); if (metaProperty.style().isSerializable() && metaProperty.name().equals("securityType") == false) { builder.set(metaProperty.name(), value); } } objects.add((T) builder.build()); } catch (final Throwable t) { exceptions.add(t); } } if (objects.size() == 0) { for (final Throwable t : exceptions) { t.printStackTrace(); } throw new IllegalArgumentException("couldn't create test objects"); } LOGGER.info("{} objects created for {}", objects.size(), clazz); for (final Object o : objects) { LOGGER.debug("{}", o); } return objects; } private static <T> void intializeClass(final Class<T> clazz) { // call the default constructor to initialize the class try { final Constructor<T> defaultConstructor = getDefaultConstructor(clazz); if (defaultConstructor != null) { defaultConstructor.setAccessible(true); defaultConstructor.newInstance(); } } catch (final Exception ex) { } } private static <T> Constructor<T> getDefaultConstructor(final Class<T> clazz) { Constructor<T> defaultConstructor = null; final Constructor<?>[] declaredConstructors = clazz.getDeclaredConstructors(); for (final Constructor<?> constructor : declaredConstructors) { final Class<?>[] parameterTypes = constructor.getParameterTypes(); if (parameterTypes.length == 0) { defaultConstructor = (Constructor<T>) constructor; break; } } return defaultConstructor; } protected abstract <T extends ManageableSecurity> 
void assertSecurity(final Class<T> securityClass, final T security); protected <T extends ManageableSecurity> void assertSecurities(final Class<T> securityClass, final Collection<T> securities) { String securityType = null; Class<?> c = securityClass; while (c != null) { try { securityType = (String) c.getDeclaredField("SECURITY_TYPE").get(null); } catch (final Throwable t) { // Ignore } try { securityType = (String) c.getDeclaredField("INDEX_TYPE").get(null); } catch (final Throwable t) { // Ignore } try { securityType = (String) c.getDeclaredField("METADATA_TYPE").get(null); } catch (final Throwable t) { // Ignore } c = c.getSuperclass(); } if (securityClass != RawSecurity.class) { assertNotNull(securityType); } for (final T security : securities) { assertSecurity(securityClass, security); } } protected <T extends ManageableSecurity> void assertSecurities(final Class<T> securityClass) { if (isInitialized()) { assertSecurities(securityClass, permuteTestSecurities(securityClass)); } } /** * Allow subclasses to block testing. * * @return true if initialized */ protected boolean isInitialized() { return true; } // SecurityMasterTestCaseMethods @Override public void testAgricultureFutureSecurity() { assertSecurities(AgricultureFutureSecurity.class); } @Override public void testBondFutureSecurity() { assertSecurities(BondFutureSecurity.class); } @Override public void testCashSecurity() { assertSecurities(CashSecurity.class); } @Override public void testCorporateBondSecurity() { assertSecurities(CorporateBondSecurity.class); } @Override public void testEnergyFutureSecurity() { assertSecurities(EnergyFutureSecurity.class); } @Override public void testEquityOptionSecurity() { assertSecurities(EquityOptionSecurity.class); } @Override public void testEquityBarrierOptionSecurity() { assertSecurities(EquityBarrierOptionSecurity.class); } @Override public void testEquitySecurity() { assertSecurities(EquitySecurity.class); } @Override public void testFRASecurity() { assertSecurities(FRASecurity.class); } @Override public void testFXFutureSecurity() { assertSecurities(FXFutureSecurity.class); } @Override public void testFXOptionSecurity() { assertSecurities(FXOptionSecurity.class); } @Override public void testNonDeliverableFXOptionSecurity() { assertSecurities(NonDeliverableFXOptionSecurity.class); } @Override public void testFXBarrierOptionSecurity() { assertSecurities(FXBarrierOptionSecurity.class); } @Override public void testForwardSwapSecurity() { assertSecurities(ForwardSwapSecurity.class); } @Override public void testIRFutureOptionSecurity() { assertSecurities(IRFutureOptionSecurity.class); } @Override public void testEquityIndexDividendFutureOptionSecurity() { assertSecurities(EquityIndexDividendFutureOptionSecurity.class); } @Override public void testGovernmentBondSecurity() { assertSecurities(GovernmentBondSecurity.class); } @Override public void testIndexFutureSecurity() { assertSecurities(IndexFutureSecurity.class); } @Override public void testInterestRateFutureSecurity() { assertSecurities(InterestRateFutureSecurity.class); } @Override public void testMetalFutureSecurity() { assertSecurities(MetalFutureSecurity.class); } @Override public void testMunicipalBondSecurity() { assertSecurities(MunicipalBondSecurity.class); } @Override public void testStockFutureSecurity() { assertSecurities(StockFutureSecurity.class); } @Override public void testSwaptionSecurity() { assertSecurities(SwaptionSecurity.class); } @Override public void testSwapSecurity() { assertSecurities(SwapSecurity.class); } 
@Override public void testEquityIndexOptionSecurity() { assertSecurities(EquityIndexOptionSecurity.class); } @Override public void testFXDigitalOptionSecurity() { assertSecurities(FXDigitalOptionSecurity.class); } @Override public void testFXForwardSecurity() { assertSecurities(FXForwardSecurity.class); } @Override public void testNonDeliverableFXForwardSecurity() { assertSecurities(NonDeliverableFXForwardSecurity.class); } @Override public void testCapFloorSecurity() { assertSecurities(CapFloorSecurity.class); } @Override public void testCapFloorCMSSpreadSecurity() { assertSecurities(CapFloorCMSSpreadSecurity.class); } @Override public void testRawSecurity() { assertSecurities(RawSecurity.class); } @Override public void testEquityVarianceSwapSecurity() { assertSecurities(EquityVarianceSwapSecurity.class); } @Override public void testCDSSecurity() { assertSecurities(CDSSecurity.class); } @Override public void testStandardFixedRecoveryCDSSecurity() { assertSecurities(StandardFixedRecoveryCDSSecurity.class); } @Override public void testStandardRecoveryLockCDSSecurity() { assertSecurities(StandardRecoveryLockCDSSecurity.class); } @Override public void testStandardVanillaCDSSecurity() { assertSecurities(StandardVanillaCDSSecurity.class); } @Override public void testLegacyFixedRecoveryCDSSecurity() { assertSecurities(LegacyFixedRecoveryCDSSecurity.class); } @Override public void testLegacyRecoveryLockCDSSecurity() { assertSecurities(LegacyRecoveryLockCDSSecurity.class); } @Override public void testLegacyVanillaCDSSecurity() { assertSecurities(LegacyVanillaCDSSecurity.class); } @Override public void testCashFlowSecurity() { assertSecurities(CashFlowSecurity.class); } @Override public void testCreditDefaultSwapIndexDefinitionSecurity() { assertSecurities(CreditDefaultSwapIndexDefinitionSecurity.class); } @Override public void testCreditDefaultSwapIndexSecurity() { assertSecurities(CreditDefaultSwapIndexSecurity.class); } @Override public void testCreditDefaultSwapOptionSecurity() { assertSecurities(CreditDefaultSwapOptionSecurity.class); } @Override public void testBondIndex() { assertSecurities(BondIndex.class); } @Override public void testEquityIndex() { assertSecurities(EquityIndex.class); } @Override public void testIborIndex() { assertSecurities(IborIndex.class); } @Override public void testOvernightIndex() { assertSecurities(OvernightIndex.class); } @Override public void testIndexFamily() { assertSecurities(IndexFamily.class); } }
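/*
 * Minimal self-contained sketch, not part of SecurityTestCase above, of the provider-registry
 * idea it uses: a map from a type to a TestDataProvider that fills in sample values, consulted
 * by getTestObjects(). The real class additionally keys Collection/List parameters by
 * Pairs.of(parentClass, collectionClass), shuffles the results, and builds securities by
 * permuting constructor arguments or Joda-Beans meta-properties; all of that is omitted here.
 * Every name in this sketch is local to the sketch itself.
 */
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class ProviderRegistrySketch {

    /** Same shape as the private TestDataProvider interface in SecurityTestCase. */
    interface TestDataProvider<T> {
        void getValues(Collection<T> values);
    }

    private static final Map<Class<?>, TestDataProvider<?>> PROVIDERS = new HashMap<>();

    static {
        // register a few sample providers, mirroring the static initializer above
        PROVIDERS.put(String.class, new TestDataProvider<String>() {
            @Override
            public void getValues(Collection<String> values) {
                values.add("");
                values.add("abc123");
            }
        });
        PROVIDERS.put(Integer.class, new TestDataProvider<Integer>() {
            @Override
            public void getValues(Collection<Integer> values) {
                values.add(0);
                values.add(42);
            }
        });
    }

    /** Enums are enumerated directly; everything else must have a registered provider. */
    @SuppressWarnings("unchecked")
    static <T> List<T> getTestObjects(Class<T> clazz) {
        List<T> objects = new ArrayList<>();
        if (clazz.isEnum()) {
            Collections.addAll(objects, clazz.getEnumConstants());
        } else {
            TestDataProvider<T> provider = (TestDataProvider<T>) PROVIDERS.get(clazz);
            if (provider == null) {
                throw new IllegalArgumentException("No random provider for " + clazz);
            }
            provider.getValues(objects);
        }
        return objects;
    }
}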
/* * Copyright 2015 Ruben Gees * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.rubengees.introduction.entity; import android.content.Context; import android.content.res.Resources; import android.os.Parcel; import android.os.Parcelable; import android.support.annotation.ColorInt; import android.support.annotation.ColorRes; import android.support.annotation.DrawableRes; import android.support.annotation.IntRange; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.StringRes; import android.support.v4.content.ContextCompat; /** * A bean which contains the data of a slide. * * @author Ruben Gees */ public class Slide implements Parcelable { private int position; private String title; private Integer titleResource; private String description; private Integer descriptionResource; private Integer imageResource; private Integer color; private Integer colorResource; private Option option; public Slide() { } /** * Sets the title of this Slide. * If a title resource was given before, it will be overridden. * * @param title The title. * @return The current instance. */ public Slide withTitle(@Nullable String title) { this.title = title; this.titleResource = null; return this; } /** * Sets the title resource for this Slide. * If a title was given before, it will be overridden. * * @param titleResource The title resource. * @return The current instance. */ public Slide withTitle(@StringRes int titleResource) { this.titleResource = titleResource; this.title = null; return this; } /** * Sets the title resource for this Slide. * If a title was given before, it will be overridden. * * @param titleResource The title resource. * @return The current instance. * @deprecated Use {@link #withTitle(int)} instead. */ @SuppressWarnings("unused") @Deprecated public Slide withTitleResource(@StringRes int titleResource) { this.titleResource = titleResource; this.title = null; return this; } /** * Sets the description of this slide. * If a description resource was given before, it will be overridden. * * @param description The description. * @return The current instance. */ public Slide withDescription(@Nullable String description) { this.description = description; this.descriptionResource = null; this.option = null; return this; } /** * Sets the description resource of this slide. * If a description was given before, it will be overridden. * * @param descriptionResource The description resource. * @return The current instance. */ public Slide withDescription(@StringRes int descriptionResource) { this.descriptionResource = descriptionResource; this.description = null; this.option = null; return this; } /** * Sets the description resource of this slide. * If a description was given before, it will be overridden. * * @param descriptionResource The description resource. * @return The current instance. * @deprecated Use {@link #withDescription(int)} instead. 
*/ @SuppressWarnings("unused") @Deprecated public Slide withDescriptionResource(@StringRes int descriptionResource) { this.descriptionResource = descriptionResource; this.description = null; this.option = null; return this; } /** * Sets the image resource of this slide. * * @param imageResource The image resource. * @return The current instance. */ public Slide withImage(@DrawableRes int imageResource) { this.imageResource = imageResource; return this; } /** * Sets the image resource of this slide. * * @param imageResource The image resource. * @return The current instance. * @deprecated Use {@link #withImage(int)} instead. */ @SuppressWarnings("unused") @Deprecated public Slide withImageResource(@DrawableRes int imageResource) { this.imageResource = imageResource; return this; } /** * Sets the background color of this slide. * If a color resource was given before, it will be overridden. * * @param color The color. * @return The current instance. */ @SuppressWarnings("unused") public Slide withColor(@ColorInt int color) { this.color = color; this.colorResource = null; return this; } /** * Sets the background color resource of this slide. * If a color was given before, it will be overridden. * * @param colorResource The color resource. * @return The current instance. */ public Slide withColorResource(@ColorRes int colorResource) { this.colorResource = colorResource; this.color = null; return this; } /** * Sets the option of this slide. * Overrides a description or description resource. * * @param option The option. * @return The current instance. */ public Slide withOption(@Nullable Option option) { this.option = option; this.description = null; this.descriptionResource = null; return this; } public int getPosition() { return position; } public String getTitle() { return title; } public Integer getImageResource() { return imageResource; } public String getDescription() { return description; } public Integer getColor() { return color; } public Option getOption() { return option; } /** * Creates and retrieves all the needed data. Don't call this Method yourself. * * @param context The Context. * @param position The position of this slide. */ public void init(@NonNull Context context, @IntRange(from = 0) int position) { this.position = position; Resources resources = context.getResources(); if (titleResource != null) { title = resources.getString(titleResource); titleResource = null; } if (descriptionResource != null) { description = resources.getString(descriptionResource); descriptionResource = null; } if (colorResource != null) { color = ContextCompat.getColor(context, colorResource); colorResource = null; } if (option != null) { option.init(context, position); } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Slide slide = (Slide) o; if (position != slide.position) return false; if (title != null ? !title.equals(slide.title) : slide.title != null) return false; if (titleResource != null ? !titleResource.equals(slide.titleResource) : slide.titleResource != null) return false; if (description != null ? !description.equals(slide.description) : slide.description != null) return false; if (descriptionResource != null ? !descriptionResource.equals(slide.descriptionResource) : slide.descriptionResource != null) return false; if (imageResource != null ? !imageResource.equals(slide.imageResource) : slide.imageResource != null) return false; if (color != null ? 
!color.equals(slide.color) : slide.color != null) return false; if (colorResource != null ? !colorResource.equals(slide.colorResource) : slide.colorResource != null) return false; return !(option != null ? !option.equals(slide.option) : slide.option != null); } @Override public int hashCode() { int result = position; result = 31 * result + (title != null ? title.hashCode() : 0); result = 31 * result + (titleResource != null ? titleResource.hashCode() : 0); result = 31 * result + (description != null ? description.hashCode() : 0); result = 31 * result + (descriptionResource != null ? descriptionResource.hashCode() : 0); result = 31 * result + (imageResource != null ? imageResource.hashCode() : 0); result = 31 * result + (color != null ? color.hashCode() : 0); result = 31 * result + (colorResource != null ? colorResource.hashCode() : 0); result = 31 * result + (option != null ? option.hashCode() : 0); return result; } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeInt(this.position); dest.writeString(this.title); dest.writeValue(this.titleResource); dest.writeString(this.description); dest.writeValue(this.descriptionResource); dest.writeValue(this.imageResource); dest.writeValue(this.color); dest.writeValue(this.colorResource); dest.writeParcelable(this.option, 0); } protected Slide(Parcel in) { this.position = in.readInt(); this.title = in.readString(); this.titleResource = (Integer) in.readValue(Integer.class.getClassLoader()); this.description = in.readString(); this.descriptionResource = (Integer) in.readValue(Integer.class.getClassLoader()); this.imageResource = (Integer) in.readValue(Integer.class.getClassLoader()); this.color = (Integer) in.readValue(Integer.class.getClassLoader()); this.colorResource = (Integer) in.readValue(Integer.class.getClassLoader()); this.option = in.readParcelable(Option.class.getClassLoader()); } public static final Parcelable.Creator<Slide> CREATOR = new Parcelable.Creator<Slide>() { public Slide createFromParcel(Parcel source) { return new Slide(source); } public Slide[] newArray(int size) { return new Slide[size]; } }; }
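/*
 * Usage sketch (illustrative, not part of the library above): builds a Slide through the fluent
 * "with" methods defined in the class. It assumes the Slide class above is on the classpath; the
 * android.R framework resources are used only so the snippet needs no app-specific R class, and a
 * real app would pass its own resource ids.
 */
class SlideUsageExample {

    static Slide buildWelcomeSlide() {
        return new Slide()
                .withTitle("Welcome")                               // literal title; clears any title resource
                .withDescription("A short tour of the app")         // literal description; clears option and resource
                .withImage(android.R.drawable.ic_dialog_info)       // any @DrawableRes id works here
                .withColorResource(android.R.color.holo_blue_dark); // background color resource; clears a literal color
    }
}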
package com.tinkerpop.blueprints.impls.orient; import com.orientechnologies.common.util.OPair; import com.orientechnologies.orient.core.command.OCommandOutputListener; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; import com.orientechnologies.orient.core.db.record.OIdentifiable; import com.orientechnologies.orient.core.db.record.ridbag.ORidBag; import com.orientechnologies.orient.core.exception.ORecordNotFoundException; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.metadata.OMetadata; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.schema.OImmutableClass; import com.orientechnologies.orient.core.metadata.schema.OSchema; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.record.impl.ODocumentInternal; import com.orientechnologies.orient.core.storage.impl.local.OStorageRecoverEventListener; import com.tinkerpop.blueprints.Direction; import java.util.Collection; import java.util.Iterator; /** * Repairs a graph. Current implementation scan the entire graph. In the future the WAL will be used to make this repair task much * faster. * * @author Luca Garulli * */ public class OGraphRepair { private class ORepairStats { long scannedEdges = 0; long removedEdges = 0; long scannedVertices = 0; long scannedLinks = 0; long removedLinks = 0; long repairedVertices = 0; } private OStorageRecoverEventListener eventListener; public void repair(final OrientBaseGraph graph, final OCommandOutputListener outputListener) { message(outputListener, "Repair of graph '" + graph.getRawGraph().getURL() + "' is started ...\n"); final long beginTime = System.currentTimeMillis(); final ORepairStats stats = new ORepairStats(); // SCAN AND CLEAN ALL THE EDGES FIRST (IF ANY) repairEdges(graph, stats, outputListener); // SCAN ALL THE VERTICES repairVertices(graph, stats, outputListener); message(outputListener, "Repair of graph '" + graph.getRawGraph().getURL() + "' completed in " + (System.currentTimeMillis() - beginTime / 1000) + " secs\n"); message(outputListener, " scannedEdges.....: " + stats.scannedEdges + "\n"); message(outputListener, " removedEdges.....: " + stats.removedEdges + "\n"); message(outputListener, " scannedVertices..: " + stats.scannedVertices + "\n"); message(outputListener, " scannedLinks.....: " + stats.scannedLinks + "\n"); message(outputListener, " removedLinks.....: " + stats.removedLinks + "\n"); message(outputListener, " repairedVertices.: " + stats.repairedVertices + "\n"); } protected void repairEdges(OrientBaseGraph graph, ORepairStats stats, OCommandOutputListener outputListener) { final ODatabaseDocumentTx db = graph.getRawGraph(); final OMetadata metadata = db.getMetadata(); final OSchema schema = metadata.getSchema(); final OrientConfigurableGraph.Settings settings = graph.settings; final boolean useVertexFieldsForEdgeLabels = settings.isUseVertexFieldsForEdgeLabels(); final OClass edgeClass = schema.getClass(OrientEdgeType.CLASS_NAME); if (edgeClass != null) { final long countEdges = db.countClass(edgeClass.getName()); message(outputListener, "Scanning " + countEdges + " edges...\n"); for (ODocument edge : db.browseClass(edgeClass.getName())) { final ORID edgeId = edge.getIdentity(); stats.scannedEdges++; if (eventListener != null) eventListener.onScannedEdge(edge); boolean removeEdge = false; final OIdentifiable out = OrientEdge.getConnection(edge, Direction.OUT); if (out == null) removeEdge = 
true; else { final ODocument outVertex = out.getRecord(); if (outVertex == null) removeEdge = true; final String outFieldName = OrientVertex.getConnectionFieldName(Direction.OUT, edge.getClassName(), useVertexFieldsForEdgeLabels); final Object outEdges = outVertex.field(outFieldName); if (outEdges == null) removeEdge = true; else if (outEdges instanceof ORidBag) { if (!((ORidBag) outEdges).contains(edgeId)) removeEdge = true; } else if (outEdges instanceof Collection) { if (!((Collection) outEdges).contains(edgeId)) removeEdge = true; } else if (outEdges instanceof OIdentifiable) { if (((OIdentifiable) outEdges).getIdentity().equals(edgeId)) removeEdge = true; } } final OIdentifiable in = OrientEdge.getConnection(edge, Direction.IN); if (in == null) removeEdge = true; else { final ODocument inVertex = in.getRecord(); if (inVertex == null) removeEdge = true; final String inFieldName = OrientVertex.getConnectionFieldName(Direction.IN, edge.getClassName(), useVertexFieldsForEdgeLabels); final Object inEdges = inVertex.field(inFieldName); if (inEdges == null) removeEdge = true; else if (inEdges instanceof ORidBag) { if (!((ORidBag) inEdges).contains(edgeId)) removeEdge = true; } else if (inEdges instanceof Collection) { if (!((Collection) inEdges).contains(edgeId)) removeEdge = true; } else if (inEdges instanceof OIdentifiable) { if (((OIdentifiable) inEdges).getIdentity().equals(edgeId)) removeEdge = true; } } if (removeEdge) { try { edge.delete(); stats.removedEdges++; if (eventListener != null) eventListener.onRemovedEdge(edge); } catch (Exception e) { message(outputListener, "Error on deleting edge " + edge.getIdentity() + " (" + e.getMessage() + ")"); } } } message(outputListener, "Scanning edges completed\n"); } } protected void repairVertices(OrientBaseGraph graph, ORepairStats stats, OCommandOutputListener outputListener) { final ODatabaseDocumentTx db = graph.getRawGraph(); final OMetadata metadata = db.getMetadata(); final OSchema schema = metadata.getSchema(); final OClass vertexClass = schema.getClass(OrientVertexType.CLASS_NAME); if (vertexClass != null) { final long countVertices = db.countClass(vertexClass.getName()); message(outputListener, "Scanning " + countVertices + " vertices...\n"); for (ODocument vertex : db.browseClass(vertexClass.getName())) { stats.scannedVertices++; if (eventListener != null) eventListener.onScannedVertex(vertex); final OrientVertex v = new OrientVertex(graph, vertex); boolean modifiedVertex = false; for (String fieldName : vertex.fieldNames()) { final OPair<Direction, String> connection = v.getConnection(Direction.BOTH, fieldName, null); if (connection == null) // SKIP THIS FIELD continue; final Object fieldValue = vertex.rawField(fieldName); if (fieldValue != null) { if (fieldValue instanceof OIdentifiable) { if (isEdgeBroken(vertex, fieldName, connection.getKey(), (OIdentifiable) fieldValue, stats, graph.settings.isUseVertexFieldsForEdgeLabels())) { modifiedVertex = true; vertex.field(fieldName, (Object) null); } } else if (fieldValue instanceof Collection<?>) { final Collection<?> coll = ((Collection<?>) fieldValue); for (Iterator<?> it = coll.iterator(); it.hasNext();) { final Object o = it.next(); if (isEdgeBroken(vertex, fieldName, connection.getKey(), (OIdentifiable) o, stats, graph.settings.isUseVertexFieldsForEdgeLabels())) { modifiedVertex = true; it.remove(); } } } else if (fieldValue instanceof ORidBag) { final ORidBag ridbag = ((ORidBag) fieldValue); for (Iterator<?> it = ridbag.rawIterator(); it.hasNext();) { final Object o = 
it.next(); if (isEdgeBroken(vertex, fieldName, connection.getKey(), (OIdentifiable) o, stats, graph.settings.isUseVertexFieldsForEdgeLabels())) { modifiedVertex = true; it.remove(); } } } } } if (modifiedVertex) { stats.repairedVertices++; if (eventListener != null) eventListener.onRepairedVertex(vertex); vertex.save(); } } message(outputListener, "Scanning vertices completed\n"); } } private void onScannedLink(ORepairStats stats, OIdentifiable fieldValue) { stats.scannedLinks++; if (eventListener != null) eventListener.onScannedLink(fieldValue); } private void onRemovedLink(ORepairStats stats, OIdentifiable fieldValue) { stats.removedLinks++; if (eventListener != null) eventListener.onRemovedLink(fieldValue); } public OStorageRecoverEventListener getEventListener() { return eventListener; } public OGraphRepair setEventListener(final OStorageRecoverEventListener eventListener) { this.eventListener = eventListener; return this; } private void message(final OCommandOutputListener outputListener, final String message) { if (outputListener != null) outputListener.onMessage(message); } private boolean isEdgeBroken(final OIdentifiable vertex, final String fieldName, final Direction direction, final OIdentifiable edgeRID, final ORepairStats stats, final boolean useVertexFieldsForEdgeLabels) { onScannedLink(stats, edgeRID); boolean broken = false; if (edgeRID == null) // RID NULL broken = true; else { ODocument record = null; try { record = edgeRID.getIdentity().getRecord(); } catch (ORecordNotFoundException e) { broken = true; } if (record == null) // RECORD DELETED broken = true; else { final OImmutableClass immutableClass = ODocumentInternal.getImmutableSchemaClass(record); if (immutableClass == null || (!immutableClass.isVertexType() && !immutableClass.isEdgeType())) // INVALID RECORD TYPE: NULL OR NOT GRAPH TYPE broken = true; else { if (immutableClass.isVertexType()) { // VERTEX -> LIGHTWEIGHT EDGE final String inverseFieldName = OrientVertex.getInverseConnectionFieldName(fieldName, useVertexFieldsForEdgeLabels); // CHECK THE VERTEX IS IN INVERSE EDGE CONTAINS final Object inverseEdgeContainer = record.field(inverseFieldName); if (inverseEdgeContainer == null) // NULL CONTAINER broken = true; else { if (inverseEdgeContainer instanceof OIdentifiable) { if (!inverseEdgeContainer.equals(vertex)) // NOT THE SAME broken = true; } else if (inverseEdgeContainer instanceof Collection<?>) { if (!((Collection) inverseEdgeContainer).contains(vertex)) // NOT IN COLLECTION broken = true; } else if (inverseEdgeContainer instanceof ORidBag) { if (!((ORidBag) inverseEdgeContainer).contains(vertex)) // NOT IN RIDBAG broken = true; } } } else { // EDGE -> REGULAR EDGE, OK final OIdentifiable backRID = OrientEdge.getConnection(record, direction); if (backRID == null || !backRID.equals(vertex)) // BACK RID POINTS TO ANOTHER VERTEX broken = true; } } } } if (broken) { onRemovedLink(stats, edgeRID); return true; } return false; } }
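/*
 * Usage sketch (illustrative, not part of the class above): runs the repair over a graph and
 * forwards progress messages to stdout through an OCommandOutputListener, the same callback type
 * the repair code writes to via onMessage(...). How the OrientBaseGraph is opened is left to the
 * caller; any open graph instance can be passed.
 */
class GraphRepairExample {

    static void repairAndReport(final OrientBaseGraph graph) {
        final com.orientechnologies.orient.core.command.OCommandOutputListener listener =
                new com.orientechnologies.orient.core.command.OCommandOutputListener() {
                    @Override
                    public void onMessage(final String text) {
                        System.out.print(text); // repair messages carry their own newlines
                    }
                };

        new OGraphRepair().repair(graph, listener);
    }
}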
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.eclipse.editors.outline; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.drools.compiler.compiler.DroolsParserException; import org.drools.eclipse.DRLInfo; import org.drools.eclipse.DroolsEclipsePlugin; import org.drools.eclipse.core.DroolsElement; import org.drools.eclipse.core.DroolsModelBuilder; import org.drools.eclipse.core.Package; import org.drools.eclipse.core.RuleSet; import org.drools.eclipse.core.ui.DroolsContentProvider; import org.drools.eclipse.core.ui.DroolsGroupByRuleGroupContentProvider; import org.drools.eclipse.core.ui.DroolsLabelProvider; import org.drools.eclipse.core.ui.DroolsTreeSorter; import org.drools.eclipse.core.ui.FilterActionGroup; import org.drools.eclipse.editors.AbstractRuleEditor; import org.drools.compiler.lang.descr.AttributeDescr; import org.drools.compiler.lang.descr.PackageDescr; import org.drools.compiler.lang.descr.RuleDescr; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.IStatusLineManager; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.ui.views.contentoutline.ContentOutlinePage; /** * Simple outline view of a DRL file. At present this is not wired in with the Parser, so it is fault * tolerant of incorrect syntax. * Should provide navigation assistance in large rule files. 
*/ public class RuleContentOutlinePage extends ContentOutlinePage { private AbstractRuleEditor editor; private RuleSet ruleSet = DroolsModelBuilder.createRuleSet(); private Map<String, RuleDescr> rules; private boolean groupByRulegroup = false; private TreeViewer viewer = null; /////////////////////////////////// // Patterns that the parser uses // TODO: this should just reuse the existing parser to avoid inconsistencies // with for example comments /////////////////////////////////// private static final Pattern RULE_PATTERN1 = Pattern.compile( "\\n\\s*rule\\s+\"([^\"]+)\"", Pattern.DOTALL); private static final Pattern RULE_PATTERN2 = Pattern.compile( "\\n\\s*rule\\s+([^\\s;#\"]+)", Pattern.DOTALL); private static final Pattern PACKAGE_PATTERN = Pattern.compile( "\\s*package\\s+([^\\s;#]+);?", Pattern.DOTALL); private static final Pattern FUNCTION_PATTERN = Pattern.compile( "\\n\\s*function\\s+(\\S+)\\s+(\\S+)\\(.*?\\)", Pattern.DOTALL); private static final Pattern TEMPLATE_PATTERN = Pattern.compile( "\\n\\s*template\\s+([^\\s;#\"]+)", Pattern.DOTALL); private static final Pattern IMPORT_PATTERN = Pattern.compile( "\\n\\s*import\\s+([^\\s;#]+);?", Pattern.DOTALL); private static final Pattern EXPANDER_PATTERN = Pattern.compile( "\\n\\s*expander\\s+([^\\s;#]+);?", Pattern.DOTALL); private static final Pattern GLOBAL_PATTERN = Pattern.compile( "\\n\\s*global\\s+(\\S+)\\s+([^\\s;#]+);?", Pattern.DOTALL); private static final Pattern QUERY_PATTERN1 = Pattern.compile( "\\n\\s*query\\s+\"([^\"]+)\"", Pattern.DOTALL); private static final Pattern QUERY_PATTERN2 = Pattern.compile( "\\n\\s*query\\s+([^\\s;#\"]+)", Pattern.DOTALL); public RuleContentOutlinePage(AbstractRuleEditor editor) { this.editor = editor; } DroolsContentProvider contentProvider = null; DroolsGroupByRuleGroupContentProvider groupByRuleGroupContentProvider = null; private void setContentProvider() { IPreferenceStore preferenceStore= DroolsEclipsePlugin.getDefault().getPreferenceStore(); groupByRulegroup = preferenceStore.getBoolean("GroupByRuleGroupAction.isChecked"); contentProvider = new DroolsContentProvider(); groupByRuleGroupContentProvider = new DroolsGroupByRuleGroupContentProvider(); if (groupByRulegroup) { viewer.setContentProvider(groupByRuleGroupContentProvider); } else { viewer.setContentProvider(contentProvider); } } public void createControl(Composite parent) { super.createControl(parent); viewer = getTreeViewer(); setContentProvider(); viewer.setLabelProvider(new DroolsLabelProvider()); viewer.setSorter(new DroolsTreeSorter()); viewer.setInput(ruleSet); FilterActionGroup filterActionGroup = new FilterActionGroup( viewer, "org.drools.eclipse.editors.outline.RuleContentOutlinePage"); filterActionGroup.fillActionBars(getSite().getActionBars()); update(); // add the listener for navigation of the rule document. super.addSelectionChangedListener(new ISelectionChangedListener() { public void selectionChanged(SelectionChangedEvent event) { Object selectionObj = event.getSelection(); if (selectionObj != null && selectionObj instanceof StructuredSelection) { StructuredSelection sel = (StructuredSelection) selectionObj; DroolsElement element = (DroolsElement) sel.getFirstElement(); if (element != null) { editor.selectAndReveal(element.getOffset(), element.getLength()); } } } }); } /** * Updates the outline page. 
*/ public void update() { TreeViewer viewer = getTreeViewer(); if (viewer != null) { Control control = viewer.getControl(); if (control != null && !control.isDisposed()) { initRules(); populatePackageTreeNode(); viewer.refresh(); control.setRedraw(false); viewer.expandToLevel(2); control.setRedraw(true); } } } /** * populates the PackageTreeNode with all of its child elements * * @param packageTreeNode the node to populate */ public void populatePackageTreeNode() { String ruleFileContents = editor.getContent(); populatePackageTreeNode(ruleFileContents); } void populatePackageTreeNode(String ruleFileContents) { DroolsModelBuilder.clearRuleSet(ruleSet); Matcher matcher = PACKAGE_PATTERN.matcher(ruleFileContents); String packageName = null; int startChar = 0; int endChar = 0; if (matcher.find()) { packageName = matcher.group(1); startChar = matcher.start(1); endChar = matcher.end(1); } Package pkg = DroolsModelBuilder.addPackage(ruleSet, packageName, startChar, endChar - startChar); matcher = RULE_PATTERN1.matcher(ruleFileContents); while (matcher.find()) { String ruleName = matcher.group(1); RuleDescr descr = (RuleDescr) rules.get(ruleName); if (descr != null) { DroolsModelBuilder.addRule(pkg, ruleName, null, matcher.start(1), matcher.end(1) - matcher.start(1), extractAttributes(descr)); } } matcher = RULE_PATTERN2.matcher(ruleFileContents); while (matcher.find()) { String ruleName = matcher.group(1); RuleDescr descr = (RuleDescr) rules.get(ruleName); if (descr != null) { DroolsModelBuilder.addRule(pkg, ruleName, null, matcher.start(1), matcher.end(1) - matcher.start(1), extractAttributes(descr)); } } matcher = FUNCTION_PATTERN.matcher(ruleFileContents); while (matcher.find()) { String functionName = matcher.group(2); DroolsModelBuilder.addFunction(pkg, functionName + "()", null, matcher.start(2), matcher.end(2) - matcher.start(2)); } matcher = EXPANDER_PATTERN.matcher(ruleFileContents); if (matcher.find()) { String expanderName = matcher.group(1); DroolsModelBuilder.addExpander(pkg, expanderName, null, matcher.start(1), matcher.end(1) - matcher.start(1)); } matcher = IMPORT_PATTERN.matcher(ruleFileContents); while (matcher.find()) { String importName = matcher.group(1); DroolsModelBuilder.addImport(pkg, importName, null, matcher.start(1), matcher.end(1) - matcher.start(1)); } matcher = GLOBAL_PATTERN.matcher(ruleFileContents); while (matcher.find()) { String globalType = matcher.group(1); String globalName = matcher.group(2); String name = globalName + " : " + globalType; DroolsModelBuilder.addGlobal(pkg, name, null, matcher.start(2), matcher.end(2) - matcher.start(2)); } matcher = QUERY_PATTERN1.matcher(ruleFileContents); while (matcher.find()) { String queryName = matcher.group(1); DroolsModelBuilder.addQuery(pkg, queryName, null, matcher.start(1), matcher.end(1) - matcher.start(1)); } matcher = QUERY_PATTERN2.matcher(ruleFileContents); while (matcher.find()) { String queryName = matcher.group(1); DroolsModelBuilder.addQuery(pkg, queryName, null, matcher.start(1), matcher.end(1) - matcher.start(1)); } matcher = TEMPLATE_PATTERN.matcher(ruleFileContents); while (matcher.find()) { String templateName = matcher.group(1); DroolsModelBuilder.addTemplate(pkg, templateName, null, matcher.start(1), matcher.end(1) - matcher.start(1)); } } RuleSet getRuleSet() { return ruleSet; } private Map<String, String> extractAttributes(RuleDescr ruleDescr) { Map<String, String> attributes = null; if (ruleDescr != null) { attributes = new HashMap<String, String>(); for (AttributeDescr attribute: 
ruleDescr.getAttributes().values()) { if (attribute != null && attribute.getName() != null) { attributes.put(attribute.getName(), attribute.getValue()); } } } return attributes; } public void initRules() { rules = new HashMap<String, RuleDescr>(); try { DRLInfo drlInfo = DroolsEclipsePlugin.getDefault().parseResource(editor, true, false); if (drlInfo != null) { PackageDescr packageDescr = drlInfo.getPackageDescr(); if (packageDescr != null) { for (RuleDescr ruleDescr: packageDescr.getRules()) { if (ruleDescr != null && ruleDescr.getName() != null) { rules.put(ruleDescr.getName(), ruleDescr); } } } } } catch (DroolsParserException e) { DroolsEclipsePlugin.log(e); } } class GroupByRuleGroupAction extends Action { public GroupByRuleGroupAction() { super(); setText("Group by Rule Group"); setToolTipText("Group by Rule Group"); setDescription("Group by agenda-group, activation-group or ruleflow-group"); setChecked(groupByRulegroup); } @Override public void run() { setGroupByRuleGroup(!groupByRulegroup); } private void setGroupByRuleGroup(boolean groupBy) { groupByRulegroup = groupBy; setChecked(groupBy); IPreferenceStore preferenceStore= DroolsEclipsePlugin.getDefault().getPreferenceStore(); preferenceStore.setValue("GroupByRuleGroupAction.isChecked", groupBy); setContentProvider(); viewer.refresh(true); } } @Override public void makeContributions(IMenuManager menuManager, IToolBarManager toolBarManager, IStatusLineManager statusLineManager) { // TODO Auto-generated method stub GroupByRuleGroupAction groupByAction = new GroupByRuleGroupAction (); menuManager.add(groupByAction); super.makeContributions(menuManager, toolBarManager, statusLineManager); } }
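/*
 * Illustration (not part of the plugin above): shows what the PACKAGE_PATTERN and RULE_PATTERN1
 * expressions from the outline page extract from a small, made-up DRL fragment. The patterns are
 * copied verbatim; only the sample text and class name are invented.
 */
class OutlinePatternExample {

    public static void main(String[] args) {
        String drl = "package org.example.rules;\n"
                + "\n"
                + "rule \"Discount for gold customers\"\n"
                + "when\n"
                + "then\n"
                + "end\n";

        java.util.regex.Matcher pkg = java.util.regex.Pattern
                .compile("\\s*package\\s+([^\\s;#]+);?", java.util.regex.Pattern.DOTALL)
                .matcher(drl);
        java.util.regex.Matcher rule = java.util.regex.Pattern
                .compile("\\n\\s*rule\\s+\"([^\"]+)\"", java.util.regex.Pattern.DOTALL)
                .matcher(drl);

        if (pkg.find()) {
            System.out.println("package: " + pkg.group(1)); // -> org.example.rules
        }
        while (rule.find()) {
            System.out.println("rule: " + rule.group(1));   // -> Discount for gold customers
        }
    }
}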
package com.tngtech.archunit.core.importer; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashSet; import java.util.Set; import java.util.jar.JarFile; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.tngtech.archunit.testutil.SystemPropertiesRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.Iterables.concat; import static com.google.common.collect.Sets.union; import static java.util.jar.Attributes.Name.CLASS_PATH; import static org.assertj.core.api.Assertions.assertThat; public class UrlSourceTest { static final String JAVA_CLASS_PATH_PROP = "java.class.path"; private static final String JAVA_BOOT_PATH_PROP = "sun.boot.class.path"; private static final char CHARACTER_THAT_IS_HOPEFULLY_ILLEGAL_ON_EVERY_PLATFORM = '\0'; @Rule public final SystemPropertiesRule systemPropertiesRule = new SystemPropertiesRule(); @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder(); @Test public void resolves_from_system_property() throws MalformedURLException { Path firstFileEntry = Paths.get("some", "path", "classes"); Path firstJarEntry = Paths.get("other", "lib", "some.jar"); Path secondFileEntry = Paths.get("more", "classes"); Path secondJarEntry = Paths.get("my", ".m2", "repo", "greatlib.jar"); String classPath = createClassPathProperty(firstFileEntry.toString(), firstJarEntry.toString(), secondFileEntry.toString(), secondJarEntry.toString()); System.setProperty(JAVA_CLASS_PATH_PROP, classPath); Path bootstrapFileEntry = Paths.get("some", "bootstrap", "classes"); Path bootstrapJarEntry = Paths.get("more", "bootstrap", "bootlib.jar"); String bootstrapClassPath = createClassPathProperty(bootstrapFileEntry.toString(), bootstrapJarEntry.toString()); System.setProperty(JAVA_BOOT_PATH_PROP, bootstrapClassPath); UrlSource urlSource = UrlSource.From.classPathSystemProperties(); assertThat(urlSource).containsOnly( toUrl(firstFileEntry), new URL("jar:" + firstJarEntry.toUri() + "!/"), toUrl(secondFileEntry), new URL("jar:" + secondJarEntry.toUri() + "!/"), toUrl(bootstrapFileEntry), new URL("jar:" + bootstrapJarEntry.toUri() + "!/") ); } @Test public void resolves_missing_system_properties_resiliently() { System.clearProperty(JAVA_BOOT_PATH_PROP); System.clearProperty(JAVA_CLASS_PATH_PROP); assertThat(UrlSource.From.classPathSystemProperties()).isEmpty(); } @Test public void ignores_invalid_paths_in_class_path_property() { Path valid = Paths.get("some", "valid", "path"); String classPath = createClassPathProperty(valid.toString(), "/invalid/path/because/of/" + CHARACTER_THAT_IS_HOPEFULLY_ILLEGAL_ON_EVERY_PLATFORM + "/"); System.setProperty(JAVA_CLASS_PATH_PROP, classPath); System.clearProperty(JAVA_BOOT_PATH_PROP); assertThat(UrlSource.From.classPathSystemProperties()).containsOnly(toUrl(valid)); } @Test public void returns_unique_urls() { URL url = getClass().getResource("."); ImmutableList<URL> redundantInput = ImmutableList.of(url, url); UrlSource source = UrlSource.From.iterable(redundantInput); assertThat(source).hasSize(1).containsOnly(url); } @Test public void handles_paths_with_spaces() throws Exception { Path path_with_spaces = 
temporaryFolder.newFolder("path with spaces").toPath(); Path destination = path_with_spaces.resolve(getClass().getName() + ".class"); Files.copy(Paths.get(LocationTest.urlOfClass(getClass()).toURI()), destination); String classPath = createClassPathProperty(destination.toString()); System.setProperty(JAVA_CLASS_PATH_PROP, classPath); UrlSource urls = UrlSource.From.classPathSystemProperties(); assertThat(urls).contains(toUrl(destination)); } @Test public void handles_jar_uri_with_spaces() throws Exception { File folderWithSpaces = temporaryFolder.newFolder("folder with spaces"); File folder = temporaryFolder.newFolder(); WrittenJarFile jarInFolderWithSpaces = writeJarWithManifestClasspathAttribute(folderWithSpaces, "folder-with-spaces"); WrittenJarFile parentJar = writeJarWithManifestClasspathAttribute(folder, "parent", ManifestClasspathEntry.absoluteUrl(jarInFolderWithSpaces.path)); System.setProperty(JAVA_CLASS_PATH_PROP, parentJar.path.toString()); UrlSource urls = UrlSource.From.classPathSystemProperties(); assertThat(urls).containsAll(concat(parentJar.getExpectedClasspathUrls(), jarInFolderWithSpaces.getExpectedClasspathUrls())); } @Test public void recursively_resolves_classpath_attributes_in_manifests() throws Exception { File folder = temporaryFolder.newFolder(); WrittenJarFile grandChildOne = writeJarWithManifestClasspathAttribute(folder, subpath("grandchild", "one")); WrittenJarFile grandChildTwo = writeJarWithManifestClasspathAttribute(folder, subpath("grandchild", "two")); WrittenJarFile grandChildThree = writeJarWithManifestClasspathAttribute(folder, subpath("grandchild", "three")); WrittenJarFile childOne = writeJarWithManifestClasspathAttribute(folder, subpath("child", "one"), grandChildOne.getPathAsAbsoluteUrl(), ManifestClasspathEntry.relativeUrl(grandChildTwo.path)); WrittenJarFile childTwo = writeJarWithManifestClasspathAttribute(folder, subpath("child", "two"), ManifestClasspathEntry.absoluteUrl(grandChildThree.path)); WrittenJarFile parent = writeJarWithManifestClasspathAttribute(folder, "parent", ManifestClasspathEntry.relativePath(childOne.path), ManifestClasspathEntry.absoluteUrl(childTwo.path)); System.setProperty(JAVA_CLASS_PATH_PROP, parent.path.toString()); UrlSource urls = UrlSource.From.classPathSystemProperties(); assertThat(urls).containsAll(concat( grandChildOne.getExpectedClasspathUrls(), grandChildTwo.getExpectedClasspathUrls(), grandChildThree.getExpectedClasspathUrls(), childOne.getExpectedClasspathUrls(), childTwo.getExpectedClasspathUrls(), parent.getExpectedClasspathUrls())); } @Test public void terminates_recursively_resolving_manifest_classpaths_if_manifests_have_circular_reference() throws Exception { File folder = temporaryFolder.newFolder(); File jarOnePath = new File(folder, "one.jar"); File jarTwoPath = new File(folder, "two.jar"); JarFile jarOne = new TestJarFile() .withManifestAttribute(CLASS_PATH, jarTwoPath.getAbsolutePath()) .create(jarOnePath); JarFile jarTwo = new TestJarFile() .withManifestAttribute(CLASS_PATH, jarOnePath.getAbsolutePath()) .create(jarTwoPath); System.setProperty(JAVA_CLASS_PATH_PROP, jarOne.getName()); System.clearProperty(JAVA_BOOT_PATH_PROP); UrlSource urls = UrlSource.From.classPathSystemProperties(); assertThat(urls).containsOnly(toUrl(Paths.get(jarOne.getName())), toUrl(Paths.get(jarTwo.getName()))); } private String subpath(String... 
parts) { return Joiner.on(File.separator).join(parts); } private WrittenJarFile writeJarWithManifestClasspathAttribute(final File folder, String identifier, ManifestClasspathEntry... additionalClasspathManifestClasspathEntries) { Set<ManifestClasspathEntry> classpathManifestEntries = union(createManifestClasspathEntries(identifier), ImmutableSet.copyOf(additionalClasspathManifestClasspathEntries)); JarFile jarFile = new TestJarFile() .withManifestAttribute(CLASS_PATH, Joiner.on(" ").join(FluentIterable.from(classpathManifestEntries).transform(resolveTo(folder)).toSet())) .create(new File(folder, identifier.replace(File.separator, "-") + ".jar")); return new WrittenJarFile(Paths.get(jarFile.getName()), classpathManifestEntries); } private Function<ManifestClasspathEntry, String> resolveTo(final File folder) { return new Function<ManifestClasspathEntry, String>() { @Override public String apply(ManifestClasspathEntry manifestClasspathEntry) { return manifestClasspathEntry.create(folder); } }; } private Set<ManifestClasspathEntry> createManifestClasspathEntries(String infix) { Set<ManifestClasspathEntry> result = new HashSet<>(); for (int i = 0; i < 10; i++) { result.add(ManifestClasspathEntry .absolutePath(Paths.get(File.separator + subpath("some", "path", "parent", infix + i, "")).toAbsolutePath())); } return result; } private String createClassPathProperty(String... paths) { return Joiner.on(File.pathSeparatorChar).join(paths); } private static class WrittenJarFile { private final Path path; private final Set<ManifestClasspathEntry> classpathManifestEntries; private WrittenJarFile(Path path, Set<ManifestClasspathEntry> classpathManifestEntries) { this.path = path; this.classpathManifestEntries = classpathManifestEntries; } public ManifestClasspathEntry getPathAsAbsoluteUrl() { return ManifestClasspathEntry.absoluteUrl(path); } public Iterable<URL> getExpectedClasspathUrls() { return FluentIterable.from(classpathManifestEntries) .transform(new Function<ManifestClasspathEntry, URL>() { @Override public URL apply(ManifestClasspathEntry input) { return input.toExpectedClasspathUrl(); } }); } } private abstract static class ManifestClasspathEntry { final Path path; protected ManifestClasspathEntry(Path path) { this.path = path; } abstract String create(File folder); static ManifestClasspathEntry absoluteUrl(Path path) { return new AbsoluteUrl(path); } static ManifestClasspathEntry absolutePath(Path path) { return new AbsolutePath(path); } static ManifestClasspathEntry relativeUrl(Path path) { return new RelativeUrl(path); } static ManifestClasspathEntry relativePath(Path path) { return new RelativePath(path); } public abstract URL toExpectedClasspathUrl(); private static class AbsoluteUrl extends ManifestClasspathEntry { private AbsoluteUrl(Path path) { super(path); checkArgument(path.isAbsolute(), "Path is not absolute: %s", path); } @Override String create(File folder) { return ensureTrailingSeparatorForFolders(path.toUri().toString()); } @Override public URL toExpectedClasspathUrl() { return toUrl(path); } } private static class AbsolutePath extends ManifestClasspathEntry { private AbsolutePath(Path path) { super(path); checkArgument(path.isAbsolute(), "Path is not absolute: %s", path); } @Override String create(File folder) { String pathString = path.toString(); return ensureTrailingSeparatorForFolders(pathString); } @Override public URL toExpectedClasspathUrl() { return toUrl(path); } } private static class RelativeUrl extends ManifestClasspathEntry { private URL expectedClasspathUrl; 
private RelativeUrl(Path path) { super(path); } @Override String create(File folder) { String relativePath = new RelativePath(path).create(folder); this.expectedClasspathUrl = toUrl(folder.toPath().resolve(relativePath)); return "file:" + relativePath; } @Override public URL toExpectedClasspathUrl() { return expectedClasspathUrl; } } private static class RelativePath extends ManifestClasspathEntry { private URL expectedClasspathUrl; private RelativePath(Path path) { super(path); } @Override String create(File folder) { Path parent = folder.toPath(); Path relativePath = parent.relativize(path); this.expectedClasspathUrl = toUrl(parent.resolve(relativePath)); return ensureTrailingSeparatorForFolders(relativePath.toString()); } @Override public URL toExpectedClasspathUrl() { return expectedClasspathUrl; } } } private static String ensureTrailingSeparatorForFolders(String pathString) { boolean isFolder = !pathString.matches(".*\\.\\w+$"); return isFolder ? ensureTrailingSeparator(pathString) : pathString; } private static String ensureTrailingSeparator(String pathString) { return pathString.endsWith(File.separator) ? pathString : pathString + File.separator; } private static URL toUrl(Path path) { try { URL result = path.toUri().toURL(); return result.toString().endsWith(".jar") ? new URL("jar:" + result + "!/") : result; } catch (MalformedURLException e) { throw new RuntimeException(e); } } }
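/*
 * Illustration (not part of the test above): mirrors the mapping the test's toUrl(...) helper
 * performs, i.e. directory classpath entries become plain file: URLs while *.jar entries are
 * wrapped as "jar:<file-url>!/". The paths are arbitrary examples.
 */
class ClasspathUrlExample {

    public static void main(String[] args) throws java.net.MalformedURLException {
        java.nio.file.Path classesDir = java.nio.file.Paths.get("build", "classes");
        java.nio.file.Path libraryJar = java.nio.file.Paths.get("lib", "some.jar");

        java.net.URL dirUrl = classesDir.toUri().toURL();                           // e.g. file:/.../build/classes
        java.net.URL jarUrl = new java.net.URL("jar:" + libraryJar.toUri() + "!/"); // e.g. jar:file:/.../lib/some.jar!/

        System.out.println(dirUrl);
        System.out.println(jarUrl);
    }
}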
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl.osgi; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Dictionary; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import java.util.concurrent.ConcurrentHashMap; import org.apache.camel.CamelContext; import org.apache.camel.Component; import org.apache.camel.Converter; import org.apache.camel.TypeConverter; import org.apache.camel.TypeConverterLoaderException; import org.apache.camel.impl.converter.AnnotationTypeConverterLoader; import org.apache.camel.impl.osgi.tracker.BundleTracker; import org.apache.camel.impl.osgi.tracker.BundleTrackerCustomizer; import org.apache.camel.impl.scan.AnnotatedWithPackageScanFilter; import org.apache.camel.model.DataFormatDefinition; import org.apache.camel.spi.ComponentResolver; import org.apache.camel.spi.DataFormat; import org.apache.camel.spi.DataFormatResolver; import org.apache.camel.spi.Injector; import org.apache.camel.spi.Language; import org.apache.camel.spi.LanguageResolver; import org.apache.camel.spi.PackageScanFilter; import org.apache.camel.spi.TypeConverterLoader; import org.apache.camel.spi.TypeConverterRegistry; import org.apache.camel.util.IOHelper; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.StringHelper; import org.osgi.framework.Bundle; import org.osgi.framework.BundleActivator; import org.osgi.framework.BundleContext; import org.osgi.framework.BundleEvent; import org.osgi.framework.ServiceRegistration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Activator implements BundleActivator, BundleTrackerCustomizer { public static final String META_INF_COMPONENT = "META-INF/services/org/apache/camel/component/"; public static final String META_INF_LANGUAGE = "META-INF/services/org/apache/camel/language/"; public static final String META_INF_LANGUAGE_RESOLVER = "META-INF/services/org/apache/camel/language/resolver/"; public static final String META_INF_DATAFORMAT = "META-INF/services/org/apache/camel/dataformat/"; public static final String META_INF_TYPE_CONVERTER = "META-INF/services/org/apache/camel/TypeConverter"; public static final String META_INF_FALLBACK_TYPE_CONVERTER = "META-INF/services/org/apache/camel/FallbackTypeConverter"; private static final transient Logger LOG = LoggerFactory.getLogger(Activator.class); private BundleTracker tracker; private Map<Long, List<BaseService>> resolvers = new ConcurrentHashMap<Long, 
List<BaseService>>(); public void start(BundleContext context) throws Exception { LOG.info("Camel activator starting"); tracker = new BundleTracker(context, Bundle.ACTIVE, this); tracker.open(); LOG.info("Camel activator started"); } public void stop(BundleContext context) throws Exception { LOG.info("Camel activator stopping"); tracker.close(); LOG.info("Camel activator stopped"); } public Object addingBundle(Bundle bundle, BundleEvent event) { LOG.debug("Bundle started: {}", bundle.getSymbolicName()); List<BaseService> r = new ArrayList<BaseService>(); registerComponents(bundle, r); registerLanguages(bundle, r); registerDataFormats(bundle, r); registerTypeConverterLoader(bundle, r); for (BaseService service : r) { service.register(); } resolvers.put(bundle.getBundleId(), r); return bundle; } public void modifiedBundle(Bundle bundle, BundleEvent event, Object object) { } public void removedBundle(Bundle bundle, BundleEvent event, Object object) { LOG.debug("Bundle stopped: {}", bundle.getSymbolicName()); List<BaseService> r = resolvers.remove(bundle.getBundleId()); if (r != null) { for (BaseService service : r) { service.unregister(); } } } protected void registerComponents(Bundle bundle, List<BaseService> resolvers) { if (checkCompat(bundle, Component.class)) { Map<String, String> components = new HashMap<String, String>(); for (Enumeration e = bundle.getEntryPaths(META_INF_COMPONENT); e != null && e.hasMoreElements();) { String path = (String) e.nextElement(); LOG.debug("Found entry: {} in bundle {}", path, bundle.getSymbolicName()); String name = path.substring(path.lastIndexOf("/") + 1); components.put(name, path); } if (!components.isEmpty()) { resolvers.add(new BundleComponentResolver(bundle, components)); } } } protected void registerLanguages(Bundle bundle, List<BaseService> resolvers) { if (checkCompat(bundle, Language.class)) { Map<String, String> languages = new HashMap<String, String>(); for (Enumeration e = bundle.getEntryPaths(META_INF_LANGUAGE); e != null && e.hasMoreElements();) { String path = (String) e.nextElement(); LOG.debug("Found entry: {} in bundle {}", path, bundle.getSymbolicName()); String name = path.substring(path.lastIndexOf("/") + 1); languages.put(name, path); } if (!languages.isEmpty()) { resolvers.add(new BundleLanguageResolver(bundle, languages)); } for (Enumeration e = bundle.getEntryPaths(META_INF_LANGUAGE_RESOLVER); e != null && e.hasMoreElements();) { String path = (String) e.nextElement(); LOG.debug("Found entry: {} in bundle {}", path, bundle.getSymbolicName()); String name = path.substring(path.lastIndexOf("/") + 1); resolvers.add(new BundleMetaLanguageResolver(bundle, name, path)); } } } protected void registerDataFormats(Bundle bundle, List<BaseService> resolvers) { if (checkCompat(bundle, DataFormat.class)) { Map<String, String> dataformats = new HashMap<String, String>(); for (Enumeration e = bundle.getEntryPaths(META_INF_DATAFORMAT); e != null && e.hasMoreElements();) { String path = (String) e.nextElement(); LOG.debug("Found entry: {} in bundle {}", path, bundle.getSymbolicName()); String name = path.substring(path.lastIndexOf("/") + 1); dataformats.put(name, path); } if (!dataformats.isEmpty()) { resolvers.add(new BundleDataFormatResolver(bundle, dataformats)); } } } protected void registerTypeConverterLoader(Bundle bundle, List<BaseService> resolvers) { if (checkCompat(bundle, TypeConverter.class)) { URL url1 = bundle.getEntry(META_INF_TYPE_CONVERTER); URL url2 = bundle.getEntry(META_INF_FALLBACK_TYPE_CONVERTER); if (url1 != null || url2 
!= null) { resolvers.add(new BundleTypeConverterLoader(bundle)); } } } protected static class BundleComponentResolver extends BaseResolver<Component> implements ComponentResolver { private final Map<String, String> components; public BundleComponentResolver(Bundle bundle, Map<String, String> components) { super(bundle, Component.class); this.components = components; } public Component resolveComponent(String name, CamelContext context) throws Exception { return createInstance(name, components.get(name), context); } public void register() { doRegister(ComponentResolver.class, "component", components.keySet()); } } protected static class BundleLanguageResolver extends BaseResolver<Language> implements LanguageResolver { private final Map<String, String> languages; public BundleLanguageResolver(Bundle bundle, Map<String, String> languages) { super(bundle, Language.class); this.languages = languages; } public Language resolveLanguage(String name, CamelContext context) { return createInstance(name, languages.get(name), context); } public void register() { doRegister(LanguageResolver.class, "language", languages.keySet()); } } protected static class BundleMetaLanguageResolver extends BaseResolver<LanguageResolver> implements LanguageResolver { private final String name; private final String path; public BundleMetaLanguageResolver(Bundle bundle, String name, String path) { super(bundle, LanguageResolver.class); this.name = name; this.path = path; } public Language resolveLanguage(String name, CamelContext context) { LanguageResolver resolver = createInstance(this.name, path, context); return resolver.resolveLanguage(name, context); } public void register() { doRegister(LanguageResolver.class, "resolver", name); } } protected static class BundleDataFormatResolver extends BaseResolver<DataFormat> implements DataFormatResolver { private final Map<String, String> dataformats; public BundleDataFormatResolver(Bundle bundle, Map<String, String> dataformats) { super(bundle, DataFormat.class); this.dataformats = dataformats; } public DataFormat resolveDataFormat(String name, CamelContext context) { return createInstance(name, dataformats.get(name), context); } public DataFormatDefinition resolveDataFormatDefinition(String name, CamelContext context) { return null; } public void register() { doRegister(DataFormatResolver.class, "dataformat", dataformats.keySet()); } } protected static class BundleTypeConverterLoader extends BaseResolver<TypeConverter> implements TypeConverterLoader { private final AnnotationTypeConverterLoader loader = new Loader(); private final Bundle bundle; public BundleTypeConverterLoader(Bundle bundle) { super(bundle, TypeConverter.class); ObjectHelper.notNull(bundle, "bundle"); this.bundle = bundle; } public synchronized void load(TypeConverterRegistry registry) throws TypeConverterLoaderException { // must be synchronized to ensure we don't load type converters concurrently // which cause Camel apps to fails in OSGi thereafter try { loader.load(registry); } catch (Exception e) { throw new TypeConverterLoaderException("Cannot load type converters using OSGi bundle: " + bundle.getBundleId(), e); } } public void register() { doRegister(TypeConverterLoader.class); } class Loader extends AnnotationTypeConverterLoader { Loader() { super(null); } @SuppressWarnings("unchecked") public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException { PackageScanFilter test = new AnnotatedWithPackageScanFilter(Converter.class, true); Set<Class<?>> classes = new 
LinkedHashSet<Class<?>>(); Set<String> packages = getConverterPackages(bundle.getEntry(META_INF_TYPE_CONVERTER)); if (LOG.isTraceEnabled()) { LOG.trace("Found {} {} packages: {}", new Object[]{packages.size(), META_INF_TYPE_CONVERTER, packages}); } // if we only have camel-core on the classpath then we have already pre-loaded all its type converters // but we exposed the "org.apache.camel.core" package in camel-core. This ensures there is at least one // packageName to scan, which triggers the scanning process. That allows us to ensure that we look for // META-INF/services in all the JARs. if (packages.size() == 1 && "org.apache.camel.core".equals(packages.iterator().next())) { LOG.debug("No additional package names found in classpath for annotated type converters."); // no additional package names found to load type converters so break out return; } // now filter out org.apache.camel.core as its not needed anymore (it was just a dummy) packages.remove("org.apache.camel.core"); for (String pkg : packages) { if (StringHelper.hasUpperCase(pkg)) { // its a FQN class name so load it directly LOG.trace("Loading {} class", pkg); try { Class clazz = bundle.loadClass(pkg); if (test.matches(clazz)) { classes.add(clazz); } // the class could be found and loaded so continue to next continue; } catch (Throwable t) { // Ignore LOG.trace("Failed to load " + pkg + " class due " + t.getMessage() + ". This exception will be ignored.", t); } } // its not a FQN but a package name so scan for classes in the bundle Enumeration<URL> e = bundle.findEntries("/" + pkg.replace('.', '/'), "*.class", true); while (e != null && e.hasMoreElements()) { String path = e.nextElement().getPath(); String externalName = path.substring(path.charAt(0) == '/' ? 1 : 0, path.indexOf('.')).replace('/', '.'); LOG.trace("Loading {} class", externalName); try { Class clazz = bundle.loadClass(externalName); if (test.matches(clazz)) { classes.add(clazz); } } catch (Throwable t) { // Ignore LOG.trace("Failed to load " + externalName + " class due " + t.getMessage() + ". 
This exception will be ignored.", t); } } } // load the classes into type converter registry LOG.info("Found {} @Converter classes to load", classes.size()); for (Class type : classes) { if (LOG.isTraceEnabled()) { LOG.trace("Loading converter class: {}", ObjectHelper.name(type)); } loadConverterMethods(registry, type); } // register fallback converters URL fallbackUrl = bundle.getEntry(META_INF_FALLBACK_TYPE_CONVERTER); if (fallbackUrl != null) { TypeConverter tc = createInstance("FallbackTypeConverter", fallbackUrl, registry.getInjector()); registry.addFallbackTypeConverter(tc, false); } // now clear the maps so we do not hold references visitedClasses.clear(); visitedURIs.clear(); } } } protected abstract static class BaseResolver<T> extends BaseService { private final Class<T> type; public BaseResolver(Bundle bundle, Class<T> type) { super(bundle); this.type = type; } protected T createInstance(String name, String path, CamelContext context) { if (path == null) { return null; } URL url = bundle.getEntry(path); LOG.debug("The entry {}'s url is {}", name, url); return createInstance(name, url, context.getInjector()); } @SuppressWarnings("unchecked") protected T createInstance(String name, URL url, Injector injector) { try { Properties properties = loadProperties(url); String classname = (String) properties.get("class"); Class<T> type = bundle.loadClass(classname); if (!this.type.isAssignableFrom(type)) { throw new IllegalArgumentException("Type is not a " + this.type.getName() + " implementation. Found: " + type.getName()); } return injector.newInstance(type); } catch (ClassNotFoundException e) { throw new IllegalArgumentException("Invalid URI, no " + this.type.getName() + " registered for scheme : " + name, e); } } } protected abstract static class BaseService { protected final Bundle bundle; private ServiceRegistration reg; protected BaseService(Bundle bundle) { this.bundle = bundle; } public abstract void register(); protected void doRegister(Class type, String key, Collection<String> value) { doRegister(type, key, value.toArray(new String[value.size()])); } protected void doRegister(Class type, String key, Object value) { Hashtable<String, Object> props = new Hashtable<String, Object>(); props.put(key, value); doRegister(type, props); } protected void doRegister(Class type) { doRegister(type, null); } protected void doRegister(Class type, Dictionary props) { reg = bundle.getBundleContext().registerService(type.getName(), this, props); } public void unregister() { reg.unregister(); } } protected static Properties loadProperties(URL url) { Properties properties = new Properties(); BufferedInputStream reader = null; try { reader = new BufferedInputStream(url.openStream()); properties.load(reader); } catch (IOException e) { throw new RuntimeException(e); } finally { IOHelper.close(reader, "properties", LOG); } return properties; } protected static boolean checkCompat(Bundle bundle, Class clazz) { // Check bundle compatibility try { if (bundle.loadClass(clazz.getName()) != clazz) { return false; } } catch (Throwable t) { return false; } return true; } protected static Set<String> getConverterPackages(URL resource) { Set<String> packages = new LinkedHashSet<String>(); if (resource != null) { BufferedReader reader = null; try { reader = new BufferedReader(new InputStreamReader(resource.openStream())); while (true) { String line = reader.readLine(); if (line == null) { break; } line = line.trim(); if (line.startsWith("#") || line.length() == 0) { continue; } StringTokenizer iter = new 
StringTokenizer(line, ","); while (iter.hasMoreTokens()) { String name = iter.nextToken().trim(); if (name.length() > 0) { packages.add(name); } } } } catch (Exception ignore) { // Do nothing here } finally { IOHelper.close(reader, null, LOG); } } return packages; } }
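/*
 * Illustration (not part of the activator above): the entries the activator discovers under
 * META-INF/services/org/apache/camel/component/<scheme> (and the language/dataformat variants)
 * are plain properties files whose "class" key names the implementation, which is what
 * createInstance(...) above reads before handing the class to the injector. The scheme and class
 * name below are made-up examples.
 */
class ComponentDescriptorExample {

    public static void main(String[] args) throws java.io.IOException {
        // Contents of a hypothetical META-INF/services/org/apache/camel/component/demo entry.
        String descriptor = "class=org.example.camel.DemoComponent\n";

        java.util.Properties properties = new java.util.Properties();
        properties.load(new java.io.StringReader(descriptor));

        // The activator would resolve this name with bundle.loadClass(...) and instantiate it.
        System.out.println(properties.getProperty("class")); // -> org.example.camel.DemoComponent
    }
}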
/* * HE_Mesh Frederik Vanhoutte - www.wblut.com * * https://github.com/wblut/HE_Mesh * A Processing/Java library for for creating and manipulating polygonal meshes. * * Public Domain: http://creativecommons.org/publicdomain/zero/1.0/ */ package wblut.nurbs; import wblut.geom.WB_Coord; import wblut.geom.WB_GeometryFactory; import wblut.geom.WB_Point; import wblut.geom.WB_PointHomogeneous; import wblut.geom.WB_Surface; import wblut.hemesh.HEC_FromFacelist; import wblut.hemesh.HE_Mesh; /** * */ public class WB_BSplineSurface implements WB_Surface { /** * */ private static WB_GeometryFactory gf = new WB_GeometryFactory(); /** * */ protected WB_NurbsKnot uknot; /** * */ protected WB_NurbsKnot vknot; /** * */ protected WB_Coord[][] points; /** * */ protected int p; /** * */ protected int n; /** * */ protected int q; /** * */ protected int m; /** * */ public WB_BSplineSurface() { } /** * * * @param controlPoints * @param uknot * @param vknot */ public WB_BSplineSurface(final WB_Coord[][] controlPoints, final WB_NurbsKnot uknot, final WB_NurbsKnot vknot) { if (uknot.n != controlPoints.length - 1) { throw new IllegalArgumentException("U knot size and/or degree doesn't match number of control points."); } if (vknot.n != controlPoints[0].length - 1) { throw new IllegalArgumentException("V knot size and/or degree doesn't match number of control points."); } p = uknot.p(); n = uknot.n(); q = vknot.p(); m = vknot.n(); this.uknot = uknot; this.vknot = vknot; points = controlPoints; // for (int i = 0; i < points.length; i++) { // System.out.println(i + " " + points[i][0]); // System.out.println(i + " " + points[i][1]); // } } /** * * * @param controlPoints * @param uknot * @param vknot */ public WB_BSplineSurface(final WB_PointHomogeneous[][] controlPoints, final WB_NurbsKnot uknot, final WB_NurbsKnot vknot) { if (uknot.n != controlPoints.length - 1) { throw new IllegalArgumentException("U knot size and/or degree doesn't match number of control points."); } if (vknot.n != controlPoints[0].length - 1) { throw new IllegalArgumentException("V knot size and/or degree doesn't match number of control points."); } p = uknot.p(); n = uknot.n(); q = vknot.p(); m = vknot.n(); this.uknot = uknot; this.vknot = vknot; points = new WB_Point[n + 1][m + 1]; for (int i = 0; i <= n; i++) { for (int j = 0; j <= m; j++) { points[i][j] = controlPoints[i][j].project(); } } } /** * * * @param controlPoints * @param udegree * @param vdegree */ public WB_BSplineSurface(final WB_Coord[][] controlPoints, final int udegree, final int vdegree) { uknot = new WB_NurbsKnot(controlPoints.length, udegree); vknot = new WB_NurbsKnot(controlPoints[0].length, vdegree); p = uknot.p(); n = uknot.n(); q = vknot.p(); m = vknot.n(); points = controlPoints; } /** * * * @param point00 * @param point10 * @param point01 * @param point11 */ public WB_BSplineSurface(final WB_Coord point00, final WB_Coord point10, final WB_Coord point01, final WB_Coord point11) { uknot = new WB_NurbsKnot(2, 1); vknot = new WB_NurbsKnot(2, 1); p = uknot.p(); n = uknot.n(); q = vknot.p(); m = vknot.n(); points = new WB_Point[2][2]; points[0][0] = point00; points[0][1] = point01; points[1][0] = point10; points[1][1] = point11; } /* * (non-Javadoc) * * @see wblut.nurbs.WB_Surface#surfacePoint(double, double) */ @Override public WB_Point surfacePoint(final double u, final double v) { final int uspan = uknot.span(u); final double[] Nu = uknot.basisFunctions(uspan, u); final int vspan = vknot.span(v); final double[] Nv = vknot.basisFunctions(vspan, v); final int uind = uspan - 
p; final WB_Point S = new WB_Point(); WB_Point tmp; for (int el = 0; el <= q; el++) { tmp = new WB_Point(); final int vind = vspan - q + el; for (int k = 0; k <= p; k++) { tmp.addSelf(Nu[k] * points[uind + k][vind].xd(), Nu[k] * points[uind + k][vind].yd(), Nu[k] * points[uind + k][vind].zd()); } S.addSelf(tmp.mulSelf(Nv[el])); } return S; } /** * * * @return */ public HE_Mesh toControlHemesh() { final WB_Coord[] cpoints = new WB_Point[(n + 1) * (m + 1)]; for (int i = 0; i <= n; i++) { for (int j = 0; j <= m; j++) { cpoints[i + (n + 1) * j] = points[i][j]; } } final int[][] faces = new int[n * m][4]; for (int i = 0; i < n; i++) { for (int j = 0; j < m; j++) { faces[i + n * j][0] = i + (n + 1) * j; faces[i + n * j][1] = i + 1 + (n + 1) * j; faces[i + n * j][2] = i + 1 + (n + 1) * (j + 1); faces[i + n * j][3] = i + (n + 1) * (j + 1); } } final HEC_FromFacelist fl = new HEC_FromFacelist(); fl.setFaces(faces).setVertices(cpoints); return new HE_Mesh(fl); } /** * * * @param u * @return */ public WB_BSplineSurface insertUKnot(final double u) { return insertUKnot(u, 1); } /** * * * @param u * @return */ public WB_BSplineSurface insertUKnotMax(final double u) { final int k = uknot.multiplicity(u); return insertUKnot(u, p - k); } /** * * * @param u * @param r * @return */ public WB_BSplineSurface insertUKnot(final double u, final int r) { final int nq = n + r; final int k = uknot.span(u); final int s = uknot.multiplicity(u, k); if (r + s > p) { throw new IllegalArgumentException("Attempting to increase knot multiplicity above curve degree."); } final WB_NurbsKnot UQ = new WB_NurbsKnot(n + 1 + r, p); for (int i = 0; i <= k; i++) { UQ.setValue(i, uknot.value(i)); } for (int i = 1; i <= r; i++) { UQ.setValue(k + i, u); } for (int i = k + 1; i <= n + p + 1; i++) { UQ.setValue(i + r, uknot.value(i)); } int L = 0; final double[][] alpha = new double[p - s + 1][r + 1]; for (int j = 1; j <= r; j++) { L = k - p + j; for (int i = 0; i <= p - j - s; i++) { alpha[i][j] = (u - uknot.value(L + i)) / (uknot.value(i + k + 1) - uknot.value(L + i)); } } final WB_Point[][] Q = new WB_Point[nq + 1][m + 1]; final WB_Point[] RW = new WB_Point[p - s + 1]; for (int row = 0; row <= m; row++) { for (int i = 0; i <= k - p; i++) { Q[i][row] = new WB_Point(points[i][row]); } for (int i = k - s; i <= n; i++) { Q[i + r][row] = new WB_Point(points[i][row]); } for (int i = 0; i <= p - s; i++) { RW[i] = new WB_Point(points[k - p + i][row]); } for (int j = 1; j <= r; j++) { L = k - p + j; for (int i = 0; i <= p - j - s; i++) { RW[i] = gf.createInterpolatedPoint(RW[i], RW[i + 1], alpha[i][j]); } Q[L][row] = RW[0]; Q[k + r - j - s][row] = RW[p - j - s]; } for (int i = L + 1; i < k - s; i++) { Q[i][row] = RW[i - L]; } } return new WB_BSplineSurface(Q, UQ, vknot); } /** * * * @param v * @return */ public WB_BSplineSurface insertVKnot(final double v) { return insertVKnot(v, 1); } /** * * * @param v * @return */ public WB_BSplineSurface insertVKnotMax(final double v) { final int k = vknot.multiplicity(v); return insertVKnot(v, q - k); } /** * * * @param v * @param r * @return */ public WB_BSplineSurface insertVKnot(final double v, final int r) { final int mq = m + r; final int k = vknot.span(v); final int s = vknot.multiplicity(v, k); if (r + s > q) { throw new IllegalArgumentException("Attempting to increase knot multiplicity above curve degree."); } final WB_NurbsKnot VQ = new WB_NurbsKnot(m + 1 + r, q); for (int i = 0; i <= k; i++) { VQ.setValue(i, vknot.value(i)); } for (int i = 1; i <= r; i++) { VQ.setValue(k + i, v); } for (int i = 
k + 1; i <= m + q + 1; i++) { VQ.setValue(i + r, vknot.value(i)); } int L = 0; final double[][] alpha = new double[q - s + 1][r + 1]; for (int j = 1; j <= r; j++) { L = k - q + j; for (int i = 0; i <= q - j - s; i++) { alpha[i][j] = (v - vknot.value(L + i)) / (vknot.value(i + k + 1) - vknot.value(L + i)); } } final WB_Point[][] Q = new WB_Point[n + 1][mq + 1]; final WB_Point[] RW = new WB_Point[q - s + 1]; for (int col = 0; col <= n; col++) { for (int i = 0; i <= k - q; i++) { Q[col][i] = new WB_Point(points[col][i]); } for (int i = k - s; i <= m; i++) { Q[col][i + r] = new WB_Point(points[col][i]); } for (int i = 0; i <= q - s; i++) { RW[i] = new WB_Point(points[col][k - q + i]); } for (int j = 1; j <= r; j++) { L = k - q + j; for (int i = 0; i <= q - j - s; i++) { RW[i] = gf.createInterpolatedPoint(RW[i], RW[i + 1], alpha[i][j]); } Q[col][L] = RW[0]; Q[col][k + r - j - s] = RW[q - j - s]; } for (int i = L + 1; i < k - s; i++) { Q[col][i] = RW[i - L]; } } return new WB_BSplineSurface(Q, uknot, VQ); } /** * * * @param u * @return */ public WB_BSpline isoCurveU(final double u) { final WB_Point[] cpoints = new WB_Point[m + 1]; final int span = uknot.span(u); double[] N; for (int j = 0; j <= m; j++) { N = uknot.basisFunctions(span, u); cpoints[j] = new WB_Point(); for (int i = 0; i <= p; i++) { final WB_Coord tmp = points[span - p + i][j]; cpoints[j].addSelf(N[i] * tmp.xd(), N[i] * tmp.yd(), N[i] * tmp.zd()); } } return new WB_BSpline(cpoints, vknot); } /** * * * @param v * @return */ public WB_BSpline isoCurveV(final double v) { final WB_Point[] cpoints = new WB_Point[n + 1]; final int span = vknot.span(v); double[] N; for (int i = 0; i <= n; i++) { N = vknot.basisFunctions(span, v); cpoints[i] = new WB_Point(); for (int j = 0; j <= q; j++) { final WB_Coord tmp = points[i][span - q + j]; cpoints[i].addSelf(N[j] * tmp.xd(), N[j] * tmp.yd(), N[j] * tmp.zd()); } } return new WB_BSpline(cpoints, uknot); } /** * * * @return */ public WB_Coord[][] points() { return points; } /** * * * @return */ public int p() { return p; } /** * * * @return */ public int n() { return n; } /** * * * @return */ public int q() { return q; } /** * * * @return */ public int m() { return m; } /** * * * @return */ public WB_NurbsKnot uknot() { return uknot; } /** * * * @return */ public WB_NurbsKnot vknot() { return vknot; } /** * * * @param u * @return */ public WB_BSplineSurface[] splitU(final double u) { final WB_BSplineSurface newBSplineSurface = insertUKnotMax(u); final int k = newBSplineSurface.uknot().span(u); final int km = newBSplineSurface.uknot().m; final WB_NurbsKnot knot1 = new WB_NurbsKnot(k + 1 - p, p); for (int i = 0; i < knot1.m; i++) { knot1.setValue(i, newBSplineSurface.uknot().value(i)); } knot1.setValue(knot1.m, u); knot1.normalize(); final WB_Coord[][] points1 = new WB_Coord[k + 1 - p][m + 1]; for (int j = 0; j <= m; j++) { for (int i = 0; i < k + 1 - p; i++) { points1[i][j] = newBSplineSurface.points[i][j]; } } final WB_NurbsKnot knot2 = new WB_NurbsKnot(km - k, p); for (int i = 0; i <= p; i++) { knot2.setValue(i, u); } for (int i = k + 1; i <= km; i++) { knot2.setValue(i - k + p, newBSplineSurface.uknot().value(i)); } knot2.normalize(); final WB_Coord[][] points2 = new WB_Coord[km - k][m + 1]; for (int j = 0; j <= m; j++) { for (int i = 0; i < km - k; i++) { points2[i][j] = newBSplineSurface.points[k - p + i][j]; } } final WB_BSplineSurface[] splitSurfaces = new WB_BSplineSurface[2]; splitSurfaces[0] = new WB_BSplineSurface(points1, knot1, vknot); splitSurfaces[1] = new WB_BSplineSurface(points2, 
knot2, vknot); return splitSurfaces; } /** * Splits the surface along the isoparametric curve at parameter v. * * @param v the v-parameter at which to split * @return two surfaces: index 0 covers the lower v-range, index 1 the upper v-range */ public WB_BSplineSurface[] splitV(final double v) { final WB_BSplineSurface newBSplineSurface = insertVKnotMax(v); final int k = newBSplineSurface.vknot().span(v); final int km = newBSplineSurface.vknot().m; final WB_NurbsKnot knot1 = new WB_NurbsKnot(k + 1 - q, q); for (int i = 0; i < knot1.m; i++) { knot1.setValue(i, newBSplineSurface.vknot().value(i)); } knot1.setValue(knot1.m, v); knot1.normalize(); final WB_Coord[][] points1 = new WB_Coord[n + 1][k + 1 - q]; for (int j = 0; j <= n; j++) { for (int i = 0; i < k + 1 - q; i++) { points1[j][i] = newBSplineSurface.points[j][i]; } } final WB_NurbsKnot knot2 = new WB_NurbsKnot(km - k, q); for (int i = 0; i <= q; i++) { knot2.setValue(i, v); } for (int i = k + 1; i <= km; i++) { knot2.setValue(i - k + q, newBSplineSurface.vknot().value(i)); } knot2.normalize(); final WB_Coord[][] points2 = new WB_Coord[n + 1][km - k]; for (int j = 0; j <= n; j++) { for (int i = 0; i < km - k; i++) { points2[j][i] = newBSplineSurface.points[j][k - q + i]; } } final WB_BSplineSurface[] splitSurfaces = new WB_BSplineSurface[2]; splitSurfaces[0] = new WB_BSplineSurface(points1, uknot, knot1); splitSurfaces[1] = new WB_BSplineSurface(points2, uknot, knot2); return splitSurfaces; } /** * Splits the surface at (u, v) into four sub-patches. * * @param u the u-parameter at which to split * @param v the v-parameter at which to split * @return four surfaces ordered [lower u/lower v, lower u/upper v, upper u/lower v, upper u/upper v] */ public WB_BSplineSurface[] split(final double u, final double v) { final WB_BSplineSurface[] splitSurfaces = new WB_BSplineSurface[4]; WB_BSplineSurface[] tmp = new WB_BSplineSurface[2]; tmp = splitU(u); splitSurfaces[0] = tmp[0]; splitSurfaces[2] = tmp[1]; tmp = splitSurfaces[0].splitV(v); splitSurfaces[0] = tmp[0]; splitSurfaces[1] = tmp[1]; tmp = splitSurfaces[2].splitV(v); splitSurfaces[2] = tmp[0]; splitSurfaces[3] = tmp[1]; return splitSurfaces; } /* * (non-Javadoc) * * @see wblut.nurbs.WB_Curve#loweru() */ @Override public double loweru() { return uknot.value(0); } /* * (non-Javadoc) * * @see wblut.nurbs.WB_Curve#upperu() */ @Override public double upperu() { return uknot.value(uknot.m); } /* * (non-Javadoc) * * @see wblut.nurbs.WB_Curve#lowerv() */ @Override public double lowerv() { return vknot.value(0); } /* * (non-Javadoc) * * @see wblut.nurbs.WB_Curve#upperv() */ @Override public double upperv() { return vknot.value(vknot.m); } }
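/*
 * Editorial usage sketch (not part of the original wblut source). It exercises only the
 * public methods visible above and assumes "surf" is a WB_BSplineSurface that was already
 * built elsewhere from a control-point grid and u/v knot vectors; parameter values are
 * assumed to lie inside [loweru(), upperu()] x [lowerv(), upperv()].
 *
 *   WB_BSplineSurface surf = ...;                        // constructed elsewhere
 *   WB_BSplineSurface refined = surf.insertUKnot(0.5);   // insert a single u-knot at 0.5
 *   WB_BSpline isoU = surf.isoCurveU(0.25);              // curve at constant u = 0.25, parameterized over v
 *   WB_BSpline isoV = surf.isoCurveV(0.75);              // curve at constant v = 0.75, parameterized over u
 *   WB_BSplineSurface[] quads = surf.split(0.5, 0.5);    // four sub-patches meeting at (u, v) = (0.5, 0.5)
 *   HE_Mesh controlNet = surf.toControlHemesh();         // quad mesh connecting the control points
 */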
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.impl.util; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.net.URLClassLoader; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import org.antlr.runtime.CommonTokenStream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.util.StringUtils; import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapReduce; import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims; import org.apache.pig.impl.PigContext; import org.apache.tools.bzip2r.BZip2Constants; import org.joda.time.DateTime; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.metastore.HiveMetaStore; import com.google.common.collect.Multimaps; import dk.brics.automaton.Automaton; public class JarManager { private static Log log = LogFactory.getLog(JarManager.class); private static enum DefaultPigPackages { PIG(PigMapReduce.class), BZIP2R(BZip2Constants.class), AUTOMATON(Automaton.class), ANTLR(CommonTokenStream.class), GUAVA(Multimaps.class), JODATIME(DateTime.class); private final Class pkgClass; DefaultPigPackages(Class pkgClass) { this.pkgClass = pkgClass; } public Class getPkgClass() { return pkgClass; } } private static enum HivePackages { HIVE_EXEC(AbstractMapJoinOperator.class), HIVE_METASTORE(HiveMetaStore.class); private final Class pkgClass; HivePackages(Class pkgClass) { this.pkgClass = pkgClass; } public Class getPkgClass() { return pkgClass; } } public static File createPigScriptUDFJar(PigContext pigContext) throws IOException { File scriptUDFJarFile = File.createTempFile("PigScriptUDF", ".jar"); // ensure the scriptUDFJarFile is deleted on exit scriptUDFJarFile.deleteOnExit(); FileOutputStream fos = new FileOutputStream(scriptUDFJarFile); HashMap<String, String> contents = new HashMap<String, String>(); createPigScriptUDFJar(fos, pigContext, contents); if (!contents.isEmpty()) { FileInputStream fis = null; String md5 = null; try { fis = new FileInputStream(scriptUDFJarFile); md5 = org.apache.commons.codec.digest.DigestUtils.md5Hex(fis); } finally { if (fis != null) { fis.close(); } } File newScriptUDFJarFile = new 
File(scriptUDFJarFile.getParent(), "PigScriptUDF-" + md5 + ".jar"); scriptUDFJarFile.renameTo(newScriptUDFJarFile); return newScriptUDFJarFile; } return null; } private static void createPigScriptUDFJar(OutputStream os, PigContext pigContext, HashMap<String, String> contents) throws IOException { JarOutputStream jarOutputStream = new JarOutputStream(os); for (String path: pigContext.scriptFiles) { log.debug("Adding entry " + path + " to job jar" ); InputStream stream = null; File inputFile = new File(path); if (inputFile.exists()) { stream = new FileInputStream(inputFile); } else { stream = PigContext.getClassLoader().getResourceAsStream(path); } if (stream==null) { throw new IOException("Cannot find " + path); } try { addStream(jarOutputStream, path, stream, contents, inputFile.lastModified()); } finally { stream.close(); } } for (Map.Entry<String, File> entry : pigContext.getScriptFiles().entrySet()) { log.debug("Adding entry " + entry.getKey() + " to job jar" ); InputStream stream = null; if (entry.getValue().exists()) { stream = new FileInputStream(entry.getValue()); } else { stream = PigContext.getClassLoader().getResourceAsStream(entry.getValue().getPath()); } if (stream==null) { throw new IOException("Cannot find " + entry.getValue().getPath()); } try { addStream(jarOutputStream, entry.getKey(), stream, contents, entry.getValue().lastModified()); } finally { stream.close(); } } if (!contents.isEmpty()) { jarOutputStream.close(); } else { os.close(); } } /** * Creates a Classloader based on the passed jarFile and any extra jar files. * * @param jarFile * the jar file to be part of the newly created Classloader. This jar file plus any * jars in the extraJars list will constitute the classpath. * @return the new Classloader. * @throws MalformedURLException */ static ClassLoader createCl(String jarFile, PigContext pigContext) throws MalformedURLException { int len = pigContext.extraJars.size(); int passedJar = jarFile == null ? 0 : 1; URL urls[] = new URL[len + passedJar]; if (jarFile != null) { urls[0] = new URL("file:" + jarFile); } for (int i = 0; i < pigContext.extraJars.size(); i++) { urls[i + passedJar] = new URL("file:" + pigContext.extraJars.get(i)); } return new URLClassLoader(urls, PigMapReduce.class.getClassLoader()); } /** * Adds a stream to a Jar file. * * @param os * the OutputStream of the Jar file to which the stream will be added. * @param name * the name of the stream. * @param is * the stream to add. * @param contents * the current contents of the Jar file. (We use this to avoid adding two streams * with the same name. 
* @param timestamp * timestamp of the entry * @throws IOException */ private static void addStream(JarOutputStream os, String name, InputStream is, Map<String, String> contents, long timestamp) throws IOException { if (contents.get(name) != null) { return; } contents.put(name, ""); JarEntry entry = new JarEntry(name); entry.setTime(timestamp); os.putNextEntry(entry); byte buffer[] = new byte[4096]; int rc; while ((rc = is.read(buffer)) > 0) { os.write(buffer, 0, rc); } } public static List<String> getDefaultJars() { List<String> defaultJars = new ArrayList<String>(); for (DefaultPigPackages pkgToSend : DefaultPigPackages.values()) { if(pkgToSend.equals(DefaultPigPackages.GUAVA) && HadoopShims.isHadoopYARN()) { continue; //Skip } String jar = findContainingJar(pkgToSend.getPkgClass()); if (!defaultJars.contains(jar)) { defaultJars.add(jar); } } return defaultJars; } public static List<String> getHiveJars() { List<String> hiveJars = new ArrayList<String>(); for (HivePackages pkgToSend : HivePackages.values()) { String jar = findContainingJar(pkgToSend.getPkgClass()); if (!hiveJars.contains(jar)) { hiveJars.add(jar); } } return hiveJars; } /** * Find a jar that contains a class of the same name, if any. It will return a jar file, even if * that is not the first thing on the class path that has a class with the same name. * * @param my_class * the class to find * @return a jar file that contains the class, or null * @throws IOException */ public static String findContainingJar(Class my_class) { ClassLoader loader = PigContext.getClassLoader(); String class_file = my_class.getName().replaceAll("\\.", "/") + ".class"; try { Enumeration<URL> itr = null; //Try to find the class in registered jars if (loader instanceof URLClassLoader) { itr = ((URLClassLoader) loader).findResources(class_file); } //Try system classloader if not URLClassLoader or no resources found in URLClassLoader if (itr == null || !itr.hasMoreElements()) { itr = loader.getResources(class_file); } for (; itr.hasMoreElements();) { URL url = (URL) itr.nextElement(); if ("jar".equals(url.getProtocol())) { String toReturn = url.getPath(); if (toReturn.startsWith("file:")) { toReturn = toReturn.substring("file:".length()); } // URLDecoder is a misnamed class, since it actually decodes // x-www-form-urlencoded MIME type rather than actual // URL encoding (which the file path has). Therefore it would // decode +s to ' 's which is incorrect (spaces are actually // either unencoded or encoded as "%20"). Replace +s first, so // that they are kept sacred during the decoding process. toReturn = toReturn.replaceAll("\\+", "%2B"); toReturn = URLDecoder.decode(toReturn, "UTF-8"); return toReturn.replaceAll("!.*$", ""); } } } catch (IOException e) { throw new RuntimeException(e); } return null; } /** * Add the jars containing the given classes to the job's configuration * such that JobClient will ship them to the cluster and add them to * the DistributedCache * * @param job * Job object * @param classes * classes to find * @throws IOException */ public static void addDependencyJars(Job job, Class<?>... 
classes) throws IOException { Configuration conf = job.getConfiguration(); FileSystem fs = FileSystem.getLocal(conf); Set<String> jars = new HashSet<String>(); jars.addAll(conf.getStringCollection("tmpjars")); addQualifiedJarsName(fs, jars, classes); if (jars.isEmpty()) return; conf.set("tmpjars", StringUtils.arrayToString(jars.toArray(new String[0]))); } /** * Add the qualified path name of jars containing the given classes * * @param fs * FileSystem object * @param jars * the resolved path names to be added to this set * @param classes * classes to find */ private static void addQualifiedJarsName(FileSystem fs, Set<String> jars, Class<?>... classes) { URI fsUri = fs.getUri(); Path workingDir = fs.getWorkingDirectory(); for (Class<?> clazz : classes) { String jarName = findContainingJar(clazz); if (jarName == null) { log.warn("Could not find jar for class " + clazz); continue; } jars.add(new Path(jarName).makeQualified(fsUri, workingDir).toString()); } } }
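/*
 * Editorial usage sketch (not part of the original Pig source). It shows how the public
 * helpers above are typically combined when preparing a Hadoop job; the job name and the
 * example classes are illustrative assumptions only.
 *
 *   Job job = Job.getInstance(new Configuration(), "pig-example");
 *   // ship the jars containing these classes via the "tmpjars" key / DistributedCache
 *   JarManager.addDependencyJars(job, DateTime.class, Multimaps.class);
 *   // locate the jar that provides a class (may return null when the class is not in a jar)
 *   String pigJar = JarManager.findContainingJar(PigMapReduce.class);
 *   // jars Pig always ships with a job (pig, antlr, joda-time, ...)
 *   List<String> defaults = JarManager.getDefaultJars();
 */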
/* * * * * Copyright (c) 2015-2016 www.Tipi.me. * * Created by Ashkan Hesaraki. * * Ashkan.Hesaraki@gmail.com * */ package me.tipi.kiosk.ui.fragments; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothSocket; import android.content.Context; import android.os.Bundle; import android.os.CountDownTimer; import android.os.Handler; import android.os.Message; import android.support.design.widget.Snackbar; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageButton; import android.widget.TextView; import com.RT_Printer.BluetoothPrinter.BLUETOOTH.BluetoothPrintDriver; import com.afollestad.materialdialogs.MaterialDialog; import com.f2prateek.rx.preferences.Preference; import com.google.android.gms.analytics.HitBuilders; import com.google.android.gms.analytics.Tracker; import com.squareup.otto.Bus; import java.io.IOException; import java.util.Locale; import java.util.Set; import java.util.UUID; import javax.inject.Inject; import javax.inject.Named; import butterknife.Bind; import butterknife.ButterKnife; import butterknife.OnClick; import me.tipi.kiosk.R; import me.tipi.kiosk.KioskApp; import me.tipi.kiosk.data.PrinterPreference; import me.tipi.kiosk.data.api.ApiConstants; import me.tipi.kiosk.data.api.models.Guest; import me.tipi.kiosk.ui.AppContainer; import me.tipi.kiosk.ui.FindUserActivity; import me.tipi.kiosk.ui.SignUpActivity; import me.tipi.kiosk.ui.events.BackShouldShowEvent; import me.tipi.kiosk.ui.events.SettingShouldShowEvent; import me.tipi.kiosk.ui.misc.CircleCountDownView; import me.tipi.kiosk.util.Strings; import timber.log.Timber; import static com.RT_Printer.BluetoothPrinter.BLUETOOTH.BluetoothPrintDriver.BT_Write; public class SuccessSignUpFragment extends Fragment { public static final String TAG = SuccessSignUpFragment.class.getSimpleName(); @Inject PrinterPreference printerPreference; @Inject Guest guest; @Inject AppContainer appContainer; @Inject Bus bus; @Inject Tracker tracker; @Inject @Named(ApiConstants.HOSTEL_NAME) Preference<String> hostelName; @Bind(R.id.printer_btn) ImageButton printerBtn; @Bind(R.id.title) TextView titleView; @Bind(R.id.circle_count_down_view) CircleCountDownView circleCountDownView; private BluetoothAdapter mBluetoothAdapter; private BluetoothDevice mBluetoothDevice; private MaterialDialog loading; private BluetoothSocket mBluetoothSocket; private UUID applicationUUID; private BluetoothPrintDriver mChatService = null; private boolean isVisible = false; private boolean isConnecting = false; CountDownTimer countDownTimer; int progress; private Handler handler = new Handler(); private Runnable runnable = new Runnable() { @Override public void run() { startOver(); } }; /** * Instantiates a new Success sign up fragment. */ public SuccessSignUpFragment() { // Required empty public constructor } /** * New instance success sign up fragment. 
* * @param context the context * @return the success sign up fragment */ public static SuccessSignUpFragment newInstance(Context context) { SuccessSignUpFragment fragment = new SuccessSignUpFragment(); KioskApp.get(context).inject(fragment); return fragment; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View rootView = inflater.inflate(R.layout.fragment_success_sign_up, container, false); ButterKnife.bind(this, rootView); loading = new MaterialDialog.Builder(getActivity()) .content("Please wait...") .cancelable(false) .progress(true, 0) .build(); applicationUUID = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB"); if (printerPreference.get()) { connectToPrinter(); } String firstNam = Strings.getFirstName(guest.name); firstNam = firstNam.toLowerCase(Locale.US); firstNam = Character.toString(firstNam.charAt(0)).toUpperCase() + firstNam.substring(1); titleView.setText(String.format(Locale.US, getString(R.string.thanks), firstNam)); return rootView; } @Override public void onStart() { super.onStart(); if (printerPreference.get()) { if (mBluetoothAdapter.isEnabled()) { if (mChatService == null) setupChat(); } } } @Override public void onDestroy() { super.onDestroy(); if (printerPreference.get()) { if (mChatService != null) mChatService.stop(); } } @Override public void onResume() { super.onResume(); bus.register(this); if (getActivity() != null) { bus.post(new BackShouldShowEvent(false)); bus.post(new SettingShouldShowEvent(false)); } if (getActivity() != null && getActivity() instanceof SignUpActivity) { progress = 1; countDownTimer = new CountDownTimer(15000, 1000) { @Override public void onTick(long millisUntilFinished) { circleCountDownView.setProgress(progress, 15); progress = progress + 1; } @Override public void onFinish() { circleCountDownView.setProgress(progress, 15); startOver(); } }; countDownTimer.start(); } tracker.setScreenName("Success"); tracker.send(new HitBuilders.ScreenViewBuilder().build()); } @Override public void onPause() { super.onPause(); bus.unregister(this); if (countDownTimer != null) { countDownTimer.cancel(); } } @Override public void onStop() { super.onStop(); if (countDownTimer != null) { countDownTimer.cancel(); } handler.removeCallbacks(runnable); } @Override public void setUserVisibleHint(boolean isVisibleToUser) { super.setUserVisibleHint(isVisibleToUser); if (getActivity() != null && isVisibleToUser) { progress = 1; countDownTimer = new CountDownTimer(15000, 1000) { @Override public void onTick(long millisUntilFinished) { circleCountDownView.setProgress(progress, 15); progress = progress + 1; } @Override public void onFinish() { circleCountDownView.setProgress(progress, 15); startOver(); } }; countDownTimer.start(); } } @OnClick(R.id.continue_btn) public void finishTapped() { countDownTimer.cancel(); countDownTimer.onFinish(); } private void startOver() { if (getActivity() != null && getActivity() instanceof SignUpActivity) { ((SignUpActivity) getActivity()).reset(); } else if (getActivity() != null) { ((FindUserActivity) getActivity()).reset(); } } private void connectToPrinter() { if (mChatService == null) { setupChat(); } mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); if (mChatService.getState() == BluetoothPrintDriver.STATE_NONE && mBluetoothAdapter != null && mBluetoothAdapter.isEnabled()) { mChatService.start(); } if (mBluetoothAdapter != null) { if (!mBluetoothAdapter.isEnabled()) { loading.dismiss(); 
printerBtn.setVisibility(View.VISIBLE); showSnackbar("Bluetooth is OFF"); } else { ListPairedDevices(); } } } private void ListPairedDevices() { Set<BluetoothDevice> mPairedDevices = mBluetoothAdapter.getBondedDevices(); if (mPairedDevices.size() > 0) { for (BluetoothDevice mDevice : mPairedDevices) { if (mDevice != null && mDevice.getAddress().equals("00:02:0A:03:3C:E0")) { isVisible = true; handleBluetooth(mDevice); break; } } if (!isVisible) { loading.dismiss(); printerBtn.setVisibility(View.VISIBLE); showSnackbar("Printer is not paired"); } } } private void handleBluetooth(BluetoothDevice mDevice) { String mDeviceAddress = mDevice.getAddress(); mBluetoothDevice = mBluetoothAdapter.getRemoteDevice(mDeviceAddress); new Thread(new Runnable() { @Override public void run() { try { mBluetoothSocket = mBluetoothDevice.createRfcommSocketToServiceRecord(applicationUUID); mBluetoothAdapter.cancelDiscovery(); mChatService.connect(mBluetoothDevice); Timber.w("ConnectToSocket"); } catch (IOException eConnectException) { Timber.w("CouldNotConnectToSocket"); closeSocket(mBluetoothSocket); } } }).start(); } private void setupChat() { mChatService = new BluetoothPrintDriver(getActivity(), mHandler); } private final Handler mHandler; { mHandler = new Handler() { @Override public void handleMessage(Message msg) { switch (msg.what) { case 1: Timber.w("MESSAGE_STATE_CHANGE: %s", msg.arg1); switch (msg.arg1) { case BluetoothPrintDriver.STATE_CONNECTED: Timber.w("Print connected"); if (BluetoothPrintDriver.IsNoConnection()) { printerBtn.setVisibility(View.VISIBLE); showSnackbar("Printer not connected"); } isConnecting = false; printQR(guest.email); loading.dismiss(); printFeed(3); break; case BluetoothPrintDriver.STATE_CONNECTING: loading.show(); isConnecting = true; Timber.w("Print connecting"); break; case BluetoothPrintDriver.STATE_LISTEN: case BluetoothPrintDriver.STATE_NONE: Timber.w("Printer not connected"); loading.dismiss(); if (isConnecting) { printerBtn.setVisibility(View.VISIBLE); showSnackbar("Printer not connected"); isConnecting = false; } break; } break; case 3: break; case 2: byte[] readBuf = (byte[]) msg.obj; Timber.w("readBuf[0]:" + readBuf[0] + " readBuf[1]:" + readBuf[1] + "readBuf[2]:" + readBuf[2]); if (readBuf[2] == 0) Timber.w("NO ERROR!"); else { if ((readBuf[2] & 0x02) != 0) { Timber.w("ERROR: No printer connected!"); loading.dismiss(); printerBtn.setVisibility(View.VISIBLE); showSnackbar("No printer connected!"); } if ((readBuf[2] & 0x04) != 0) { Timber.w("ERROR: No paper!"); loading.dismiss(); printerBtn.setVisibility(View.VISIBLE); showSnackbar("No paper!"); } if ((readBuf[2] & 0x08) != 0) { Timber.w("ERROR: Voltage is too low!!"); loading.dismiss(); printerBtn.setVisibility(View.VISIBLE); showSnackbar("Voltage is too low!"); } if ((readBuf[2] & 0x40) != 0) { Timber.w("ERROR: Printer Over Heat! 
"); loading.dismiss(); printerBtn.setVisibility(View.VISIBLE); showSnackbar("Printer Over Heat!"); } } break; } } }; } private void closeSocket(BluetoothSocket nOpenSocket) { try { nOpenSocket.close(); Timber.w("SocketClosed"); } catch (IOException ex) { Timber.w("CouldNotCloseSocket"); } } private void printQR(String email) { int emailLength = email.length(); printFeed(3); byte[] cmd = new byte[]{(byte) 29, (byte) 40, (byte) 107, (byte) 3, (byte) 0, (byte) 49, (byte) 67, (byte) 10, (byte) 29, (byte) 40, (byte) 107, (byte) 3, (byte) 0, (byte) 49, (byte) 69, (byte) 51, (byte) 29, (byte) 40, (byte) 107, (byte) 10, (byte) 0, (byte) 49, (byte) 80, (byte) 48, (byte) 29, (byte) 40, (byte) 107, (byte) 3, (byte) 0, (byte) 49, (byte) 81, (byte) 48, (byte) 27, (byte) 97, (byte) 49, (byte) 29, (byte) 40, (byte) 107, (byte) 4, (byte) 0, (byte) 49, (byte) 65, (byte) 49, (byte) 0, (byte) 27, (byte) 100, (byte) 5}; byte[] res = new byte[41 + emailLength]; System.arraycopy(cmd, 0, res, 0, 24); res[19] = (byte) (emailLength + 3); for (int i = 24; i < 24 + emailLength; i++) { res[i] = (byte) email.charAt(i - 24); } System.arraycopy(cmd, 24, res, 24 + emailLength, 16); BT_Write(res); BluetoothPrintDriver.printString(" "); } @Override public void onDestroyView() { super.onDestroyView(); ButterKnife.unbind(this); } @OnClick(R.id.printer_btn) public void onClick() { connectToPrinter(); } private void showSnackbar(String error) { String finalError = String.format(Locale.US, ". \n ( %s )", error); final Snackbar snackbar = Snackbar.make(appContainer.bind(getActivity()), getString(R.string.printer_error_snackbar) + finalError, Snackbar.LENGTH_INDEFINITE); snackbar.setAction("Ok", new View.OnClickListener() { @Override public void onClick(View view) { snackbar.dismiss(); } }); View snackbarView = snackbar.getView(); TextView textView = (TextView) snackbarView.findViewById(android.support.design.R.id.snackbar_text); textView.setMaxLines(2); snackbar.show(); } private void printFeed(int feedSize) { byte[] feed = new byte[]{(byte) 27, (byte) 100, (byte) feedSize}; BT_Write(feed); } }
package com.amplitude.api; import androidx.test.ext.junit.runners.AndroidJUnit4; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.Robolectric; import org.robolectric.Shadows; import org.robolectric.annotation.Config; import org.robolectric.shadows.ShadowLooper; import okhttp3.mockwebserver.RecordedRequest; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @RunWith(AndroidJUnit4.class) @Config(manifest = Config.NONE) public class SessionTest extends BaseTest { // allows for control of System.currentTimeMillis private class AmplitudeCallbacksWithTime extends AmplitudeCallbacks { private int index; private long [] timestamps = null; public AmplitudeCallbacksWithTime(AmplitudeClient client, long [] timestamps) { super(client); this.index = 0; this.timestamps = timestamps; } @Override protected long getCurrentTimeMillis() { return timestamps[index++ % timestamps.length]; } } @Before public void setUp() throws Exception { super.setUp(true); amplitude.initialize(context, apiKey); Shadows.shadowOf(amplitude.logThread.getLooper()).runOneTask(); } @After public void tearDown() throws Exception { super.tearDown(); } @Test public void testDefaultStartSession() { long timestamp = System.currentTimeMillis(); amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // trackSessionEvents is false, no start_session event added assertEquals(getUnsentEventCount(), 1); JSONObject event = getLastUnsentEvent(); assertEquals(event.optString("event_type"), "test"); assertEquals(event.optString("session_id"), String.valueOf(timestamp)); } @Test public void testDefaultTriggerNewSession() { ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); // log 1st event, initialize first session long timestamp1 = System.currentTimeMillis(); amplitude.logEventAsync("test1", null, null, null, null, null, timestamp1, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 1); // log 2nd event past timeout, verify new session started long timestamp2 = timestamp1 + sessionTimeoutMillis; amplitude.logEventAsync("test2", null, null, null, null, null, timestamp2, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 2); JSONArray events = getUnsentEvents(2); JSONObject event1 = events.optJSONObject(0); JSONObject event2 = events.optJSONObject(1); assertEquals(event1.optString("event_type"), "test1"); assertEquals(event1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event2.optString("event_type"), "test2"); assertEquals(event2.optString("session_id"), String.valueOf(timestamp2)); // also test getSessionId assertEquals(amplitude.getSessionId(), timestamp2); } @Test public void testDefaultExtendSession() { ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); // log 3 events all just within session expiration window, verify all in same session long timestamp1 = System.currentTimeMillis(); amplitude.logEventAsync("test1", null, null, null, null, null, timestamp1, false); 
looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 1); long timestamp2 = timestamp1 + sessionTimeoutMillis - 1; amplitude.logEventAsync("test2", null, null, null, null, null, timestamp2, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 2); long timestamp3 = timestamp2 + sessionTimeoutMillis - 1; amplitude.logEventAsync("test3", null, null, null, null, null, timestamp3, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 3); JSONArray events = getUnsentEvents(3); JSONObject event1 = events.optJSONObject(0); JSONObject event2 = events.optJSONObject(1); JSONObject event3 = events.optJSONObject(2); assertEquals(event1.optString("event_type"), "test1"); assertEquals(event1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("timestamp"), String.valueOf(timestamp1)); assertEquals(event2.optString("event_type"), "test2"); assertEquals(event2.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event2.optString("timestamp"), String.valueOf(timestamp2)); assertEquals(event3.optString("event_type"), "test3"); assertEquals(event3.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event3.optString("timestamp"), String.valueOf(timestamp3)); } @Test public void testDefaultStartSessionWithTracking() { amplitude.trackSessionEvents(true); long timestamp = System.currentTimeMillis(); amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); JSONArray events = getUnsentEvents(2); JSONObject session_event = events.optJSONObject(0); JSONObject test_event = events.optJSONObject(1); assertEquals(session_event.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( session_event.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(session_event.optString("session_id"), String.valueOf(timestamp)); assertEquals(test_event.optString("event_type"), "test"); assertEquals(test_event.optString("session_id"), String.valueOf(timestamp)); } @Test public void testDefaultStartSessionWithTrackingSynchronous() { amplitude.trackSessionEvents(true); long timestamp = System.currentTimeMillis(); amplitude.logEvent("test", null, null, null, null, null, timestamp, false); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); // verify order of synchronous events JSONArray events = getUnsentEvents(2); JSONObject session_event = events.optJSONObject(0); JSONObject test_event = events.optJSONObject(1); assertEquals(session_event.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( session_event.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(session_event.optString("session_id"), String.valueOf(timestamp)); assertEquals(test_event.optString("event_type"), "test"); assertEquals(test_event.optString("session_id"), String.valueOf(timestamp)); } @Test public void testDefaultTriggerNewSessionWithTracking() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); // log 1st event, initialize first session long timestamp1 = 
System.currentTimeMillis(); amplitude.logEventAsync("test1", null, null, null, null, null, timestamp1, false); looper.runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); // log 2nd event past timeout, verify new session started long timestamp2 = timestamp1 + sessionTimeoutMillis; amplitude.logEventAsync("test2", null, null, null, null, null, timestamp2, false); looper.runToEndOfTasks(); // trackSessions is true, end_session and start_session events are added assertEquals(getUnsentEventCount(), 5); JSONArray events = getUnsentEvents(5); JSONObject startSession1 = events.optJSONObject(0); JSONObject event1 = events.optJSONObject(1); JSONObject endSession = events.optJSONObject(2); JSONObject startSession2 = events.optJSONObject(3); JSONObject event2 = events.optJSONObject(4); assertEquals(startSession1.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession1.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("event_type"), "test1"); assertEquals(event1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("timestamp"), String.valueOf(timestamp1)); assertEquals(endSession.optString("event_type"), AmplitudeClient.END_SESSION_EVENT); assertEquals( endSession.optJSONObject("api_properties").optString("special"), AmplitudeClient.END_SESSION_EVENT ); assertEquals(endSession.optString("session_id"), String.valueOf(timestamp1)); assertEquals(startSession2.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession2.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession2.optString("session_id"), String.valueOf(timestamp2)); assertEquals(event2.optString("event_type"), "test2"); assertEquals(event2.optString("session_id"), String.valueOf(timestamp2)); assertEquals(event2.optString("timestamp"), String.valueOf(timestamp2)); } @Test public void testDefaultTriggerNewSessionWithTrackingSynchronous() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); // log 1st event, initialize first session long timestamp1 = System.currentTimeMillis(); amplitude.logEvent("test1", null, null, null, null, null, timestamp1, false); looper.runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); // log 2nd event past timeout, verify new session started long timestamp2 = timestamp1 + sessionTimeoutMillis; amplitude.logEvent("test2", null, null, null, null, null, timestamp2, false); looper.runToEndOfTasks(); // trackSessions is true, end_session and start_session events are added assertEquals(getUnsentEventCount(), 5); // verify order of synchronous events JSONArray events = getUnsentEvents(5); JSONObject startSession1 = events.optJSONObject(0); JSONObject event1 = events.optJSONObject(1); JSONObject endSession = events.optJSONObject(2); JSONObject startSession2 = events.optJSONObject(3); JSONObject event2 = events.optJSONObject(4); assertEquals(startSession1.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession1.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); 
assertEquals(startSession1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("event_type"), "test1"); assertEquals(event1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("timestamp"), String.valueOf(timestamp1)); assertEquals(endSession.optString("event_type"), AmplitudeClient.END_SESSION_EVENT); assertEquals( endSession.optJSONObject("api_properties").optString("special"), AmplitudeClient.END_SESSION_EVENT ); assertEquals(endSession.optString("session_id"), String.valueOf(timestamp1)); assertEquals(startSession2.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession2.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession2.optString("session_id"), String.valueOf(timestamp2)); assertEquals(event2.optString("event_type"), "test2"); assertEquals(event2.optString("session_id"), String.valueOf(timestamp2)); assertEquals(event2.optString("timestamp"), String.valueOf(timestamp2)); } @Test public void testDefaultExtendSessionWithTracking() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); // log 3 events all just within session expiration window, verify all in same session long timestamp1 = System.currentTimeMillis(); amplitude.logEventAsync("test1", null, null, null, null, null, timestamp1, false); looper.runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); long timestamp2 = timestamp1 + sessionTimeoutMillis - 1; amplitude.logEventAsync("test2", null, null, null, null, null, timestamp2, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 3); long timestamp3 = timestamp2 + sessionTimeoutMillis - 1; amplitude.logEventAsync("test3", null, null, null, null, null, timestamp3, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 4); JSONArray events = getUnsentEvents(4); JSONObject startSession = events.optJSONObject(0); JSONObject event1 = events.optJSONObject(1); JSONObject event2 = events.optJSONObject(2); JSONObject event3 = events.optJSONObject(3); assertEquals(startSession.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("event_type"), "test1"); assertEquals(event1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("timestamp"), String.valueOf(timestamp1)); assertEquals(event2.optString("event_type"), "test2"); assertEquals(event2.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event2.optString("timestamp"), String.valueOf(timestamp2)); assertEquals(event3.optString("event_type"), "test3"); assertEquals(event3.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event3.optString("timestamp"), String.valueOf(timestamp3)); } @Test public void testDefaultExtendSessionWithTrackingSynchronous() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); // log 3 events all just within session expiration window, verify all in same 
session long timestamp1 = System.currentTimeMillis(); amplitude.logEvent("test1", null, null, null, null, null, timestamp1, false); looper.runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); long timestamp2 = timestamp1 + sessionTimeoutMillis - 1; amplitude.logEvent("test2", null, null, null, null, null, timestamp2, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 3); long timestamp3 = timestamp2 + sessionTimeoutMillis - 1; amplitude.logEventAsync("test3", null, null, null, null, null, timestamp3, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 4); // verify order of synchronous events JSONArray events = getUnsentEvents(4); JSONObject startSession = events.optJSONObject(0); JSONObject event1 = events.optJSONObject(1); JSONObject event2 = events.optJSONObject(2); JSONObject event3 = events.optJSONObject(3); assertEquals(startSession.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("event_type"), "test1"); assertEquals(event1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event1.optString("timestamp"), String.valueOf(timestamp1)); assertEquals(event2.optString("event_type"), "test2"); assertEquals(event2.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event2.optString("timestamp"), String.valueOf(timestamp2)); assertEquals(event3.optString("event_type"), "test3"); assertEquals(event3.optString("session_id"), String.valueOf(timestamp1)); assertEquals(event3.optString("timestamp"), String.valueOf(timestamp3)); } @Test public void testEnableAccurateTracking() { assertFalse(amplitude.isUsingForegroundTracking()); AmplitudeCallbacks callBacks = new AmplitudeCallbacks(amplitude); assertTrue(amplitude.isUsingForegroundTracking()); } @Test public void testAccurateOnResumeStartSession() { long timestamp = System.currentTimeMillis(); long [] timestamps = {timestamp}; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertFalse(amplitude.isInForeground()); callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertTrue(amplitude.isInForeground()); assertEquals(amplitude.previousSessionId, timestamp); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamp); } @Test public void testAccurateOnResumeStartSessionWithTracking() { amplitude.trackSessionEvents(true); long timestamp = System.currentTimeMillis(); long [] timestamps = {timestamp}; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertFalse(amplitude.isInForeground()); assertEquals(getUnsentEventCount(), 0); callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertTrue(amplitude.isInForeground()); assertEquals(amplitude.previousSessionId, timestamp); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamp); // verify that start session event sent 
assertEquals(getUnsentEventCount(), 1); JSONObject startSession = getLastUnsentEvent(); assertEquals(startSession.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals( startSession.optString("session_id"), String.valueOf(timestamp) ); assertEquals( startSession.optString("timestamp"), String.valueOf(timestamp) ); } @Test public void testAccurateOnPauseRefreshTimestamp() { long minTimeBetweenSessionsMillis = 5*1000; //5s long timestamp = System.currentTimeMillis(); long [] timestamps = {timestamp, timestamp + minTimeBetweenSessionsMillis}; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[0]); callBacks.onActivityPaused(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[1]); assertFalse(amplitude.isInForeground()); } @Test public void testAccurateOnPauseRefreshTimestampWithTracking() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long minTimeBetweenSessionsMillis = 5*1000; //5s amplitude.setMinTimeBetweenSessionsMillis(minTimeBetweenSessionsMillis); long timestamp = System.currentTimeMillis(); long [] timestamps = {timestamp, timestamp + minTimeBetweenSessionsMillis}; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertEquals(getUnsentEventCount(), 0); callBacks.onActivityResumed(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[0]); assertEquals(getUnsentEventCount(), 1); // only refresh time, no session checking callBacks.onActivityPaused(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[1]); assertEquals(getUnsentEventCount(), 1); } @Test public void testAccurateOnResumeTriggerNewSession() { long minTimeBetweenSessionsMillis = 5*1000; //5s amplitude.setMinTimeBetweenSessionsMillis(minTimeBetweenSessionsMillis); long timestamp = System.currentTimeMillis(); long [] timestamps = { timestamp, timestamp + 1, timestamp + 1 + minTimeBetweenSessionsMillis }; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertEquals(getUnsentEventCount(), 0); callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[0]); 
assertEquals(getUnsentEventCount(), 0); assertTrue(amplitude.isInForeground()); // only refresh time, no session checking callBacks.onActivityPaused(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[1]); assertEquals(getUnsentEventCount(), 0); assertFalse(amplitude.isInForeground()); // resume after min session expired window, verify new session started callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[2]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[2]); assertEquals(getUnsentEventCount(), 0); assertTrue(amplitude.isInForeground()); } @Test public void testAccurateOnResumeTriggerNewSessionWithTracking() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long minTimeBetweenSessionsMillis = 5*1000; //5s amplitude.setMinTimeBetweenSessionsMillis(minTimeBetweenSessionsMillis); long timestamp = System.currentTimeMillis(); long [] timestamps = { timestamp, timestamp + 1, timestamp + 1 + minTimeBetweenSessionsMillis }; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertEquals(getUnsentEventCount(), 0); callBacks.onActivityResumed(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[0]); assertEquals(getUnsentEventCount(), 1); assertTrue(amplitude.isInForeground()); // only refresh time, no session checking callBacks.onActivityPaused(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[1]); assertEquals(getUnsentEventCount(), 1); assertFalse(amplitude.isInForeground()); // resume after min session expired window, verify new session started callBacks.onActivityResumed(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[2]); assertEquals(amplitude.lastEventId, 3); assertEquals(amplitude.lastEventTime, timestamps[2]); assertEquals(getUnsentEventCount(), 3); assertTrue(amplitude.isInForeground()); JSONArray events = getUnsentEvents(3); JSONObject startSession1 = events.optJSONObject(0); JSONObject endSession = events.optJSONObject(1); JSONObject startSession2 = events.optJSONObject(2); assertEquals(startSession1.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession1.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession1.optString("session_id"), String.valueOf(timestamps[0])); assertEquals(startSession1.optString("timestamp"), String.valueOf(timestamps[0])); assertEquals(endSession.optString("event_type"), AmplitudeClient.END_SESSION_EVENT); assertEquals( endSession.optJSONObject("api_properties").optString("special"), AmplitudeClient.END_SESSION_EVENT ); assertEquals(endSession.optString("session_id"), String.valueOf(timestamps[0])); assertEquals(endSession.optString("timestamp"), String.valueOf(timestamps[1])); assertEquals(startSession2.optString("event_type"), 
AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession2.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession2.optString("session_id"), String.valueOf(timestamps[2])); assertEquals(startSession2.optString("timestamp"), String.valueOf(timestamps[2])); } @Test public void testAccurateOnResumeExtendSession() { long minTimeBetweenSessionsMillis = 5*1000; //5s amplitude.setMinTimeBetweenSessionsMillis(minTimeBetweenSessionsMillis); long timestamp = System.currentTimeMillis(); long [] timestamps = { timestamp, timestamp + 1, timestamp + 1 + minTimeBetweenSessionsMillis - 1 // just inside session exp window }; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[0]); callBacks.onActivityPaused(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[1]); assertFalse(amplitude.isInForeground()); callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, timestamps[2]); assertTrue(amplitude.isInForeground()); } @Test public void testAccurateOnResumeExtendSessionWithTracking() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long minTimeBetweenSessionsMillis = 5*1000; //5s amplitude.setMinTimeBetweenSessionsMillis(minTimeBetweenSessionsMillis); long timestamp = System.currentTimeMillis(); long [] timestamps = { timestamp, timestamp + 1, timestamp + 1 + minTimeBetweenSessionsMillis - 1 // just inside session exp window }; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertEquals(getUnsentEventCount(), 0); callBacks.onActivityResumed(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[0]); assertEquals(getUnsentEventCount(), 1); callBacks.onActivityPaused(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[1]); assertFalse(amplitude.isInForeground()); assertEquals(getUnsentEventCount(), 1); callBacks.onActivityResumed(null); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[2]); assertTrue(amplitude.isInForeground()); assertEquals(getUnsentEventCount(), 1); JSONObject event = getLastUnsentEvent(); assertEquals(event.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( event.optJSONObject("api_properties").optString("special"), 
AmplitudeClient.START_SESSION_EVENT ); assertEquals(event.optString("session_id"), String.valueOf(timestamps[0])); assertEquals(event.optString("timestamp"), String.valueOf(timestamps[0])); } @Test public void testAccurateLogAsyncEvent() { ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long minTimeBetweenSessionsMillis = 5*1000; //5s amplitude.setMinTimeBetweenSessionsMillis(minTimeBetweenSessionsMillis); long timestamp = System.currentTimeMillis(); long [] timestamps = {timestamp + minTimeBetweenSessionsMillis - 1}; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertEquals(getUnsentEventCount(), 0); assertFalse(amplitude.isInForeground()); // logging an event before onResume will force a session check amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamp); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamp); assertEquals(getUnsentEventCount(), 1); callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamp); assertEquals(amplitude.lastEventId, 1); assertEquals(amplitude.lastEventTime, timestamps[0]); assertEquals(getUnsentEventCount(), 1); assertTrue(amplitude.isInForeground()); JSONObject event = getLastUnsentEvent(); assertEquals(event.optString("event_type"), "test"); assertEquals(event.optString("session_id"), String.valueOf(timestamp)); assertEquals(event.optString("timestamp"), String.valueOf(timestamp)); } @Test public void testAccurateLogAsyncEventWithTracking() { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long minTimeBetweenSessionsMillis = 5*1000; //5s amplitude.setMinTimeBetweenSessionsMillis(minTimeBetweenSessionsMillis); long timestamp = System.currentTimeMillis(); long [] timestamps = {timestamp + minTimeBetweenSessionsMillis}; AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); assertEquals(amplitude.previousSessionId, -1); assertEquals(amplitude.lastEventId, -1); assertEquals(amplitude.lastEventTime, -1); assertEquals(getUnsentEventCount(), 0); assertFalse(amplitude.isInForeground()); // logging an event before onResume will force a session check amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); looper.runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamp); assertEquals(amplitude.lastEventId, 2); assertEquals(amplitude.lastEventTime, timestamp); assertEquals(getUnsentEventCount(), 2); // onResume after session expires will start new session callBacks.onActivityResumed(null); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); assertEquals(amplitude.previousSessionId, timestamps[0]); assertEquals(amplitude.lastEventId, 4); assertEquals(amplitude.lastEventTime, timestamps[0]); assertEquals(getUnsentEventCount(), 4); assertTrue(amplitude.isInForeground()); JSONArray events = getUnsentEvents(4); JSONObject startSession1 = events.optJSONObject(0); JSONObject event = events.optJSONObject(1); JSONObject endSession = events.optJSONObject(2); JSONObject startSession2 = events.optJSONObject(3); assertEquals(startSession1.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); 
assertEquals( startSession1.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession1.optString("session_id"), String.valueOf(timestamp)); assertEquals(startSession1.optString("timestamp"), String.valueOf(timestamp)); assertEquals(event.optString("event_type"), "test"); assertEquals(event.optString("session_id"), String.valueOf(timestamp)); assertEquals(event.optString("timestamp"), String.valueOf(timestamp)); assertEquals(endSession.optString("event_type"), AmplitudeClient.END_SESSION_EVENT); assertEquals( endSession.optJSONObject("api_properties").optString("special"), AmplitudeClient.END_SESSION_EVENT ); assertEquals(endSession.optString("session_id"), String.valueOf(timestamp)); assertEquals(endSession.optString("timestamp"), String.valueOf(timestamp)); assertEquals(startSession2.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals( startSession2.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(startSession2.optString("session_id"), String.valueOf(timestamps[0])); assertEquals(startSession2.optString("timestamp"), String.valueOf(timestamps[0])); } @Test public void testLogOutOfSessionEvent() { ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5*1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); long timestamp1 = System.currentTimeMillis(); amplitude.logEventAsync("test1", null, null, null, null, null, timestamp1, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 1); // log out of session event just within session expiration window long timestamp2 = timestamp1 + sessionTimeoutMillis - 1; amplitude.logEventAsync("outOfSession", null, null, null, null, null, timestamp2, true); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 2); // out of session events do not extend session, 2nd event will start new session long timestamp3 = timestamp1 + sessionTimeoutMillis; amplitude.logEventAsync("test2", null, null, null, null, null, timestamp3, false); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 3); JSONArray events = getUnsentEvents(3); JSONObject event1 = events.optJSONObject(0); JSONObject outOfSessionEvent = events.optJSONObject(1); JSONObject event2 = events.optJSONObject(2); assertEquals(event1.optString("event_type"), "test1"); assertEquals(event1.optString("session_id"), String.valueOf(timestamp1)); assertEquals(outOfSessionEvent.optString("event_type"), "outOfSession"); assertEquals(outOfSessionEvent.optString("session_id"), String.valueOf(-1)); assertEquals(event2.optString("event_type"), "test2"); assertEquals(event2.optString("session_id"), String.valueOf(timestamp3)); } @Test public void testOnPauseFlushEvents() throws JSONException { long timestamp = System.currentTimeMillis(); long [] timestamps = { timestamp, timestamp + 1, timestamp + 2, timestamp + 3, timestamp + 4, timestamp + 5, }; ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); Robolectric.getForegroundThreadScheduler().advanceTo(1); // log an event, should not be uploaded amplitude.logEventAsync("testEvent", null, null, null, null, null, timestamps[0], false); looper.runOneTask(); looper.runOneTask(); assertEquals(getUnsentEventCount(), 1); // force client into background and verify flushing of events callBacks.onActivityPaused(null); looper.runOneTask(); // run
the update server RecordedRequest request = runRequest(amplitude); JSONArray events = getEventsFromRequest(request); assertEquals(events.length(), 1); assertEquals(events.getJSONObject(0).optString("event_type"), "testEvent"); // verify that events have been cleared from client looper.runOneTask(); assertEquals(getUnsentEventCount(), 0); } @Test public void testOnPauseFlushEventsDisabled() throws JSONException { long timestamp = System.currentTimeMillis(); long [] timestamps = { timestamp, timestamp + 1, timestamp + 2, timestamp + 3, timestamp + 4, timestamp + 5, }; amplitude.setFlushEventsOnClose(false); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); AmplitudeCallbacks callBacks = new AmplitudeCallbacksWithTime(amplitude, timestamps); Robolectric.getForegroundThreadScheduler().advanceTo(1); // log an event, should not be uploaded amplitude.logEventAsync("testEvent", null, null, null, null, null, timestamps[0], false); looper.runOneTask(); assertEquals(getUnsentEventCount(), 1); // force client into background and verify no flushing of events callBacks.onActivityPaused(null); looper.runOneTask(); // run the update server RecordedRequest request = runRequest(amplitude); // flushing disabled, so no request should be sent assertNull(request); assertEquals(getUnsentEventCount(), 1); } @Test public void testIdentifyTriggerNewSession() throws JSONException { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); assertEquals(getUnsentEventCount(), 0); assertEquals(getUnsentIdentifyCount(), 0); // log 1st identify, initialize first session Identify identify = new Identify().set("key", "value"); amplitude.identify(identify); looper.runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 1); assertEquals(getUnsentIdentifyCount(), 1); JSONArray events = getUnsentEvents(1); assertEquals( events.getJSONObject(0).optString("event_type"), AmplitudeClient.START_SESSION_EVENT ); JSONArray identifies = getUnsentIdentifys(1); JSONObject expected = new JSONObject().put("$set", new JSONObject().put("key", "value")); assertTrue(Utils.compareJSONObjects( identifies.getJSONObject(0).getJSONObject("user_properties"), expected )); } @Test public void testOutOfSessionIdentifyDoesNotTriggerNewSession() throws JSONException { amplitude.trackSessionEvents(true); ShadowLooper looper = Shadows.shadowOf(amplitude.logThread.getLooper()); long sessionTimeoutMillis = 5 * 1000; //5s amplitude.setSessionTimeoutMillis(sessionTimeoutMillis); assertEquals(getUnsentEventCount(), 0); assertEquals(getUnsentIdentifyCount(), 0); // log 1st identify, initialize first session Identify identify = new Identify().set("key", "value"); amplitude.identify(identify, true); looper.runToEndOfTasks(); assertEquals(getUnsentEventCount(), 0); // out of session, start session is not added assertEquals(getUnsentIdentifyCount(), 1); JSONArray identifies = getUnsentIdentifys(1); JSONObject expected = new JSONObject().put("$set", new JSONObject().put("key", "value")); assertTrue(Utils.compareJSONObjects( identifies.getJSONObject(0).getJSONObject("user_properties"), expected )); } @Test public void testSetUserIdAndStartNewSessionWithTracking() { amplitude.trackSessionEvents(true); long timestamp = System.currentTimeMillis(); amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); 
Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); // set user id and validate session ended and new session started amplitude.setUserId("test_new_user", true); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // total of 4 events, start session, test event, end session, start session assertEquals(getUnsentEventCount(), 4); JSONArray events = getUnsentEvents(4); // verify pre setUserId events JSONObject session_event = events.optJSONObject(0); JSONObject test_event = events.optJSONObject(1); assertEquals(session_event.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals(session_event.optString("user_id"), "null"); assertEquals( session_event.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(session_event.optString("session_id"), String.valueOf(timestamp)); assertEquals(test_event.optString("event_type"), "test"); assertEquals(test_event.optString("session_id"), String.valueOf(timestamp)); assertEquals(test_event.optString("user_id"), "null"); // verify post setUserId events session_event = events.optJSONObject(2); assertEquals(session_event.optString("event_type"), AmplitudeClient.END_SESSION_EVENT); assertEquals(session_event.optString("user_id"), "null"); assertEquals( session_event.optJSONObject("api_properties").optString("special"), AmplitudeClient.END_SESSION_EVENT ); assertEquals(session_event.optString("session_id"), String.valueOf(timestamp)); session_event = events.optJSONObject(3); assertEquals(session_event.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals(session_event.optString("user_id"), "test_new_user"); assertEquals( session_event.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); // the new event should have a newer session id assertTrue(session_event.optLong("session_id") > timestamp); } @Test public void testSetUserIdAndDoNotStartNewSessionWithTracking() { amplitude.trackSessionEvents(true); long timestamp = System.currentTimeMillis(); amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // trackSessions is true, start_session event is added assertEquals(getUnsentEventCount(), 2); // set user id and validate session ended and new session started amplitude.setUserId("test_new_user", false); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // still only 2 events, start session, test event assertEquals(getUnsentEventCount(), 2); JSONArray events = getUnsentEvents(2); // verify pre setUserId events JSONObject session_event = events.optJSONObject(0); JSONObject test_event = events.optJSONObject(1); assertEquals(session_event.optString("event_type"), AmplitudeClient.START_SESSION_EVENT); assertEquals(session_event.optString("user_id"), "null"); assertEquals( session_event.optJSONObject("api_properties").optString("special"), AmplitudeClient.START_SESSION_EVENT ); assertEquals(session_event.optString("session_id"), String.valueOf(timestamp)); assertEquals(test_event.optString("event_type"), "test"); assertEquals(test_event.optString("session_id"), String.valueOf(timestamp)); assertEquals(test_event.optString("user_id"), "null"); // verify same session id assertEquals(amplitude.sessionId, timestamp); } @Test public void testSetUserIdAndStartNewSessionWithoutTracking() { 
amplitude.trackSessionEvents(false); long timestamp = System.currentTimeMillis(); amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // trackSessions is false, there should only be 1 event assertEquals(getUnsentEventCount(), 1); // set user id and validate session ended and new session started amplitude.setUserId("test_new_user", true); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // still only 1 event, the test event assertEquals(getUnsentEventCount(), 1); JSONArray events = getUnsentEvents(1); // verify pre setUserId events JSONObject session_event = events.optJSONObject(0); assertEquals(session_event.optString("event_type"), "test"); assertEquals(session_event.optString("user_id"), "null"); assertEquals(session_event.optString("session_id"), String.valueOf(timestamp)); // log an event with new user id and session amplitude.logEventAsync("test", null, null, null, null, null, timestamp, false); Shadows.shadowOf(amplitude.logThread.getLooper()).runToEndOfTasks(); // verify post set user id assertEquals(getUnsentEventCount(), 2); JSONObject test_event = getLastEvent(); assertEquals(test_event.optString("event_type"), "test"); assertEquals(test_event.optString("user_id"), "test_new_user"); assertEquals(test_event.optLong("session_id"), amplitude.sessionId); // there should be a new session id at least assertTrue(amplitude.sessionId > timestamp); assertTrue(test_event.optLong("session_id") > timestamp); } }
/* * Copyright 1999-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.coyote.ajp; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InterruptedIOException; import java.net.InetAddress; import java.nio.ByteBuffer; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import org.apache.coyote.ActionCode; import org.apache.coyote.ActionHook; import org.apache.coyote.Adapter; import org.apache.coyote.InputBuffer; import org.apache.coyote.OutputBuffer; import org.apache.coyote.Request; import org.apache.coyote.RequestInfo; import org.apache.coyote.Response; import org.apache.tomcat.jni.Socket; import org.apache.tomcat.jni.Status; import org.apache.tomcat.util.buf.ByteChunk; import org.apache.tomcat.util.buf.HexUtils; import org.apache.tomcat.util.buf.MessageBytes; import org.apache.tomcat.util.http.HttpMessages; import org.apache.tomcat.util.http.MimeHeaders; import org.apache.tomcat.util.net.AprEndpoint; import org.apache.tomcat.util.res.StringManager; /** * Processes HTTP requests. * * @author Remy Maucherat * @author Henri Gomez * @author Dan Milstein * @author Keith Wannamaker * @author Kevin Seguin * @author Costin Manolache * @author Bill Barker */ public class AjpAprProcessor implements ActionHook { /** * Logger. */ protected static org.apache.juli.logging.Log log = org.apache.juli.logging.LogFactory.getLog(AjpAprProcessor.class); /** * The string manager for this package. */ protected static StringManager sm = StringManager.getManager(Constants.Package); // ----------------------------------------------------------- Constructors public AjpAprProcessor(int packetSize, AprEndpoint endpoint) { this.endpoint = endpoint; request = new Request(); request.setInputBuffer(new SocketInputBuffer()); response = new Response(); response.setHook(this); response.setOutputBuffer(new SocketOutputBuffer()); request.setResponse(response); requestHeaderMessage = new AjpMessage(packetSize); responseHeaderMessage = new AjpMessage(packetSize); bodyMessage = new AjpMessage(packetSize); if (endpoint.getFirstReadTimeout() > 0) { readTimeout = endpoint.getFirstReadTimeout() * 1000; } else { readTimeout = 100 * 1000; } // Allocate input and output buffers inputBuffer = ByteBuffer.allocateDirect(packetSize * 2); inputBuffer.limit(0); outputBuffer = ByteBuffer.allocateDirect(packetSize * 2); // Cause loading of HexUtils int foo = HexUtils.DEC[0]; // Cause loading of HttpMessages HttpMessages.getMessage(200); } // ----------------------------------------------------- Instance Variables /** * Associated adapter. */ protected Adapter adapter = null; /** * Request object. */ protected Request request = null; /** * Response object. */ protected Response response = null; /** * Header message. Note that this header is merely the one used during the * processing of the first message of a "request", so it might not be a request * header. It will stay unchanged during the processing of the whole request. 
*/ protected AjpMessage requestHeaderMessage = null; /** * Message used for response header composition. */ protected AjpMessage responseHeaderMessage = null; /** * Body message. */ protected AjpMessage bodyMessage = null; /** * Body message. */ protected MessageBytes bodyBytes = MessageBytes.newInstance(); /** * State flag. */ protected boolean started = false; /** * Error flag. */ protected boolean error = false; /** * Socket associated with the current connection. */ protected long socket; /** * Host name (used to avoid useless B2C conversion on the host name). */ protected char[] hostNameC = new char[0]; /** * Associated endpoint. */ protected AprEndpoint endpoint; /** * The socket timeout used when reading the first block of the request * header. */ protected long readTimeout; /** * Temp message bytes used for processing. */ protected MessageBytes tmpMB = MessageBytes.newInstance(); /** * Byte chunk for certs. */ protected MessageBytes certificates = MessageBytes.newInstance(); /** * End of stream flag. */ protected boolean endOfStream = false; /** * Body empty flag. */ protected boolean empty = true; /** * First read. */ protected boolean first = true; /** * Replay read. */ protected boolean replay = false; /** * Finished response. */ protected boolean finished = false; /** * Direct buffer used for output. */ protected ByteBuffer outputBuffer = null; /** * Direct buffer used for input. */ protected ByteBuffer inputBuffer = null; /** * Direct buffer used for sending right away a get body message. */ protected static final ByteBuffer getBodyMessageBuffer; /** * Direct buffer used for sending right away a pong message. */ protected static final ByteBuffer pongMessageBuffer; /** * End message array. */ protected static final byte[] endMessageArray; // ----------------------------------------------------- Static Initializer static { // Set the get body message buffer AjpMessage getBodyMessage = new AjpMessage(128); getBodyMessage.reset(); getBodyMessage.appendByte(Constants.JK_AJP13_GET_BODY_CHUNK); getBodyMessage.appendInt(Constants.MAX_READ_SIZE); getBodyMessage.end(); getBodyMessageBuffer = ByteBuffer.allocateDirect(getBodyMessage.getLen()); getBodyMessageBuffer.put(getBodyMessage.getBuffer(), 0, getBodyMessage.getLen()); // Set the read body message buffer AjpMessage pongMessage = new AjpMessage(128); pongMessage.reset(); pongMessage.appendByte(Constants.JK_AJP13_CPONG_REPLY); pongMessage.end(); pongMessageBuffer = ByteBuffer.allocateDirect(pongMessage.getLen()); pongMessageBuffer.put(pongMessage.getBuffer(), 0, pongMessage.getLen()); // Allocate the end message array AjpMessage endMessage = new AjpMessage(128); endMessage.reset(); endMessage.appendByte(Constants.JK_AJP13_END_RESPONSE); endMessage.appendByte(1); endMessage.end(); endMessageArray = new byte[endMessage.getLen()]; System.arraycopy(endMessage.getBuffer(), 0, endMessageArray, 0, endMessage.getLen()); } // ------------------------------------------------------------- Properties /** * Use Tomcat authentication ? */ protected boolean tomcatAuthentication = true; public boolean getTomcatAuthentication() { return tomcatAuthentication; } public void setTomcatAuthentication(boolean tomcatAuthentication) { this.tomcatAuthentication = tomcatAuthentication; } /** * Required secret. 
*/ protected String requiredSecret = null; public void setRequiredSecret(String requiredSecret) { this.requiredSecret = requiredSecret; } // --------------------------------------------------------- Public Methods /** Get the request associated with this processor. * * @return The request */ public Request getRequest() { return request; } /** * Process pipelined HTTP requests using the specified input and output * streams. * * @throws IOException error during an I/O operation */ public boolean process(long socket) throws IOException { RequestInfo rp = request.getRequestProcessor(); rp.setStage(org.apache.coyote.Constants.STAGE_PARSE); // Setting up the socket this.socket = socket; Socket.setrbb(this.socket, inputBuffer); Socket.setsbb(this.socket, outputBuffer); // Error flag error = false; int limit = 0; if (endpoint.getFirstReadTimeout() > 0) { limit = endpoint.getMaxThreads() / 2; } boolean openSocket = true; boolean keptAlive = false; while (started && !error) { // Parsing the request header try { // Get first message of the request if (!readMessage(requestHeaderMessage, true, keptAlive && (endpoint.getCurrentThreadsBusy() > limit))) { // This means that no data is available right now // (long keepalive), so that the processor should be recycled // and the method should return true rp.setStage(org.apache.coyote.Constants.STAGE_ENDED); break; } // Check message type, process right away and break if // not regular request processing int type = requestHeaderMessage.getByte(); if (type == Constants.JK_AJP13_CPING_REQUEST) { if (Socket.sendb(socket, pongMessageBuffer, 0, pongMessageBuffer.position()) < 0) { error = true; } continue; } else if(type != Constants.JK_AJP13_FORWARD_REQUEST) { // Usually the servlet didn't read the previous request body if(log.isDebugEnabled()) { log.debug("Unexpected message: "+type); } continue; } keptAlive = true; request.setStartTime(System.currentTimeMillis()); } catch (IOException e) { error = true; break; } catch (Throwable t) { log.debug(sm.getString("ajpprocessor.header.error"), t); // 400 - Bad Request response.setStatus(400); error = true; } // Setting up filters, and parse some request headers rp.setStage(org.apache.coyote.Constants.STAGE_PREPARE); try { prepareRequest(); } catch (Throwable t) { log.debug(sm.getString("ajpprocessor.request.prepare"), t); // 400 - Internal Server Error response.setStatus(400); error = true; } // Process the request in the adapter if (!error) { try { rp.setStage(org.apache.coyote.Constants.STAGE_SERVICE); adapter.service(request, response); } catch (InterruptedIOException e) { error = true; } catch (Throwable t) { log.error(sm.getString("ajpprocessor.request.process"), t); // 500 - Internal Server Error response.setStatus(500); error = true; } } // Finish the response if not done yet if (!finished) { try { finish(); } catch (Throwable t) { error = true; } } // If there was an error, make sure the request is counted as // and error, and update the statistics counter if (error) { response.setStatus(500); } request.updateCounters(); rp.setStage(org.apache.coyote.Constants.STAGE_KEEPALIVE); recycle(); } // Add the socket to the poller if (!error) { endpoint.getPoller().add(socket); } else { openSocket = false; } rp.setStage(org.apache.coyote.Constants.STAGE_ENDED); recycle(); return openSocket; } // ----------------------------------------------------- ActionHook Methods /** * Send an action to the connector. 
* * @param actionCode Type of the action * @param param Action parameter */ public void action(ActionCode actionCode, Object param) { if (actionCode == ActionCode.ACTION_COMMIT) { if (response.isCommitted()) return; // Validate and write response headers try { prepareResponse(); } catch (IOException e) { // Set error flag error = true; } } else if (actionCode == ActionCode.ACTION_CLIENT_FLUSH) { if (!response.isCommitted()) { // Validate and write response headers try { prepareResponse(); } catch (IOException e) { // Set error flag error = true; return; } } try { flush(); } catch (IOException e) { // Set error flag error = true; } } else if (actionCode == ActionCode.ACTION_CLOSE) { // Close // End the processing of the current request, and stop any further // transactions with the client try { finish(); } catch (IOException e) { // Set error flag error = true; } } else if (actionCode == ActionCode.ACTION_START) { started = true; } else if (actionCode == ActionCode.ACTION_STOP) { started = false; } else if (actionCode == ActionCode.ACTION_REQ_SSL_ATTRIBUTE ) { if (!certificates.isNull()) { ByteChunk certData = certificates.getByteChunk(); X509Certificate jsseCerts[] = null; ByteArrayInputStream bais = new ByteArrayInputStream(certData.getBytes(), certData.getStart(), certData.getLength()); // Fill the first element. try { CertificateFactory cf = CertificateFactory.getInstance("X.509"); X509Certificate cert = (X509Certificate) cf.generateCertificate(bais); jsseCerts = new X509Certificate[1]; jsseCerts[0] = cert; request.setAttribute(AprEndpoint.CERTIFICATE_KEY, jsseCerts); } catch (java.security.cert.CertificateException e) { log.error(sm.getString("ajpprocessor.certs.fail"), e); return; } } } else if (actionCode == ActionCode.ACTION_REQ_HOST_ATTRIBUTE) { // Get remote host name using a DNS resolution if (request.remoteHost().isNull()) { try { request.remoteHost().setString(InetAddress.getByName (request.remoteAddr().toString()).getHostName()); } catch (IOException iex) { // Ignore } } } else if (actionCode == ActionCode.ACTION_REQ_LOCAL_ADDR_ATTRIBUTE) { // Copy from local name for now, which should simply be an address request.localAddr().setString(request.localName().toString()); } else if (actionCode == ActionCode.ACTION_REQ_SET_BODY_REPLAY) { // Set the given bytes as the content ByteChunk bc = (ByteChunk) param; bodyBytes.setBytes(bc.getBytes(), bc.getStart(), bc.getLength()); request.setContentLength(bc.getLength()); first = false; empty = false; replay = true; } } // ------------------------------------------------------ Connector Methods /** * Set the associated adapter. * * @param adapter the new adapter */ public void setAdapter(Adapter adapter) { this.adapter = adapter; } /** * Get the associated adapter. * * @return the associated adapter */ public Adapter getAdapter() { return adapter; } // ------------------------------------------------------ Protected Methods /** * After reading the request headers, we have to setup the request filters. */ protected void prepareRequest() { // Translate the HTTP method code to a String. 
byte methodCode = requestHeaderMessage.getByte(); if (methodCode != Constants.SC_M_JK_STORED) { String methodName = Constants.methodTransArray[(int)methodCode - 1]; request.method().setString(methodName); } requestHeaderMessage.getBytes(request.protocol()); requestHeaderMessage.getBytes(request.requestURI()); requestHeaderMessage.getBytes(request.remoteAddr()); requestHeaderMessage.getBytes(request.remoteHost()); requestHeaderMessage.getBytes(request.localName()); request.setLocalPort(requestHeaderMessage.getInt()); boolean isSSL = requestHeaderMessage.getByte() != 0; if (isSSL) { request.scheme().setString("https"); } // Decode headers MimeHeaders headers = request.getMimeHeaders(); int hCount = requestHeaderMessage.getInt(); for(int i = 0 ; i < hCount ; i++) { String hName = null; // Header names are encoded as either an integer code starting // with 0xA0, or as a normal string (in which case the first // two bytes are the length). int isc = requestHeaderMessage.peekInt(); int hId = isc & 0xFF; MessageBytes vMB = null; isc &= 0xFF00; if(0xA000 == isc) { requestHeaderMessage.getInt(); // To advance the read position hName = Constants.headerTransArray[hId - 1]; vMB = headers.addValue(hName); } else { // reset hId -- if the header currently being read // happens to be 7 or 8 bytes long, the code below // will think it's the content-type header or the // content-length header - SC_REQ_CONTENT_TYPE=7, // SC_REQ_CONTENT_LENGTH=8 - leading to unexpected // behaviour. see bug 5861 for more information. hId = -1; requestHeaderMessage.getBytes(tmpMB); ByteChunk bc = tmpMB.getByteChunk(); vMB = headers.addValue(bc.getBuffer(), bc.getStart(), bc.getLength()); } requestHeaderMessage.getBytes(vMB); if (hId == Constants.SC_REQ_CONTENT_LENGTH || (hId == -1 && tmpMB.equalsIgnoreCase("Content-Length"))) { // just read the content-length header, so set it request.setContentLength( vMB.getInt() ); } else if (hId == Constants.SC_REQ_CONTENT_TYPE || (hId == -1 && tmpMB.equalsIgnoreCase("Content-Type"))) { // just read the content-type header, so set it ByteChunk bchunk = vMB.getByteChunk(); request.contentType().setBytes(bchunk.getBytes(), bchunk.getOffset(), bchunk.getLength()); } } // Decode extra attributes boolean secret = false; byte attributeCode; while ((attributeCode = requestHeaderMessage.getByte()) != Constants.SC_A_ARE_DONE) { switch (attributeCode) { case Constants.SC_A_REQ_ATTRIBUTE : requestHeaderMessage.getBytes(tmpMB); String n = tmpMB.toString(); requestHeaderMessage.getBytes(tmpMB); String v = tmpMB.toString(); request.setAttribute(n, v); break; case Constants.SC_A_CONTEXT : requestHeaderMessage.getBytes(tmpMB); // nothing break; case Constants.SC_A_SERVLET_PATH : requestHeaderMessage.getBytes(tmpMB); // nothing break; case Constants.SC_A_REMOTE_USER : if (tomcatAuthentication) { // ignore server requestHeaderMessage.getBytes(tmpMB); } else { requestHeaderMessage.getBytes(request.getRemoteUser()); } break; case Constants.SC_A_AUTH_TYPE : if (tomcatAuthentication) { // ignore server requestHeaderMessage.getBytes(tmpMB); } else { requestHeaderMessage.getBytes(request.getAuthType()); } break; case Constants.SC_A_QUERY_STRING : requestHeaderMessage.getBytes(request.queryString()); break; case Constants.SC_A_JVM_ROUTE : requestHeaderMessage.getBytes(request.instanceId()); break; case Constants.SC_A_SSL_CERT : request.scheme().setString("https"); // SSL certificate extraction is lazy, moved to JkCoyoteHandler requestHeaderMessage.getBytes(certificates); break; case Constants.SC_A_SSL_CIPHER : 
request.scheme().setString("https"); requestHeaderMessage.getBytes(tmpMB); request.setAttribute(AprEndpoint.CIPHER_SUITE_KEY, tmpMB.toString()); break; case Constants.SC_A_SSL_SESSION : request.scheme().setString("https"); requestHeaderMessage.getBytes(tmpMB); request.setAttribute(AprEndpoint.SESSION_ID_KEY, tmpMB.toString()); break; case Constants.SC_A_SSL_KEY_SIZE : request.setAttribute(AprEndpoint.KEY_SIZE_KEY, new Integer(requestHeaderMessage.getInt())); break; case Constants.SC_A_STORED_METHOD: requestHeaderMessage.getBytes(request.method()); break; case Constants.SC_A_SECRET: requestHeaderMessage.getBytes(tmpMB); if (requiredSecret != null) { secret = true; if (!tmpMB.equals(requiredSecret)) { response.setStatus(403); error = true; } } break; default: // Ignore unknown attribute for backward compatibility break; } } // Check if secret was submitted if required if ((requiredSecret != null) && !secret) { response.setStatus(403); error = true; } // Check for a full URI (including protocol://host:port/) ByteChunk uriBC = request.requestURI().getByteChunk(); if (uriBC.startsWithIgnoreCase("http", 0)) { int pos = uriBC.indexOf("://", 0, 3, 4); int uriBCStart = uriBC.getStart(); int slashPos = -1; if (pos != -1) { byte[] uriB = uriBC.getBytes(); slashPos = uriBC.indexOf('/', pos + 3); if (slashPos == -1) { slashPos = uriBC.getLength(); // Set URI as "/" request.requestURI().setBytes (uriB, uriBCStart + pos + 1, 1); } else { request.requestURI().setBytes (uriB, uriBCStart + slashPos, uriBC.getLength() - slashPos); } MessageBytes hostMB = headers.setValue("host"); hostMB.setBytes(uriB, uriBCStart + pos + 3, slashPos - pos - 3); } } MessageBytes valueMB = request.getMimeHeaders().getValue("host"); parseHost(valueMB); } /** * Parse host. */ public void parseHost(MessageBytes valueMB) { if (valueMB == null || (valueMB != null && valueMB.isNull()) ) { // HTTP/1.0 // Default is what the socket tells us. Overriden if a host is // found/parsed request.setServerPort(endpoint.getPort()); return; } ByteChunk valueBC = valueMB.getByteChunk(); byte[] valueB = valueBC.getBytes(); int valueL = valueBC.getLength(); int valueS = valueBC.getStart(); int colonPos = -1; if (hostNameC.length < valueL) { hostNameC = new char[valueL]; } boolean ipv6 = (valueB[valueS] == '['); boolean bracketClosed = false; for (int i = 0; i < valueL; i++) { char b = (char) valueB[i + valueS]; hostNameC[i] = b; if (b == ']') { bracketClosed = true; } else if (b == ':') { if (!ipv6 || bracketClosed) { colonPos = i; break; } } } if (colonPos < 0) { if (request.scheme().equalsIgnoreCase("https")) { // 443 - Default HTTPS port request.setServerPort(443); } else { // 80 - Default HTTTP port request.setServerPort(80); } request.serverName().setChars(hostNameC, 0, valueL); } else { request.serverName().setChars(hostNameC, 0, colonPos); int port = 0; int mult = 1; for (int i = valueL - 1; i > colonPos; i--) { int charValue = HexUtils.DEC[(int) valueB[i + valueS]]; if (charValue == -1) { // Invalid character error = true; // 400 - Bad request response.setStatus(400); break; } port = port + (charValue * mult); mult = 10 * mult; } request.setServerPort(port); } } /** * When committing the response, we have to validate the set of headers, as * well as setup the response filters. 
*/ protected void prepareResponse() throws IOException { response.setCommitted(true); responseHeaderMessage.reset(); responseHeaderMessage.appendByte(Constants.JK_AJP13_SEND_HEADERS); // HTTP header contents responseHeaderMessage.appendInt(response.getStatus()); String message = response.getMessage(); if (message == null){ message = HttpMessages.getMessage(response.getStatus()); } else { message = message.replace('\n', ' ').replace('\r', ' '); } tmpMB.setString(message); responseHeaderMessage.appendBytes(tmpMB); // Special headers MimeHeaders headers = response.getMimeHeaders(); String contentType = response.getContentType(); if (contentType != null) { headers.setValue("Content-Type").setString(contentType); } String contentLanguage = response.getContentLanguage(); if (contentLanguage != null) { headers.setValue("Content-Language").setString(contentLanguage); } int contentLength = response.getContentLength(); if (contentLength >= 0) { headers.setValue("Content-Length").setInt(contentLength); } // Other headers int numHeaders = headers.size(); responseHeaderMessage.appendInt(numHeaders); for (int i = 0; i < numHeaders; i++) { MessageBytes hN = headers.getName(i); responseHeaderMessage.appendBytes(hN); MessageBytes hV=headers.getValue(i); responseHeaderMessage.appendBytes(hV); } // Write to buffer responseHeaderMessage.end(); outputBuffer.put(responseHeaderMessage.getBuffer(), 0, responseHeaderMessage.getLen()); } /** * Finish AJP response. */ protected void finish() throws IOException { if (!response.isCommitted()) { // Validate and write response headers try { prepareResponse(); } catch (IOException e) { // Set error flag error = true; } } if (finished) return; finished = true; // Add the end message if (outputBuffer.position() + endMessageArray.length > outputBuffer.capacity()) { flush(); } outputBuffer.put(endMessageArray); flush(); } /** * Read at least the specified amount of bytes, and place them * in the input buffer. */ protected boolean read(int n) throws IOException { if (inputBuffer.capacity() - inputBuffer.limit() <= n - inputBuffer.remaining()) { inputBuffer.compact(); inputBuffer.limit(inputBuffer.position()); inputBuffer.position(0); } while (inputBuffer.remaining() < n) { int nRead = Socket.recvbb (socket, inputBuffer.limit(), inputBuffer.capacity() - inputBuffer.limit()); if (nRead > 0) { inputBuffer.limit(inputBuffer.limit() + nRead); } else { throw new IOException(sm.getString("ajpprotocol.failedread")); } } return true; } /** * Read at least the specified amount of bytes, and place them * in the input buffer. */ protected boolean readt(int n, boolean useAvailableData) throws IOException { if (useAvailableData && inputBuffer.remaining() == 0) { return false; } if (inputBuffer.capacity() - inputBuffer.limit() <= n - inputBuffer.remaining()) { inputBuffer.compact(); inputBuffer.limit(inputBuffer.position()); inputBuffer.position(0); } while (inputBuffer.remaining() < n) { int nRead = Socket.recvbbt (socket, inputBuffer.limit(), inputBuffer.capacity() - inputBuffer.limit(), readTimeout); if (nRead > 0) { inputBuffer.limit(inputBuffer.limit() + nRead); } else { if ((-nRead) == Status.ETIMEDOUT || (-nRead) == Status.TIMEUP) { return false; } else { throw new IOException(sm.getString("ajpprotocol.failedread")); } } } return true; } /** Receive a chunk of data. 
Called to implement the * 'special' packet in ajp13 and to receive the data * after we send a GET_BODY packet */ public boolean receive() throws IOException { first = false; bodyMessage.reset(); readMessage(bodyMessage, false, false); // No data received. if (bodyMessage.getLen() == 0) { // just the header // Don't mark 'end of stream' for the first chunk. return false; } int blen = bodyMessage.peekInt(); if (blen == 0) { return false; } bodyMessage.getBytes(bodyBytes); empty = false; return true; } /** * Get more request body data from the web server and store it in the * internal buffer. * * @return true if there is more data, false if not. */ private boolean refillReadBuffer() throws IOException { // If the server returns an empty packet, assume that that end of // the stream has been reached (yuck -- fix protocol??). // FORM support if (replay) { endOfStream = true; // we've read everything there is } if (endOfStream) { return false; } // Request more data immediately Socket.sendb(socket, getBodyMessageBuffer, 0, getBodyMessageBuffer.position()); boolean moreData = receive(); if( !moreData ) { endOfStream = true; } return moreData; } /** * Read an AJP message. * * @param first is true if the message is the first in the request, which * will cause a short duration blocking read * @return true if the message has been read, false if the short read * didn't return anything * @throws IOException any other failure, including incomplete reads */ protected boolean readMessage(AjpMessage message, boolean first, boolean useAvailableData) throws IOException { byte[] buf = message.getBuffer(); int headerLength = message.getHeaderLength(); if (first) { if (!readt(headerLength, useAvailableData)) { return false; } } else { read(headerLength); } inputBuffer.get(message.getBuffer(), 0, headerLength); message.processHeader(); read(message.getLen()); inputBuffer.get(message.getBuffer(), headerLength, message.getLen()); return true; } /** * Recycle the processor. */ public void recycle() { // Recycle Request object first = true; endOfStream = false; empty = true; replay = false; finished = false; request.recycle(); response.recycle(); certificates.recycle(); inputBuffer.clear(); inputBuffer.limit(0); outputBuffer.clear(); } /** * Callback to write data from the buffer. */ protected void flush() throws IOException { if (outputBuffer.position() > 0) { if (Socket.sendbb(socket, 0, outputBuffer.position()) < 0) { throw new IOException(); } outputBuffer.clear(); } } // ------------------------------------- InputStreamInputBuffer Inner Class /** * This class is an input buffer which will read its data from an input * stream. */ protected class SocketInputBuffer implements InputBuffer { /** * Read bytes into the specified chunk. */ public int doRead(ByteChunk chunk, Request req ) throws IOException { if (endOfStream) { return -1; } if (first && req.getContentLength() > 0) { // Handle special first-body-chunk if (!receive()) { return 0; } } else if (empty) { if (!refillReadBuffer()) { return -1; } } ByteChunk bc = bodyBytes.getByteChunk(); chunk.setBytes(bc.getBuffer(), bc.getStart(), bc.getLength()); empty = true; return chunk.getLength(); } } // ----------------------------------- OutputStreamOutputBuffer Inner Class /** * This class is an output buffer which will write data to an output * stream. */ protected class SocketOutputBuffer implements OutputBuffer { /** * Write chunk. 
*/ public int doWrite(ByteChunk chunk, Response res) throws IOException { if (!response.isCommitted()) { // Validate and write response headers try { prepareResponse(); } catch (IOException e) { // Set error flag error = true; } } int len = chunk.getLength(); // 4 - hardcoded, byte[] marshalling overhead int chunkSize = Constants.MAX_SEND_SIZE; int off = 0; while (len > 0) { int thisTime = len; if (thisTime > chunkSize) { thisTime = chunkSize; } len -= thisTime; if (outputBuffer.position() + thisTime + Constants.H_SIZE + 4 > outputBuffer.capacity()) { flush(); } outputBuffer.put((byte) 0x41); outputBuffer.put((byte) 0x42); outputBuffer.putShort((short) (thisTime + 4)); outputBuffer.put(Constants.JK_AJP13_SEND_BODY_CHUNK); outputBuffer.putShort((short) thisTime); outputBuffer.put(chunk.getBytes(), chunk.getOffset() + off, thisTime); outputBuffer.put((byte) 0x00); off += thisTime; } return chunk.getLength(); } } }
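/*
 * Hedged illustration (not part of AjpAprProcessor above): the framing that
 * SocketOutputBuffer.doWrite performs on each body chunk, rewritten as a
 * standalone helper so the byte layout is easier to see. 0x41 0x42 ("AB") is
 * the container-to-server packet magic, 0x03 is the SEND_BODY_CHUNK prefix
 * code, and the payload length is the chunk length plus 4 bytes of
 * marshalling overhead, exactly as in the loop above. This is a reference
 * sketch, not code from the original file.
 */
class AjpBodyChunkSketch {
    static byte[] frameSendBodyChunk(byte[] body, int off, int len) {
        byte[] packet = new byte[len + 8];
        packet[0] = 0x41;                      // 'A'
        packet[1] = 0x42;                      // 'B'
        int payload = len + 4;                 // prefix byte + 2-byte size + body + terminator
        packet[2] = (byte) (payload >>> 8);
        packet[3] = (byte) payload;
        packet[4] = 0x03;                      // JK_AJP13_SEND_BODY_CHUNK
        packet[5] = (byte) (len >>> 8);
        packet[6] = (byte) len;
        System.arraycopy(body, off, packet, 7, len);
        packet[7 + len] = 0x00;                // trailing terminator byte
        return packet;
    }
}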
package in.twizmwaz.cardinal.module.modules.destroyable; import in.twizmwaz.cardinal.match.Match; import in.twizmwaz.cardinal.module.BuilderData; import in.twizmwaz.cardinal.module.ModuleBuilder; import in.twizmwaz.cardinal.module.ModuleCollection; import in.twizmwaz.cardinal.module.ModuleLoadTime; import in.twizmwaz.cardinal.module.modules.regions.RegionModule; import in.twizmwaz.cardinal.module.modules.regions.RegionModuleBuilder; import in.twizmwaz.cardinal.module.modules.regions.type.combinations.UnionRegion; import in.twizmwaz.cardinal.module.modules.team.TeamModule; import in.twizmwaz.cardinal.util.NumUtils; import in.twizmwaz.cardinal.util.TeamUtils; import org.bukkit.Material; import org.jdom2.Element; import java.util.ArrayList; import java.util.List; @BuilderData(load = ModuleLoadTime.EARLIER) public class DestroyableObjectiveBuilder implements ModuleBuilder { @Override public ModuleCollection load(Match match) { ModuleCollection result = new ModuleCollection(); for (Element element : match.getDocument().getRootElement().getChildren("destroyables")) { for (Element subElement : element.getChildren("destroyable")) { TeamModule owner; try { owner = TeamUtils.getTeamById(subElement.getAttributeValue("owner")); } catch (NullPointerException e) { owner = TeamUtils.getTeamById(element.getAttributeValue("owner")); } String name = "Monument"; if (subElement.getAttributeValue("name") != null) { name = subElement.getAttributeValue("name"); } else if (element.getAttributeValue("name") != null) { name = element.getAttributeValue("name"); } String id = null; if (subElement.getAttributeValue("id") != null) { id = subElement.getAttributeValue("id"); } else if (element.getAttributeValue("id") != null) { id = element.getAttributeValue("id"); } ModuleCollection<RegionModule> regions = new ModuleCollection<>(); if (subElement.getAttributeValue("region") != null) { regions.add(RegionModuleBuilder.getRegion(subElement.getAttributeValue("region"))); } else { for (Element region : subElement.getChildren()) { regions.add(RegionModuleBuilder.getRegion(region)); } } List<Material> types = new ArrayList<>(); List<Integer> damageValues = new ArrayList<>(); if (subElement.getAttributeValue("materials") != null) { String materials = subElement.getAttributeValue("materials"); if (materials.contains(";")) { for (String material : materials.split(";")) { if (material.contains(":")) { types.add(Material.matchMaterial(material.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(material.split(":")[1].trim())); } else { types.add(Material.matchMaterial(material.trim())); damageValues.add(-1); } } } else { if (materials.contains(":")) { types.add(Material.matchMaterial(materials.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(materials.split(":")[1].trim())); } else { types.add(Material.matchMaterial(materials.trim())); damageValues.add(-1); } } } else if (element.getAttributeValue("materials") != null) { String materials = element.getAttributeValue("materials"); if (materials.contains(";")) { for (String material : materials.split(";")) { if (material.contains(":")) { types.add(Material.matchMaterial(material.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(material.split(":")[1].trim())); } else { types.add(Material.matchMaterial(material.trim())); damageValues.add(0); } } } else { if (materials.contains(":")) { types.add(Material.matchMaterial(materials.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(materials.split(":")[1].trim())); } else { 
types.add(Material.matchMaterial(materials.trim())); damageValues.add(0); } } } double required = 1.0; if (subElement.getAttributeValue("completion") != null) { required = Double.parseDouble(subElement.getAttributeValue("completion").replaceAll("%", "").replaceAll(" ", "")) / 100.0; } else if (element.getAttributeValue("completion") != null) { required = Double.parseDouble(element.getAttributeValue("completion").replaceAll("%", "").replaceAll(" ", "")) / 100.0; } boolean showProgress = false; if (subElement.getAttributeValue("show-progress") != null) { showProgress = subElement.getAttributeValue("show-progress").equalsIgnoreCase("true"); } else if (element.getAttributeValue("show-progress") != null) { showProgress = element.getAttributeValue("show-progress").equalsIgnoreCase("true"); } boolean repairable = false; if (subElement.getAttributeValue("repairable") != null) { repairable = subElement.getAttributeValue("repairable").equalsIgnoreCase("true"); } else if (element.getAttributeValue("repairable") != null) { repairable = element.getAttributeValue("repairable").equalsIgnoreCase("true"); } boolean show = true; if (subElement.getAttributeValue("show") != null) { show = !subElement.getAttributeValue("show").equalsIgnoreCase("false"); } else if (element.getAttributeValue("show") != null) { show = !element.getAttributeValue("show").equalsIgnoreCase("false"); } boolean changesModes = false; if (subElement.getAttributeValue("mode-changes") != null) { changesModes = subElement.getAttributeValue("mode-changes").equalsIgnoreCase("true"); } else if (element.getAttributeValue("mode-changes") != null) { changesModes = element.getAttributeValue("mode-changes").equalsIgnoreCase("true"); } result.add(new DestroyableObjective(owner, name, id, new UnionRegion(null, regions), types, damageValues, required, show, changesModes, showProgress, repairable)); } for (Element child : element.getChildren("destroyables")) { for (Element subChild : child.getChildren("destroyable")) { TeamModule owner; try { owner = TeamUtils.getTeamById(subChild.getAttributeValue("owner")); } catch (NullPointerException e) { try { owner = TeamUtils.getTeamById(child.getAttributeValue("owner")); } catch (NullPointerException exc) { owner = TeamUtils.getTeamById(element.getAttributeValue("owner")); } } String name = "Monument"; if (subChild.getAttributeValue("name") != null) { name = subChild.getAttributeValue("name"); } else if (child.getAttributeValue("name") != null) { name = child.getAttributeValue("name"); } else if (element.getAttributeValue("name") != null) { name = element.getAttributeValue("name"); } String id = null; if (subChild.getAttributeValue("id") != null) { id = subChild.getAttributeValue("id"); } else if (child.getAttributeValue("id") != null) { id = child.getAttributeValue("id"); } else if (element.getAttributeValue("id") != null) { id = element.getAttributeValue("id"); } ModuleCollection<RegionModule> regions = new ModuleCollection<>(); if (subChild.getAttributeValue("region") != null) { regions.add(RegionModuleBuilder.getRegion(subChild.getAttributeValue("region"))); } else { for (Element region : subChild.getChildren()) { regions.add(RegionModuleBuilder.getRegion(region)); } } List<Material> types = new ArrayList<>(); List<Integer> damageValues = new ArrayList<>(); if (subChild.getAttributeValue("materials") != null) { String materials = subChild.getAttributeValue("materials"); if (materials.contains(";")) { for (String material : materials.split(";")) { if (material.contains(":")) { 
types.add(Material.matchMaterial(material.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(material.split(":")[1].trim())); } else { types.add(Material.matchMaterial(material.trim())); damageValues.add(-1); } } } else { if (materials.contains(":")) { types.add(Material.matchMaterial(materials.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(materials.split(":")[1].trim())); } else { types.add(Material.matchMaterial(materials.trim())); damageValues.add(-1); } } } else if (child.getAttributeValue("materials") != null) { String materials = child.getAttributeValue("materials"); if (materials.contains(";")) { for (String material : materials.split(";")) { if (material.contains(":")) { types.add(Material.matchMaterial(material.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(material.split(":")[1].trim())); } else { types.add(Material.matchMaterial(material.trim())); damageValues.add(-1); } } } else { if (materials.contains(":")) { types.add(Material.matchMaterial(materials.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(materials.split(":")[1].trim())); } else { types.add(Material.matchMaterial(materials.trim())); damageValues.add(0); } } } else if (element.getAttributeValue("materials") != null) { String materials = element.getAttributeValue("materials"); if (materials.contains(";")) { for (String material : materials.split(";")) { if (material.contains(":")) { types.add(Material.matchMaterial(material.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(material.split(":")[1].trim())); } else { types.add(Material.matchMaterial(material.trim())); damageValues.add(0); } } } else { if (materials.contains(":")) { types.add(Material.matchMaterial(materials.split(":")[0].trim())); damageValues.add(NumUtils.parseInt(materials.split(":")[1].trim())); } else { types.add(Material.matchMaterial(materials.trim())); damageValues.add(0); } } } double required = 1.0; if (subChild.getAttributeValue("completion") != null) { required = Double.parseDouble(subChild.getAttributeValue("completion").replaceAll("%", "").replaceAll(" ", "")) / 100.0; } else if (child.getAttributeValue("completion") != null) { required = Double.parseDouble(child.getAttributeValue("completion").replaceAll("%", "").replaceAll(" ", "")) / 100.0; } else if (element.getAttributeValue("completion") != null) { required = Double.parseDouble(element.getAttributeValue("completion").replaceAll("%", "").replaceAll(" ", "")) / 100.0; } boolean showProgress = false; if (subChild.getAttributeValue("show-progress") != null) { showProgress = subChild.getAttributeValue("show-progress").equalsIgnoreCase("true"); } else if (child.getAttributeValue("show-progress") != null) { showProgress = child.getAttributeValue("show-progress").equalsIgnoreCase("true"); } else if (element.getAttributeValue("show-progress") != null) { showProgress = element.getAttributeValue("show-progress").equalsIgnoreCase("true"); } boolean repairable = false; if (subChild.getAttributeValue("repairable") != null) { repairable = subChild.getAttributeValue("repairable").equalsIgnoreCase("true"); } else if (child.getAttributeValue("repairable") != null) { repairable = child.getAttributeValue("repairable").equalsIgnoreCase("true"); } else if (element.getAttributeValue("repairable") != null) { repairable = element.getAttributeValue("repairable").equalsIgnoreCase("true"); } boolean show = true; if (subChild.getAttributeValue("show") != null) { show = !subChild.getAttributeValue("show").equalsIgnoreCase("false"); } else if 
(child.getAttributeValue("show") != null) { show = !child.getAttributeValue("show").equalsIgnoreCase("false"); } else if (element.getAttributeValue("show") != null) { show = !element.getAttributeValue("show").equalsIgnoreCase("false"); } boolean changesModes = false; if (subChild.getAttributeValue("mode-changes") != null) { changesModes = subChild.getAttributeValue("mode-changes").equalsIgnoreCase("true"); } else if (child.getAttributeValue("mode-changes") != null) { changesModes = child.getAttributeValue("mode-changes").equalsIgnoreCase("true"); } else if (element.getAttributeValue("mode-changes") != null) { changesModes = element.getAttributeValue("mode-changes").equalsIgnoreCase("true"); } result.add(new DestroyableObjective(owner, name, id, new UnionRegion(null, regions), types, damageValues, required, show, changesModes, showProgress, repairable)); } } } return result; } }
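/*
 * Hedged sketch (not part of DestroyableObjectiveBuilder above): the
 * "materials" attribute parsing in the builder is repeated for the
 * destroyable element, its parent and the root element. It could be
 * collected into one helper like the one below; String.split returns the
 * whole string as a single token when the separator is absent, so the
 * explicit contains(";") branching above is not needed. Material, NumUtils
 * and List are the same classes already imported by the builder; the -1
 * default mirrors the "no damage value" case used there.
 */
class MaterialAttributeParserSketch {
    static void parseMaterials(String materials, List<Material> types, List<Integer> damageValues) {
        for (String material : materials.split(";")) {
            String[] parts = material.split(":");
            types.add(Material.matchMaterial(parts[0].trim()));
            damageValues.add(parts.length > 1 ? NumUtils.parseInt(parts[1].trim()) : -1);
        }
    }
}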
package io.craigmiller160.schedule.entity; import java.time.LocalDate; import java.util.ArrayList; import java.util.List; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Convert; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.Table; import org.hibernate.annotations.NamedNativeQueries; import org.hibernate.annotations.NamedNativeQuery; import io.craigmiller160.schedule.util.LocalDateConverter; //TODO document this... possibly move it to a better location too @NamedNativeQueries({ @NamedNativeQuery( name="studentsByIndexRangeProcedure", query="call students_by_index_range (:startIndex, :endIndex)", resultClass=Student.class) }) /** * An entity that defines a student taking courses. It contains * the attributes that define the student, as well as a list * of all courses the student is taking. * <p> * <b>THREAD SAFETY:</b> This class is NOT thread-safe. Its state * is not synchronized, and instances of it should be handled in * a way that is not shared between threads. * * @author craig * @version 1.0 */ @Entity @Table (name="student") public class Student implements Comparable<Student>{ /** * The id of the student. */ @Id @GeneratedValue (strategy=GenerationType.AUTO) @Column (name="student_id") private int studentId; /** * The first name of the student. */ @Column (name="first_name") private String firstName; /** * The last name of the student. */ @Column (name="last_name") private String lastName; /** * The birth date of the student. */ @Column (name="birth_date") @Convert (converter=LocalDateConverter.class) private LocalDate birthDate; /** * The gender of the student. */ private char gender; /** * The grade of the student. */ private int grade; /** * The list of courses this student is taking. */ @ManyToMany (cascade=CascadeType.ALL) //TODO not sure if cascading is appropriate here or not @JoinTable (name="student_course", joinColumns={@JoinColumn (name="student_id")}, inverseJoinColumns={@JoinColumn (name="course_id")}) private List<Course> courses = new ArrayList<>(); //TODO might want to change this to be a set... debating this /** * Create a new student with none of its properties set. */ public Student() {} /** * Create a new student and set all properties * but its id. * * @param firstName the first name of the student. * @param lastName the last name of the student. * @param birthDate the birth date of the student. * @param gender the gender of the student. * @param grade the grade of the student. */ public Student(String firstName, String lastName, LocalDate birthDate, char gender, int grade){ this.firstName = firstName; this.lastName = lastName; this.birthDate = birthDate; this.grade = grade; this.gender = gender; } /** * Get the id of the student. * * @return the id of the student. * @throws NullPointerException if the field * being retrieved was not set. */ public int getStudentId() { return studentId; } /** * Set the id of the student. * * @param studentId the id of the student. */ public void setStudentId(int studentId) { this.studentId = studentId; } /** * Get the first name of the student. * * @return the first name of the student. * @throws NullPointerException if the field * being retrieved was not set. */ public String getFirstName() { return firstName; } /** * Set the first name of the student. 
* * @param firstName the first name of the student. */ public void setFirstName(String firstName) { this.firstName = firstName; } /** * Get the last name of the student. * * @return the last name of the student. * @throws NullPointerException if the field * being retrieved was not set. */ public String getLastName() { return lastName; } /** * Set the last name of the student. * * @param lastName the last name of the student. */ public void setLastName(String lastName) { this.lastName = lastName; } /** * Get the birth date of the student. * * @return the birth date of the student. * @throws NullPointerException if the field * being retrieved was not set. */ public LocalDate getBirthDate() { return birthDate; } /** * Set the birth date of the student. * * @param birthDate the birth date of the student. */ public void setBirthDate(LocalDate birthDate) { this.birthDate = birthDate; } /** * Get the grade of the student. * * @return the grade of the student. * @throws NullPointerException if the field * being retrieved was not set. */ public int getGrade() { return grade; } /** * Set the grade of the student. * * @param grade the grade of the student. */ public void setGrade(int grade) { this.grade = grade; } /** * Add a course to the list of courses this student is taking. * * @param course the course to add. * @return true if the course is added successfully. * @throws NullPointerException if the list of courses was * not properly instantiated. */ public boolean addCourse(Course course){ //TODO can't put add operation here because endless stackoverflow loop return courses.add(course); } /** * Remove a course from the list of courses this student is taking. * * @param course the course to remove. * @return true if this course was removed successfully. * @throws NullPointerException if the list of courses was * not properly instantiated. */ public boolean removeCourse(Course course){ //TODO need to have a remove operation reflected on both sides return courses.remove(course); } /** * Get the list of courses this student is taking. * * @return the list of courses this student is taking. * @throws NullPointerException if the field * being retrieved was not set. */ public List<Course> getCourses() { return courses; } /** * Set the list of courses this student is taking. * * @param courses the list of courses this student is taking. */ public void setCourses(List<Course> courses) { this.courses = courses; } /** * Get the gender of the student. * * @return the gender of the student. * @throws NullPointerException if the field * being retrieved was not set. */ public char getGender() { return gender; } /** * Set the gender of the student. * * @param gender the gender of the student. */ public void setGender(char gender) { this.gender = gender; } @Override public String toString(){ return firstName + " " + lastName; } @Override public int hashCode(){ return studentId; } @Override public boolean equals(Object obj){ if(obj instanceof Student){ return ((Student) obj).studentId == this.studentId; } else{ return false; } } @Override public int compareTo(Student student) { return ((Integer) this.studentId) .compareTo((Integer) student.studentId); } }
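/*
 * Hedged sketch (not part of the Student entity above): the TODO comments on
 * addCourse/removeCourse note that keeping both sides of the bidirectional
 * student_course relationship in sync from inside the entity risks mutual
 * recursion. One common alternative is to do the wiring in a single helper
 * (for example in a service layer), as below. Course#getStudents() is an
 * assumed accessor; the Course class is not shown in this document.
 */
final class EnrollmentHelperSketch {

    private EnrollmentHelperSketch() {}

    /** Links the student and course on both sides, skipping duplicates. */
    static void enroll(Student student, Course course) {
        if (!student.getCourses().contains(course)) {
            student.getCourses().add(course);
        }
        if (!course.getStudents().contains(student)) {   // assumed accessor on Course
            course.getStudents().add(student);
        }
    }

    /** Unlinks the student and course on both sides. */
    static void withdraw(Student student, Course course) {
        student.getCourses().remove(course);
        course.getStudents().remove(student);            // assumed accessor on Course
    }
}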
/* * Copyright 2015 iychoi. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.iplantcollaborative; import java.io.Closeable; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.iplantcollaborative.AccessPermissionCache.AccessPermissionKey; import org.iplantcollaborative.conf.DataStoreConf; import org.iplantcollaborative.datastore.msg.ADataStoreMessage; import org.iplantcollaborative.datastore.msg.CollectionAclMod; import org.iplantcollaborative.datastore.msg.CollectionAdd; import org.iplantcollaborative.datastore.msg.CollectionMetadataAdd; import org.iplantcollaborative.datastore.msg.CollectionMv; import org.iplantcollaborative.datastore.msg.CollectionRm; import org.iplantcollaborative.datastore.msg.DataObjectAclMod; import org.iplantcollaborative.datastore.msg.DataObjectAdd; import org.iplantcollaborative.datastore.msg.DataObjectMetadataAdd; import org.iplantcollaborative.datastore.msg.DataObjectMetadataMod; import org.iplantcollaborative.datastore.msg.DataObjectMod; import org.iplantcollaborative.datastore.msg.DataObjectMv; import org.iplantcollaborative.datastore.msg.DataObjectRm; import org.iplantcollaborative.irods.DataStoreClient; import org.iplantcollaborative.irods.DataStoreClientManager; import org.iplantcollaborative.lease.Client; import org.iplantcollaborative.utils.JsonSerializer; import org.iplantcollaborative.utils.PathUtils; /** * * @author iychoi */ public class DataStoreMessageProcessor implements Closeable { private static final Log LOG = LogFactory.getLog(DataStoreMessageProcessor.class); private Binder binder; private JsonSerializer serializer; private DataStoreClientManager datastoreClientManager; private UUIDCache uuidCache; private AccessPermissionCache accessPermissionCache; public DataStoreMessageProcessor(DataStoreConf datastoreConf, Binder binder) { if(datastoreConf == null) { throw new IllegalArgumentException("datastoreConf is null"); } if(binder == null) { throw new IllegalArgumentException("binder is null"); } this.binder = binder; binder.setProcessor(this); this.serializer = new JsonSerializer(); this.uuidCache = new UUIDCache(); this.accessPermissionCache = new AccessPermissionCache(); this.datastoreClientManager = new DataStoreClientManager(datastoreConf); } public void connect() throws IOException { } public void process(String routingKey, String message) { try { ADataStoreMessage dsMsg = createJsonMessageObject(routingKey, message); if(dsMsg != null) { if(this.binder.getClientRegistrar() != null) { List<Client> acceptedClients = listAcceptedClients(dsMsg); if(!acceptedClients.isEmpty()) { Message msg = new Message(); String msgbody = this.serializer.toJson(dsMsg); msg.addRecipient(acceptedClients); msg.setMessageBody(msgbody); MessagePublisher publisher = this.binder.getPublisher(); if(publisher != null) { try { publisher.publish(msg); } catch 
(IOException ex) { LOG.error("Exception occurred while publishing a message", ex); } } else { LOG.error("processor not registered"); } } } else { LOG.error("client registrar not registered"); } } else { LOG.info("cannot process a message " + routingKey); } } catch (Exception ex) { LOG.info(message); LOG.error("Exception occurred while processing a message", ex); } } private ADataStoreMessage createJsonMessageObject(String routingKey, String message) throws IOException { ADataStoreMessage dsMsg = null; switch(routingKey) { case "collection.add": { CollectionAdd msg = (CollectionAdd) this.serializer.fromJson(message, CollectionAdd.class); msg.setEntityPath(msg.getPath()); dsMsg = msg; } break; case "collection.rm": { CollectionRm msg = (CollectionRm) this.serializer.fromJson(message, CollectionRm.class); msg.setEntityPath(msg.getPath()); dsMsg = msg; } break; case "collection.mv": { CollectionMv msg = (CollectionMv) this.serializer.fromJson(message, CollectionMv.class); msg.setEntityPath(msg.getNewPath()); dsMsg = msg; } break; case "collection.acl.mod": { CollectionAclMod msg = (CollectionAclMod) this.serializer.fromJson(message, CollectionAclMod.class); String entityPath = convertUUIDToPathForCollection(msg.getEntity()); msg.setEntityPath(entityPath); dsMsg = msg; } break; case "collection.sys-metadata.add": { CollectionMetadataAdd msg = (CollectionMetadataAdd) this.serializer.fromJson(message, CollectionMetadataAdd.class); String entityPath = convertUUIDToPathForCollection(msg.getEntity()); msg.setEntityPath(entityPath); dsMsg = msg; } break; case "data-object.add": { DataObjectAdd msg = (DataObjectAdd) this.serializer.fromJson(message, DataObjectAdd.class); msg.setEntityPath(msg.getPath()); dsMsg = msg; } break; case "data-object.rm": { DataObjectRm msg = (DataObjectRm) this.serializer.fromJson(message, DataObjectRm.class); msg.setEntityPath(msg.getPath()); dsMsg = msg; } break; case "data-object.mod": { DataObjectMod msg = (DataObjectMod) this.serializer.fromJson(message, DataObjectMod.class); String entityPath = convertUUIDToPathForDataObject(msg.getEntity()); msg.setEntityPath(entityPath); dsMsg = msg; } break; case "data-object.mv": { DataObjectMv msg = (DataObjectMv) this.serializer.fromJson(message, DataObjectMv.class); msg.setEntityPath(msg.getNewPath()); dsMsg = msg; } break; case "data-object.acl.mod": { DataObjectAclMod msg = (DataObjectAclMod) this.serializer.fromJson(message, DataObjectAclMod.class); String entityPath = convertUUIDToPathForDataObject(msg.getEntity()); msg.setEntityPath(entityPath); dsMsg = msg; } break; case "data-object.sys-metadata.add": { DataObjectMetadataAdd msg = (DataObjectMetadataAdd) this.serializer.fromJson(message, DataObjectMetadataAdd.class); String entityPath = convertUUIDToPathForDataObject(msg.getEntity()); msg.setEntityPath(entityPath); dsMsg = msg; } break; case "data-object.sys-metadata.mod": { DataObjectMetadataMod msg = (DataObjectMetadataMod) this.serializer.fromJson(message, DataObjectMetadataMod.class); String entityPath = convertUUIDToPathForDataObject(msg.getEntity()); msg.setEntityPath(entityPath); dsMsg = msg; } break; default: { LOG.info("cannot find datastore data object matching to a message " + routingKey); LOG.info(message); dsMsg = null; } break; } if(dsMsg != null) { // cache uuid-path if necessary if(dsMsg.getEntity() != null && !dsMsg.getEntity().isEmpty() && dsMsg.getEntityPath() != null && !dsMsg.getEntityPath().isEmpty()) { this.uuidCache.cache(dsMsg.getEntity(), dsMsg.getEntityPath()); } // set operation 
            dsMsg.setOperation(routingKey);
        }
        return dsMsg;
    }

    private String convertUUIDToPathForDataObject(String entity) throws IOException {
        // return the cached path only when it is present and non-empty
        String cachedPath = this.uuidCache.get(entity);
        if(cachedPath != null && !cachedPath.isEmpty()) {
            return cachedPath;
        }

        try {
            DataStoreClient datastoreClientInstance = this.datastoreClientManager.getDatastoreClientInstance();
            String path = datastoreClientInstance.convertUUIDToPathForDataObject(entity);
            return path;
        } catch (IOException ex) {
            DataStoreClient datastoreClientInstance = this.datastoreClientManager.getDatastoreClientInstance(true);
            String path = datastoreClientInstance.convertUUIDToPathForDataObject(entity);
            return path;
        }
    }

    private String convertUUIDToPathForCollection(String entity) throws IOException {
        // return the cached path only when it is present and non-empty
        String cachedPath = this.uuidCache.get(entity);
        if(cachedPath != null && !cachedPath.isEmpty()) {
            return cachedPath;
        }

        try {
            DataStoreClient datastoreClientInstance = this.datastoreClientManager.getDatastoreClientInstance();
            String path = datastoreClientInstance.convertUUIDToPathForCollection(entity);
            return path;
        } catch (IOException ex) {
            DataStoreClient datastoreClientInstance = this.datastoreClientManager.getDatastoreClientInstance(true);
            String path = datastoreClientInstance.convertUUIDToPathForCollection(entity);
            return path;
        }
    }

    private boolean _checkAccessPermissionForCollection(DataStoreClient datastoreClientInstance, String path, String userId) throws IOException {
        try {
            AccessPermissionKey accessPermissionKey = new AccessPermissionKey(userId, path);
            Boolean cachedAccessPermission = this.accessPermissionCache.get(accessPermissionKey);
            if(cachedAccessPermission != null) {
                return cachedAccessPermission.booleanValue();
            }

            boolean bAccessPermission = datastoreClientInstance.hasAccessPermissionsForCollection(path, userId);
            this.accessPermissionCache.cache(accessPermissionKey, bAccessPermission);
            return bAccessPermission;
        } catch (FileNotFoundException ex) {
            // if the collection does not exist, fall back to checking its parent
            String parentPath = PathUtils.getParentPath(path);
            if(parentPath != null) {
                return _checkAccessPermissionForCollection(datastoreClientInstance, parentPath, userId);
            }
            return false;
        }
    }

    private boolean checkAccessPermissionForCollection(String path, String userId) throws IOException {
        try {
            DataStoreClient datastoreClientInstance = this.datastoreClientManager.getDatastoreClientInstance();
            return _checkAccessPermissionForCollection(datastoreClientInstance, path, userId);
        } catch (IOException ex) {
            DataStoreClient datastoreClientInstance = this.datastoreClientManager.getDatastoreClientInstance(true);
            return _checkAccessPermissionForCollection(datastoreClientInstance, path, userId);
        }
    }

    private List<Client> listAcceptedClients(ADataStoreMessage msg) throws IOException {
        List<Client> clients = this.binder.getClientRegistrar().getAcceptClients(msg);
        Map<String, Boolean> userAcceptance = new HashMap<String, Boolean>();
        List<Client> acceptedClients = new ArrayList<Client>();

        String path = PathUtils.getParentPath(msg.getEntityPath());

        for(Client client : clients) {
            Boolean baccept = userAcceptance.get(client.getUserId());
            if(baccept == null) {
                if(checkAccessPermissionForCollection(path, client.getUserId())) {
                    userAcceptance.put(client.getUserId(), true);
                    acceptedClients.add(client);
                } else {
                    userAcceptance.put(client.getUserId(), false);
                }
            } else {
                if(baccept.booleanValue()) {
                    acceptedClients.add(client);
                }
            }
        }
        return acceptedClients;
    }

    public DataStoreClient getDatastoreClient() throws IOException {
        return this.datastoreClientManager.getDatastoreClientInstance(true);
    }

    @Override
    public void close()
throws IOException { this.datastoreClientManager.close(); this.datastoreClientManager = null; this.uuidCache.clearCache(); this.accessPermissionCache.clearCache(); } }
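/*
 * Illustrative usage sketch (not part of the original sources): the intended life cycle
 * of DataStoreMessageProcessor. How the DataStoreConf and Binder instances are obtained
 * is assumed here (configuration loading lives outside this class); only the constructor,
 * connect(), process() and close() calls are taken from the class above.
 */
package org.iplantcollaborative;

import java.io.IOException;

import org.iplantcollaborative.conf.DataStoreConf;

public class DataStoreMessageProcessorExample {

    public static void run(DataStoreConf conf, Binder binder, String routingKey, String json) throws IOException {
        // The constructor registers the processor with the binder via setProcessor().
        DataStoreMessageProcessor processor = new DataStoreMessageProcessor(conf, binder);
        try {
            processor.connect();
            // Routing keys such as "data-object.add" select the message type;
            // unknown keys are logged and ignored (see createJsonMessageObject above).
            processor.process(routingKey, json);
        } finally {
            processor.close();
        }
    }
}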
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.trans.steps.xmlinputsax; import java.util.ArrayList; import java.util.List; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.custom.CTabItem; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.DirectoryDialog; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.FileDialog; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.compatibility.Value; import org.pentaho.di.core.Const; import org.pentaho.di.core.Props; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.TransPreviewFactory; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDialogInterface; import org.pentaho.di.trans.steps.xmlinputsax.XMLInputSaxField; import org.pentaho.di.trans.steps.xmlinputsax.XMLInputSaxFieldPosition; import org.pentaho.di.trans.steps.xmlinputsax.XMLInputSaxFieldRetriever; import org.pentaho.di.trans.steps.xmlinputsax.XMLInputSaxMeta; import org.pentaho.di.ui.core.dialog.EnterNumberDialog; import org.pentaho.di.ui.core.dialog.EnterSelectionDialog; import org.pentaho.di.ui.core.dialog.ErrorDialog; import org.pentaho.di.ui.core.dialog.PreviewRowsDialog; import org.pentaho.di.ui.core.widget.ColumnInfo; import org.pentaho.di.ui.core.widget.TableView; import org.pentaho.di.ui.core.widget.TextVar; import org.pentaho.di.ui.trans.dialog.TransPreviewProgressDialog; import org.pentaho.di.ui.trans.step.BaseStepDialog; public class XMLInputSaxDialog extends BaseStepDialog implements StepDialogInterface { private static Class<?> PKG = XMLInputSaxMeta.class; // for i18n purposes, needed by Translator2!! 
$NON-NLS-1$ private CTabFolder wTabFolder; private FormData fdTabFolder; private CTabItem wFileTab, wContentTab, wFieldsTab; private Composite wFileComp, wContentComp, wFieldsComp; private FormData fdFileComp, fdContentComp, fdFieldsComp; private Label wlFilename; private Button wbbFilename; // Browse: add file or directory private Button wbdFilename; // Delete private Button wbeFilename; // Edit private Button wbaFilename; // Add or change private TextVar wFilename; private FormData fdlFilename, fdbFilename, fdbdFilename, fdbeFilename, fdbaFilename, fdFilename; private Label wlFilenameList; private TableView wFilenameList; private FormData fdlFilenameList, fdFilenameList; private Label wlFilemask; private Text wFilemask; private FormData fdlFilemask, fdFilemask; private Button wbShowFiles; private FormData fdbShowFiles; private Label wlInclFilename; private Button wInclFilename; private FormData fdlInclFilename, fdInclFilename; private Label wlInclFilenameField; private Text wInclFilenameField; private FormData fdlInclFilenameField, fdInclFilenameField; private Label wlInclRownum; private Button wInclRownum; private FormData fdlInclRownum, fdRownum; private Label wlInclRownumField; private Text wInclRownumField; private FormData fdlInclRownumField, fdInclRownumField; private Label wlLimit; private Text wLimit; private FormData fdlLimit, fdLimit; private Label wlPosition; private TableView wPosition; private FormData fdlPosition, fdPosition; private TableView wFields; private FormData fdFields; private TableView wAttributes; private FormData fdAttributes; private XMLInputSaxMeta input; private static final String STRING_PREVIEW_ROWS = BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.PreviewRows.Label"); //$NON-NLS-1$ public static final int dateLengths[] = new int[] { 23, 19, 14, 10, 10, 10, 10, 8, 8, 8, 8, 6, 6 }; public XMLInputSaxDialog(Shell parent, Object in, TransMeta transMeta, String sname) { super(parent, (BaseStepMeta) in, transMeta, sname); input = (XMLInputSaxMeta) in; } public String open() { Shell parent = getParent(); Display display = parent.getDisplay(); shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN); props.setLook(shell); setShellImage(shell, input); ModifyListener lsMod = new ModifyListener() { public void modifyText(ModifyEvent e) { input.setChanged(); } }; changed = input.hasChanged(); FormLayout formLayout = new FormLayout(); formLayout.marginWidth = Const.FORM_MARGIN; formLayout.marginHeight = Const.FORM_MARGIN; shell.setLayout(formLayout); shell.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Shell.Text")); //$NON-NLS-1$ int middle = props.getMiddlePct(); int margin = Const.MARGIN; // Stepname line wlStepname = new Label(shell, SWT.RIGHT); wlStepname.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.StepName.Label")); //$NON-NLS-1$ props.setLook(wlStepname); fdlStepname = new FormData(); fdlStepname.left = new FormAttachment(0, 0); fdlStepname.top = new FormAttachment(0, margin); fdlStepname.right = new FormAttachment(middle, -margin); wlStepname.setLayoutData(fdlStepname); wStepname = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER); wStepname.setText(stepname); props.setLook(wStepname); wStepname.addModifyListener(lsMod); fdStepname = new FormData(); fdStepname.left = new FormAttachment(middle, 0); fdStepname.top = new FormAttachment(0, margin); fdStepname.right = new FormAttachment(100, 0); wStepname.setLayoutData(fdStepname); wTabFolder = new CTabFolder(shell, SWT.BORDER); props.setLook(wTabFolder, 
Props.WIDGET_STYLE_TAB); // //////////////////////// // START OF FILE TAB /// // //////////////////////// wFileTab = new CTabItem(wTabFolder, SWT.NONE); wFileTab.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.File.Label")); //$NON-NLS-1$ wFileComp = new Composite(wTabFolder, SWT.NONE); props.setLook(wFileComp); FormLayout fileLayout = new FormLayout(); fileLayout.marginWidth = 3; fileLayout.marginHeight = 3; wFileComp.setLayout(fileLayout); // Filename line wlFilename = new Label(wFileComp, SWT.RIGHT); wlFilename.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.FileOrDirectory.Label")); //$NON-NLS-1$ props.setLook(wlFilename); fdlFilename = new FormData(); fdlFilename.left = new FormAttachment(0, 0); fdlFilename.top = new FormAttachment(0, 0); fdlFilename.right = new FormAttachment(middle, -margin); wlFilename.setLayoutData(fdlFilename); wbbFilename = new Button(wFileComp, SWT.PUSH | SWT.CENTER); props.setLook(wbbFilename); wbbFilename.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.System.Button.Browse")); //$NON-NLS-1$ wbbFilename.setToolTipText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.Brows.ToolTip")); //$NON-NLS-1$ fdbFilename = new FormData(); fdbFilename.right = new FormAttachment(100, 0); fdbFilename.top = new FormAttachment(0, 0); wbbFilename.setLayoutData(fdbFilename); wbaFilename = new Button(wFileComp, SWT.PUSH | SWT.CENTER); props.setLook(wbaFilename); wbaFilename.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.AddFile.Label")); //$NON-NLS-1$ wbaFilename.setToolTipText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.AddFile.ToolTip")); //$NON-NLS-1$ fdbaFilename = new FormData(); fdbaFilename.right = new FormAttachment(wbbFilename, -margin); fdbaFilename.top = new FormAttachment(0, 0); wbaFilename.setLayoutData(fdbaFilename); wFilename = new TextVar(transMeta, wFileComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); props.setLook(wFilename); wFilename.addModifyListener(lsMod); fdFilename = new FormData(); fdFilename.left = new FormAttachment(middle, 0); fdFilename.right = new FormAttachment(wbaFilename, -margin); fdFilename.top = new FormAttachment(0, 0); wFilename.setLayoutData(fdFilename); wlFilemask = new Label(wFileComp, SWT.RIGHT); wlFilemask.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.FileMaskRegExp.Label")); //$NON-NLS-1$ props.setLook(wlFilemask); fdlFilemask = new FormData(); fdlFilemask.left = new FormAttachment(0, 0); fdlFilemask.top = new FormAttachment(wFilename, margin); fdlFilemask.right = new FormAttachment(middle, -margin); wlFilemask.setLayoutData(fdlFilemask); wFilemask = new Text(wFileComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); props.setLook(wFilemask); wFilemask.addModifyListener(lsMod); fdFilemask = new FormData(); fdFilemask.left = new FormAttachment(middle, 0); fdFilemask.top = new FormAttachment(wFilename, margin); fdFilemask.right = new FormAttachment(100, 0); wFilemask.setLayoutData(fdFilemask); // Filename list line wlFilenameList = new Label(wFileComp, SWT.RIGHT); wlFilenameList.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.SelectedFiles.Label")); //$NON-NLS-1$ props.setLook(wlFilenameList); fdlFilenameList = new FormData(); fdlFilenameList.left = new FormAttachment(0, 0); fdlFilenameList.top = new FormAttachment(wFilemask, margin); fdlFilenameList.right = new FormAttachment(middle, -margin); wlFilenameList.setLayoutData(fdlFilenameList); // Buttons to the right of the screen... 
wbdFilename = new Button(wFileComp, SWT.PUSH | SWT.CENTER); props.setLook(wbdFilename); wbdFilename.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.DeleteEntry.Label")); //$NON-NLS-1$ wbdFilename.setToolTipText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.DeleteEntry.ToolTip")); //$NON-NLS-1$ fdbdFilename = new FormData(); fdbdFilename.right = new FormAttachment(100, 0); fdbdFilename.top = new FormAttachment(wFilemask, 40); wbdFilename.setLayoutData(fdbdFilename); wbeFilename = new Button(wFileComp, SWT.PUSH | SWT.CENTER); props.setLook(wbeFilename); wbeFilename.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.EditEntry.Label")); //$NON-NLS-1$ wbeFilename.setToolTipText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.EditEntry.ToolTip")); //$NON-NLS-1$ fdbeFilename = new FormData(); fdbeFilename.right = new FormAttachment(100, 0); fdbeFilename.top = new FormAttachment(wbdFilename, margin); wbeFilename.setLayoutData(fdbeFilename); wbShowFiles = new Button(wFileComp, SWT.PUSH | SWT.CENTER); props.setLook(wbShowFiles); wbShowFiles.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.ShowFilenames.Label")); //$NON-NLS-1$ fdbShowFiles = new FormData(); fdbShowFiles.left = new FormAttachment(middle, 0); fdbShowFiles.bottom = new FormAttachment(100, 0); wbShowFiles.setLayoutData(fdbShowFiles); ColumnInfo[] colinfo = new ColumnInfo[2]; colinfo[0] = new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.FileDirectory.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false); //$NON-NLS-1$ colinfo[1] = new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Wildcard.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false); //$NON-NLS-1$ colinfo[0].setUsingVariables(true); colinfo[1].setToolTip(BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.FileDirectory.ToolTip")); //$NON-NLS-1$ wFilenameList = new TableView(transMeta, wFileComp, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER, colinfo, 2, lsMod, props); props.setLook(wFilenameList); fdFilenameList = new FormData(); fdFilenameList.left = new FormAttachment(middle, 0); fdFilenameList.right = new FormAttachment(wbdFilename, -margin); fdFilenameList.top = new FormAttachment(wFilemask, margin); fdFilenameList.bottom = new FormAttachment(wbShowFiles, -margin); wFilenameList.setLayoutData(fdFilenameList); fdFileComp = new FormData(); fdFileComp.left = new FormAttachment(0, 0); fdFileComp.top = new FormAttachment(0, 0); fdFileComp.right = new FormAttachment(100, 0); fdFileComp.bottom = new FormAttachment(100, 0); wFileComp.setLayoutData(fdFileComp); wFileComp.layout(); wFileTab.setControl(wFileComp); // /////////////////////////////////////////////////////////// // / END OF FILE TAB // /////////////////////////////////////////////////////////// // //////////////////////// // START OF CONTENT TAB/// // / wContentTab = new CTabItem(wTabFolder, SWT.NONE); wContentTab.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Tab.Content.Label")); //$NON-NLS-1$ FormLayout contentLayout = new FormLayout(); contentLayout.marginWidth = 3; contentLayout.marginHeight = 3; wContentComp = new Composite(wTabFolder, SWT.NONE); props.setLook(wContentComp); wContentComp.setLayout(contentLayout); wlInclFilename = new Label(wContentComp, SWT.RIGHT); wlInclFilename.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.IncludeFilename.Label")); //$NON-NLS-1$ props.setLook(wlInclFilename); fdlInclFilename = new FormData(); fdlInclFilename.left = new FormAttachment(0, 0); fdlInclFilename.top = new FormAttachment(0, 0); 
fdlInclFilename.right = new FormAttachment(middle, -margin); wlInclFilename.setLayoutData(fdlInclFilename); wInclFilename = new Button(wContentComp, SWT.CHECK); props.setLook(wInclFilename); wInclFilename.setToolTipText(BaseMessages.getString(PKG, "XMLInputSaxDialog.IncludeFilename.ToolTip")); //$NON-NLS-1$ fdInclFilename = new FormData(); fdInclFilename.left = new FormAttachment(middle, 0); fdInclFilename.top = new FormAttachment(0, 0); wInclFilename.setLayoutData(fdInclFilename); wlInclFilenameField = new Label(wContentComp, SWT.LEFT); wlInclFilenameField.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.FilenameFieldname.Label")); //$NON-NLS-1$ props.setLook(wlInclFilenameField); fdlInclFilenameField = new FormData(); fdlInclFilenameField.left = new FormAttachment(wInclFilename, margin); fdlInclFilenameField.top = new FormAttachment(0, 0); wlInclFilenameField.setLayoutData(fdlInclFilenameField); wInclFilenameField = new Text(wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); props.setLook(wInclFilenameField); wInclFilenameField.addModifyListener(lsMod); fdInclFilenameField = new FormData(); fdInclFilenameField.left = new FormAttachment(wlInclFilenameField, margin); fdInclFilenameField.top = new FormAttachment(0, 0); fdInclFilenameField.right = new FormAttachment(100, 0); wInclFilenameField.setLayoutData(fdInclFilenameField); wlInclRownum = new Label(wContentComp, SWT.RIGHT); wlInclRownum.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.RowNumInOutput.Label")); //$NON-NLS-1$ props.setLook(wlInclRownum); fdlInclRownum = new FormData(); fdlInclRownum.left = new FormAttachment(0, 0); fdlInclRownum.top = new FormAttachment(wInclFilenameField, margin); fdlInclRownum.right = new FormAttachment(middle, -margin); wlInclRownum.setLayoutData(fdlInclRownum); wInclRownum = new Button(wContentComp, SWT.CHECK); props.setLook(wInclRownum); wInclRownum.setToolTipText(BaseMessages.getString(PKG, "XMLInputSaxDialog.RowNumInOutput.ToolTip")); //$NON-NLS-1$ fdRownum = new FormData(); fdRownum.left = new FormAttachment(middle, 0); fdRownum.top = new FormAttachment(wInclFilenameField, margin); wInclRownum.setLayoutData(fdRownum); wlInclRownumField = new Label(wContentComp, SWT.RIGHT); wlInclRownumField.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.RowNumInOutputField.Label")); //$NON-NLS-1$ props.setLook(wlInclRownumField); fdlInclRownumField = new FormData(); fdlInclRownumField.left = new FormAttachment(wInclRownum, margin); fdlInclRownumField.top = new FormAttachment(wInclFilenameField, margin); wlInclRownumField.setLayoutData(fdlInclRownumField); wInclRownumField = new Text(wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); props.setLook(wInclRownumField); wInclRownumField.addModifyListener(lsMod); fdInclRownumField = new FormData(); fdInclRownumField.left = new FormAttachment(wlInclRownumField, margin); fdInclRownumField.top = new FormAttachment(wInclFilenameField, margin); fdInclRownumField.right = new FormAttachment(100, 0); wInclRownumField.setLayoutData(fdInclRownumField); wlLimit = new Label(wContentComp, SWT.RIGHT); wlLimit.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Limit.Label")); //$NON-NLS-1$ props.setLook(wlLimit); fdlLimit = new FormData(); fdlLimit.left = new FormAttachment(0, 0); fdlLimit.top = new FormAttachment(wInclRownumField, margin); fdlLimit.right = new FormAttachment(middle, -margin); wlLimit.setLayoutData(fdlLimit); wLimit = new Text(wContentComp, SWT.SINGLE | SWT.LEFT | SWT.BORDER); props.setLook(wLimit); wLimit.addModifyListener(lsMod); fdLimit = new 
FormData(); fdLimit.left = new FormAttachment(middle, 0); fdLimit.top = new FormAttachment(wInclRownumField, margin); fdLimit.right = new FormAttachment(100, 0); wLimit.setLayoutData(fdLimit); String positionHelp = BaseMessages.getString(PKG, "XMLInputSaxDialog.Location.ToolTip"); //$NON-NLS-1$ wlPosition = new Label(wContentComp, SWT.RIGHT); wlPosition.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Location.Label")); //$NON-NLS-1$ wlPosition.setToolTipText(positionHelp); props.setLook(wlPosition); fdlPosition = new FormData(); fdlPosition.left = new FormAttachment(0, 0); fdlPosition.top = new FormAttachment(wLimit, margin); fdlPosition.right = new FormAttachment(middle, -margin); wlPosition.setLayoutData(fdlPosition); ColumnInfo[] locationColumns = new ColumnInfo[] { new ColumnInfo(BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Elements.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false) //$NON-NLS-1$ }; locationColumns[0].setToolTip(positionHelp); int nrElements = input.getInputPosition() != null ? input.getInputPosition().length : 0; wPosition = new TableView(transMeta, wContentComp, SWT.FULL_SELECTION | SWT.MULTI, locationColumns, nrElements, lsMod, props); wPosition.addModifyListener(lsMod); fdPosition = new FormData(); fdPosition.left = new FormAttachment(middle, 0); fdPosition.top = new FormAttachment(wLimit, margin); fdPosition.bottom = new FormAttachment(100, -50); fdPosition.right = new FormAttachment(100, 0); wPosition.setLayoutData(fdPosition); wPosition.setToolTipText(positionHelp); fdContentComp = new FormData(); fdContentComp.left = new FormAttachment(0, 0); fdContentComp.top = new FormAttachment(0, 0); fdContentComp.right = new FormAttachment(100, 0); fdContentComp.bottom = new FormAttachment(100, 0); wContentComp.setLayoutData(fdContentComp); wContentComp.layout(); wContentTab.setControl(wContentComp); // /////////////////////////////////////////////////////////// // / END OF CONTENT TAB // /////////////////////////////////////////////////////////// // Fields tab... // wFieldsTab = new CTabItem(wTabFolder, SWT.NONE); wFieldsTab.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Tab.Fields.Label")); //$NON-NLS-1$ FormLayout fieldsLayout = new FormLayout(); fieldsLayout.marginWidth = Const.FORM_MARGIN; fieldsLayout.marginHeight = Const.FORM_MARGIN; wFieldsComp = new Composite(wTabFolder, SWT.NONE); wFieldsComp.setLayout(fieldsLayout); props.setLook(wFieldsComp); wGet = new Button(wFieldsComp, SWT.PUSH); wGet.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Button.GetFields.Label")); //$NON-NLS-1$ fdGet = new FormData(); fdGet.left = new FormAttachment(50, 0); fdGet.bottom = new FormAttachment(100, 0); wGet.setLayoutData(fdGet); final int FieldsRows = input.getInputFields().length; // Prepare a list of possible formats... 
String dats[] = Const.getDateFormats(); String nums[] = Const.getNumberFormats(); int totsize = dats.length + nums.length; String formats[] = new String[totsize]; for (int x = 0; x < dats.length; x++) formats[x] = dats[x]; for (int x = 0; x < nums.length; x++) formats[dats.length + x] = nums[x]; ColumnInfo[] colinf = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Name.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Type.Label"), ColumnInfo.COLUMN_TYPE_CCOMBO, Value.getTypes(), true), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Format.Label"), ColumnInfo.COLUMN_TYPE_CCOMBO, formats), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Length.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Precision.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Currency.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Decimal.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Group.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.TrimType.Label"), ColumnInfo.COLUMN_TYPE_CCOMBO, XMLInputSaxField.trimTypeDesc, true), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Repeat.Label"), ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "Y", "N" }, true), //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Position.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ }; wFields = new TableView(transMeta, wFieldsComp, SWT.FULL_SELECTION | SWT.MULTI, colinf, FieldsRows, lsMod, props); final int AttributesRows = input.getDefinitionLength(); ColumnInfo[] colinfatt = new ColumnInfo[] { new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.Element.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ new ColumnInfo( BaseMessages.getString(PKG, "XMLInputSaxDialog.Column.DefiningAttribute.Label"), ColumnInfo.COLUMN_TYPE_TEXT, false), //$NON-NLS-1$ }; wAttributes = new TableView(transMeta, wFieldsComp, SWT.FULL_SELECTION | SWT.MULTI, colinfatt, AttributesRows, lsMod, props); fdAttributes = new FormData(); fdAttributes.left = new FormAttachment(0, 0); fdAttributes.top = new FormAttachment(0, 0); fdAttributes.right = new FormAttachment(100, 0); fdAttributes.bottom = new FormAttachment(15, 0); wAttributes.setLayoutData(fdAttributes); fdFields = new FormData(); fdFields.left = new FormAttachment(0, 0); fdFields.top = new FormAttachment(wAttributes,margin*3); fdFields.right = new FormAttachment(100, 0); fdFields.bottom = new FormAttachment(wGet, -margin); wFields.setLayoutData(fdFields); fdFieldsComp = new FormData(); fdFieldsComp.left = new FormAttachment(0, 0); fdFieldsComp.top = new FormAttachment(0, 0); fdFieldsComp.right = new FormAttachment(100, 0); fdFieldsComp.bottom = new FormAttachment(110, 0); wFieldsComp.setLayoutData(fdFieldsComp); wFieldsComp.layout(); wFieldsTab.setControl(wFieldsComp); fdTabFolder = new FormData(); fdTabFolder.left = new FormAttachment(0, 0); fdTabFolder.top = new FormAttachment(wStepname, 
margin); fdTabFolder.right = new FormAttachment(100, 0); fdTabFolder.bottom = new FormAttachment(100, -50); wTabFolder.setLayoutData(fdTabFolder); wOK = new Button(shell, SWT.PUSH); wOK.setText(BaseMessages.getString(PKG, "System.Button.OK")); //$NON-NLS-1$ wPreview = new Button(shell, SWT.PUSH); wPreview.setText(STRING_PREVIEW_ROWS); wCancel = new Button(shell, SWT.PUSH); wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel")); //$NON-NLS-1$ setButtonPositions(new Button[] { wOK, wPreview, wCancel }, margin, wTabFolder); // Add listeners lsOK = new Listener() { public void handleEvent(Event e) { ok(); } }; lsGet = new Listener() { public void handleEvent(Event e) { get(); } }; lsPreview = new Listener() { public void handleEvent(Event e) { preview(); } }; lsCancel = new Listener() { public void handleEvent(Event e) { cancel(); } }; wOK.addListener(SWT.Selection, lsOK); wGet.addListener(SWT.Selection, lsGet); wPreview.addListener(SWT.Selection, lsPreview); wCancel.addListener(SWT.Selection, lsCancel); lsDef = new SelectionAdapter() { public void widgetDefaultSelected(SelectionEvent e) { ok(); } }; wStepname.addSelectionListener(lsDef); wLimit.addSelectionListener(lsDef); wInclRownumField.addSelectionListener(lsDef); wInclFilenameField.addSelectionListener(lsDef); // Add the file to the list of files... SelectionAdapter selA = new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { wFilenameList.add(new String[] { wFilename.getText(), wFilemask.getText() }); wFilename.setText(""); //$NON-NLS-1$ wFilemask.setText(""); //$NON-NLS-1$ wFilenameList.removeEmptyRows(); wFilenameList.setRowNums(); wFilenameList.optWidth(true); } }; wbaFilename.addSelectionListener(selA); wFilename.addSelectionListener(selA); // Delete files from the list of files... wbdFilename.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { int idx[] = wFilenameList.getSelectionIndices(); wFilenameList.remove(idx); wFilenameList.removeEmptyRows(); wFilenameList.setRowNums(); } }); // Edit the selected file & remove from the list... wbeFilename.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent arg0) { int idx = wFilenameList.getSelectionIndex(); if (idx >= 0) { String string[] = wFilenameList.getItem(idx); wFilename.setText(string[0]); wFilemask.setText(string[1]); wFilenameList.remove(idx); } wFilenameList.removeEmptyRows(); wFilenameList.setRowNums(); } }); // Show the files that are selected at this time... 
wbShowFiles.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { try { XMLInputSaxMeta tfii = new XMLInputSaxMeta(); getInfo(tfii); String files[] = tfii.getFilePaths(transMeta); if (files != null && files.length > 0) { EnterSelectionDialog esd = new EnterSelectionDialog( shell, files, BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.FilesRead.Title"), BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.FilesRead.Message")); //$NON-NLS-1$ //$NON-NLS-2$ esd.setViewOnly(); esd.open(); } else { MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); mb.setMessage(BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.NoFilesFound.Message")); //$NON-NLS-1$ mb.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.NoFilesFound.Title")); //$NON-NLS-1$ mb.open(); } } catch (KettleException ex) { new ErrorDialog( shell, BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorParsingInputData.Title"), BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorParsingInputData.Message"), ex); //$NON-NLS-1$ //$NON-NLS-2$ } } }); // Enable/disable the right fields to allow a filename to be added to // each row... wInclFilename.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { setIncludeFilename(); } }); // Enable/disable the right fields to allow a row number to be added to // each row... wInclRownum.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { setIncludeRownum(); } }); // Whenever something changes, set the tooltip to the expanded version // of the filename: wFilename.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent e) { wFilename.setToolTipText(transMeta.environmentSubstitute(wFilename.getText())); } }); // Listen to the Browse... button wbbFilename.addSelectionListener(new SelectionAdapter() { public void widgetSelected(SelectionEvent e) { if (wFilemask.getText() != null && wFilemask.getText().length() > 0) // A // mask: // a // directory! { DirectoryDialog dialog = new DirectoryDialog(shell, SWT.OPEN); if (wFilename.getText() != null) { String fpath = transMeta.environmentSubstitute(wFilename.getText()); dialog.setFilterPath(fpath); } if (dialog.open() != null) { String str = dialog.getFilterPath(); wFilename.setText(str); } } else { FileDialog dialog = new FileDialog(shell, SWT.OPEN); dialog.setFilterExtensions(new String[] { "*.xml;*.XML", "*" }); //$NON-NLS-1$ //$NON-NLS-2$ if (wFilename.getText() != null) { String fname = transMeta.environmentSubstitute(wFilename.getText()); dialog.setFileName(fname); } dialog .setFilterNames(new String[] { BaseMessages.getString(PKG, "XMLInputSaxDialog.60"), BaseMessages.getString(PKG, "XMLInputSaxDialog.68") }); //$NON-NLS-1$ //$NON-NLS-2$ if (dialog.open() != null) { String str = dialog.getFilterPath() + System.getProperty("file.separator") + dialog.getFileName(); //$NON-NLS-1$ wFilename.setText(str); } } } }); // Detect X or ALT-F4 or something that kills this window... shell.addShellListener(new ShellAdapter() { public void shellClosed(ShellEvent e) { cancel(); } }); wTabFolder.setSelection(0); // Set the shell size, based upon previous time... 
        setSize();
        getData(input);
        input.setChanged(changed);
        wFields.optWidth(true);

        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch())
                display.sleep();
        }
        return stepname;
    }

    public void setMultiple() {
        /*
         * wlFilemask.setEnabled(wMultiple.getSelection());
         * wFilemask.setEnabled(wMultiple.getSelection());
         * wlFilename.setText(wMultiple.getSelection()?"Directory":"Filename ");
         */
    }

    public void setIncludeFilename() {
        wlInclFilenameField.setEnabled(wInclFilename.getSelection());
        wInclFilenameField.setEnabled(wInclFilename.getSelection());
    }

    public void setIncludeRownum() {
        wlInclRownumField.setEnabled(wInclRownum.getSelection());
        wInclRownumField.setEnabled(wInclRownum.getSelection());
    }

    /**
     * Read the data from the XMLInputSaxMeta object and show it in this dialog.
     *
     * @param in
     *            The XMLInputSaxMeta object to obtain the data from.
     */
    public void getData(XMLInputSaxMeta in) {
        if (in.getFileName() != null) {
            wFilenameList.removeAll();
            for (int i = 0; i < in.getFileName().length; i++) {
                wFilenameList.add(new String[] { in.getFileName()[i], in.getFileMask()[i] });
            }
            wFilenameList.removeEmptyRows();
            wFilenameList.setRowNums();
            wFilenameList.optWidth(true);
        }
        wInclFilename.setSelection(in.includeFilename());
        wInclRownum.setSelection(in.includeRowNumber());
        // wMultiple.setSelection(in.wildcard);
        if (in.getFilenameField() != null)
            wInclFilenameField.setText(in.getFilenameField());
        if (in.getRowNumberField() != null)
            wInclRownumField.setText(in.getRowNumberField());
        wLimit.setText("" + in.getRowLimit()); //$NON-NLS-1$

        logDebug("getting fields info..."); //$NON-NLS-1$
        for (int i = 0; i < in.getInputFields().length; i++) {
            XMLInputSaxField field = in.getInputFields()[i];
            if (field != null) {
                TableItem item = wFields.table.getItem(i);
                String name = field.getName();
                String type = field.getTypeDesc();
                String format = field.getFormat();
                String length = "" + field.getLength(); //$NON-NLS-1$
                String prec = "" + field.getPrecision(); //$NON-NLS-1$
                String curr = field.getCurrencySymbol();
                String group = field.getGroupSymbol();
                String decim = field.getDecimalSymbol();
                String trim = field.getTrimTypeDesc();
                String rep = field.isRepeated() ?
"Y" : "N"; //$NON-NLS-1$ //$NON-NLS-2$ if (name != null) item.setText(1, name); if (type != null) item.setText(2, type); if (format != null) item.setText(3, format); if (length != null && !"-1".equals(length))item.setText(4, length); //$NON-NLS-1$ if (prec != null && !"-1".equals(prec))item.setText(5, prec); //$NON-NLS-1$ if (curr != null) item.setText(6, curr); if (decim != null) item.setText(7, decim); if (group != null) item.setText(8, group); if (trim != null) item.setText(9, trim); if (rep != null) item.setText(10, rep); item.setText(11, field.getFieldPositionsCode()); } } for (int i = 0; i < input.getDefinitionLength(); i++) { TableItem item = wAttributes.table.getItem(i); item.setText(1, input.getDefiningElement(i)); item.setText(2, input.getDefiningAttribute(i)); } for (int i = 0; i < input.getInputPosition().length; i++) { TableItem item = wPosition.table.getItem(i); if (input.getInputPosition()[i] != null) item.setText(1, input.getInputPosition()[i].toString()); } wFields.removeEmptyRows(); wFields.setRowNums(); wFields.optWidth(true); wPosition.removeEmptyRows(); wPosition.setRowNums(); wPosition.optWidth(true); setMultiple(); setIncludeFilename(); setIncludeRownum(); wStepname.selectAll(); } private void cancel() { stepname = null; input.setChanged(changed); dispose(); } private void ok() { if (Const.isEmpty(wStepname.getText())) return; try { getInfo(input); } catch (KettleException e) { new ErrorDialog( shell, BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorParsingInputData.Title"), BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorParsingInputData.Message"), e); //$NON-NLS-1$ //$NON-NLS-2$ } dispose(); } // get metadata from ui to in private void getInfo(XMLInputSaxMeta in) throws KettleException { stepname = wStepname.getText(); // return value // copy info to TextFileInputMeta class (input) in.setRowLimit(Const.toLong(wLimit.getText(), 0L)); in.setFilenameField(wInclFilenameField.getText()); in.setRowNumberField(wInclRownumField.getText()); in.setIncludeFilename(wInclFilename.getSelection()); in.setIncludeRowNumber(wInclRownum.getSelection()); int nrFiles = wFilenameList.getItemCount(); int nrAttributes = wAttributes.nrNonEmpty(); int nrFields = wFields.nrNonEmpty(); int nrPositions = wPosition.nrNonEmpty(); in.allocate(nrFiles, nrFields, nrPositions); in.setFileName(wFilenameList.getItems(0)); in.setFileMask(wFilenameList.getItems(1)); in.clearDefinition(); for (int i = 0; i < nrAttributes; i++) { TableItem item = wAttributes.getNonEmpty(i); in.setDefiningAttribute(item.getText(1), item.getText(2)); } for (int i = 0; i < nrFields; i++) { XMLInputSaxField field = new XMLInputSaxField(); TableItem item = wFields.getNonEmpty(i); field.setName(item.getText(1)); field.setType(Value.getType(item.getText(2))); field.setFormat(item.getText(3)); field.setLength(Const.toInt(item.getText(4), -1)); field.setPrecision(Const.toInt(item.getText(5), -1)); field.setCurrencySymbol(item.getText(6)); field.setDecimalSymbol(item.getText(7)); field.setGroupSymbol(item.getText(8)); field.setTrimType(XMLInputSaxField.getTrimType(item.getText(9))); field.setRepeated("Y".equalsIgnoreCase(item.getText(10))); //$NON-NLS-1$ field.setFieldPosition(item.getText(11)); in.getInputFields()[i] = field; } for (int i = 0; i < nrPositions; i++) { TableItem item = wPosition.getNonEmpty(i); String encode = item.getText(1); in.getInputPosition()[i] = new XMLInputSaxFieldPosition(encode); // System.out.println("Input Position #"+i+" : // "+input.getInputPosition()); } } // private void get() { 
try { XMLInputSaxMeta meta = new XMLInputSaxMeta(); getInfo(meta); // OK, let's try to walk through the complete tree List<XMLInputSaxField> fields = new ArrayList<XMLInputSaxField>(); // Keep the list of positions String[] filePaths = meta.getFilePaths(transMeta); if (meta.getInputPosition().length==0) { //error MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR); mb.setMessage(BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.NoElements.Message")); //$NON-NLS-1$ mb.setText(BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.NoElements.Title")); //$NON-NLS-1$ mb.open(); return; } for (int f = 0; f < filePaths.length; f++) { XMLInputSaxFieldRetriever fieldRetreiver = new XMLInputSaxFieldRetriever(log, filePaths[f], meta); fields = fieldRetreiver.getFields(); // add the values to the grid... for (int i = 0; i < fields.size(); i++) { XMLInputSaxField iF = (XMLInputSaxField) fields.get(i); TableItem item = new TableItem(wFields.table, SWT.NONE); item.setText(1, iF.getName()); item.setText(2, iF.getTypeDesc()); item.setText(11, iF.getFieldPositionsCode(meta.getInputPosition().length)); } wFields.removeEmptyRows(); wFields.setRowNums(); wFields.optWidth(true); } } catch (KettleException e) { new ErrorDialog( shell, BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorParsingInputData.Title"), BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorParsingInputData.Message"), e); //$NON-NLS-1$ //$NON-NLS-2$ System.out.println(e.getMessage()); } } // Preview the data private void preview() { try { // Create the XML input step XMLInputSaxMeta oneMeta = new XMLInputSaxMeta(); getInfo(oneMeta); TransMeta previewMeta = TransPreviewFactory.generatePreviewTransformation(transMeta, oneMeta, wStepname .getText()); EnterNumberDialog numberDialog = new EnterNumberDialog( shell, props.getDefaultPreviewSize(), BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.EnterPreviewSize.Title"), BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.EnterPreviewSize.Message")); //$NON-NLS-1$ //$NON-NLS-2$ int previewSize = numberDialog.open(); if (previewSize > 0) { TransPreviewProgressDialog progressDialog = new TransPreviewProgressDialog(shell, previewMeta, new String[] { wStepname.getText() }, new int[] { previewSize }); progressDialog.open(); if (!progressDialog.isCancelled()) { PreviewRowsDialog prd = new PreviewRowsDialog(shell, transMeta, SWT.NONE, wStepname.getText(), progressDialog.getPreviewRowsMeta(wStepname.getText()), progressDialog.getPreviewRows(wStepname.getText())); prd.open(); } } } catch (KettleException e) { new ErrorDialog( shell, BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorDisplayingPreviewData.Title"), BaseMessages.getString(PKG, "XMLInputSaxDialog.Dialog.ErrorDisplayingPreviewData.Message"), e); //$NON-NLS-1$ //$NON-NLS-2$ } } }
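/*
 * Illustrative usage sketch (not part of the original sources): opening the dialog above
 * for an existing XMLInputSaxMeta step. The way the Shell, TransMeta and step name are
 * obtained is assumed (normally Spoon supplies them); only the constructor signature and
 * the open() contract come from the dialog class itself.
 */
package org.pentaho.di.ui.trans.steps.xmlinputsax;

import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.steps.xmlinputsax.XMLInputSaxMeta;

public class XMLInputSaxDialogExample {

    public static void edit(Shell parent, TransMeta transMeta, XMLInputSaxMeta meta, String stepName) {
        XMLInputSaxDialog dialog = new XMLInputSaxDialog(parent, meta, transMeta, stepName);
        // open() blocks until the dialog is closed and returns the (possibly renamed)
        // step name, or null if the user cancelled.
        String result = dialog.open();
        if (result == null) {
            System.out.println("Dialog cancelled; step '" + stepName + "' left unchanged");
        }
    }
}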
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.gui.action; import java.awt.event.ActionEvent; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashSet; import java.util.Set; import javax.swing.JFileChooser; import javax.swing.JTree; import javax.swing.tree.TreePath; import org.apache.jmeter.exceptions.IllegalUserActionException; import org.apache.jmeter.gui.GuiPackage; import org.apache.jmeter.gui.tree.JMeterTreeNode; import org.apache.jmeter.gui.util.FileDialoger; import org.apache.jmeter.gui.util.FocusRequester; import org.apache.jmeter.gui.util.MenuFactory; import org.apache.jmeter.save.SaveService; import org.apache.jmeter.services.FileServer; import org.apache.jmeter.testelement.TestElement; import org.apache.jmeter.testelement.TestPlan; import org.apache.jmeter.testelement.WorkBench; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.collections.HashTree; import org.apache.jorphan.logging.LoggingManager; import org.apache.jorphan.util.JOrphanUtils; import org.apache.log.Logger; import com.thoughtworks.xstream.converters.ConversionException; /** * Handles the Open (load a new file) and Merge commands. * */ public class Load implements Command { private static final Logger log = LoggingManager.getLoggerForClass(); private static final boolean expandTree = JMeterUtils.getPropDefault("onload.expandtree", false); //$NON-NLS-1$ private static final Set<String> commands = new HashSet<String>(); static { commands.add(ActionNames.OPEN); commands.add(ActionNames.MERGE); } public Load() { super(); } @Override public Set<String> getActionNames() { return commands; } @Override public void doAction(final ActionEvent e) { final JFileChooser chooser = FileDialoger.promptToOpenFile(new String[] { ".jmx" }); //$NON-NLS-1$ if (chooser == null) { return; } final File selectedFile = chooser.getSelectedFile(); if(selectedFile != null) { final boolean merging = e.getActionCommand().equals(ActionNames.MERGE); // We must ask the user if it is ok to close current project if(!merging) { // i.e. it is OPEN if (!Close.performAction(e)) { return; } } loadProjectFile(e, selectedFile, merging); } } /** * Loads or merges a file into the current GUI, reporting any errors to the user. * If the file is a complete test plan, sets the GUI test plan file name * * @param e the event that triggered the action * @param f the file to load * @param merging if true, then try to merge the file into the current GUI. */ static void loadProjectFile(final ActionEvent e, final File f, final boolean merging) { loadProjectFile(e, f, merging, true); } /** * Loads or merges a file into the current GUI, reporting any errors to the user. 
* If the file is a complete test plan, sets the GUI test plan file name * * @param e the event that triggered the action * @param f the file to load * @param merging if true, then try to merge the file into the current GUI. * @param setDetails if true, then set the file details (if not merging) */ static void loadProjectFile(final ActionEvent e, final File f, final boolean merging, final boolean setDetails) { ActionRouter.getInstance().doActionNow(new ActionEvent(e.getSource(), e.getID(), ActionNames.STOP_THREAD)); final GuiPackage guiPackage = GuiPackage.getInstance(); if (f != null) { InputStream reader = null; try { if (merging) { log.info("Merging file: " + f); } else { log.info("Loading file: " + f); // TODO should this be done even if not a full test plan? // and what if load fails? if(setDetails) { FileServer.getFileServer().setBaseForScript(f); } } reader = new FileInputStream(f); final HashTree tree = SaveService.loadTree(reader); final boolean isTestPlan = insertLoadedTree(e.getID(), tree, merging); // don't change name if merging if (!merging && isTestPlan && setDetails) { // TODO should setBaseForScript be called here rather than above? guiPackage.setTestPlanFile(f.getAbsolutePath()); } } catch (NoClassDefFoundError ex) {// Allow for missing optional jars reportError("Missing jar file", ex, true); } catch (ConversionException ex) { log.warn("Could not convert file "+ex); JMeterUtils.reportErrorToUser(SaveService.CEtoString(ex)); } catch (IOException ex) { reportError("Error reading file: ", ex, false); } catch (Exception ex) { reportError("Unexpected error", ex, true); } finally { JOrphanUtils.closeQuietly(reader); } guiPackage.updateCurrentGui(); guiPackage.getMainFrame().repaint(); } } /** * Inserts (or merges) the tree into the GUI. * Does not check if the previous tree has been saved. * Clears the existing GUI test plan if we are inserting a complete plan. * @param id the id for the ActionEvent that is created * @param tree the tree to load * @param merging true if the tree is to be merged; false if it is to replace the existing tree * @return true if the loaded tree was a full test plan * @throws IllegalUserActionException if the tree cannot be merged at the selected position or the tree is empty */ // Does not appear to be used externally; called by #loadProjectFile() public static boolean insertLoadedTree(final int id, final HashTree tree, final boolean merging) throws IllegalUserActionException { // convertTree(tree); if (tree == null) { throw new IllegalUserActionException("Empty TestPlan or error reading test plan - see log file"); } final boolean isTestPlan = tree.getArray()[0] instanceof TestPlan; // If we are loading a new test plan, initialize the tree with the testplan node we are loading final GuiPackage guiInstance = GuiPackage.getInstance(); if(isTestPlan && !merging) { // Why does this not call guiInstance.clearTestPlan() ? // Is there a reason for not clearing everything? 
guiInstance.clearTestPlan((TestElement)tree.getArray()[0]); } if (merging){ // Check if target of merge is reasonable final TestElement te = (TestElement)tree.getArray()[0]; if (!(te instanceof WorkBench || te instanceof TestPlan)){// These are handled specially by addToTree final boolean ok = MenuFactory.canAddTo(guiInstance.getCurrentNode(), te); if (!ok){ String name = te.getName(); String className = te.getClass().getName(); className = className.substring(className.lastIndexOf('.')+1); throw new IllegalUserActionException("Can't merge "+name+" ("+className+") here"); } } } final HashTree newTree = guiInstance.addSubTree(tree); guiInstance.updateCurrentGui(); guiInstance.getMainFrame().getTree().setSelectionPath( new TreePath(((JMeterTreeNode) newTree.getArray()[0]).getPath())); final HashTree subTree = guiInstance.getCurrentSubTree(); // Send different event wether we are merging a test plan into another test plan, // or loading a testplan from scratch ActionEvent actionEvent = new ActionEvent(subTree.get(subTree.getArray()[subTree.size() - 1]), id, merging ? ActionNames.SUB_TREE_MERGED : ActionNames.SUB_TREE_LOADED); ActionRouter.getInstance().actionPerformed(actionEvent); final JTree jTree = guiInstance.getMainFrame().getTree(); if (expandTree && !merging) { // don't automatically expand when merging for(int i = 0; i < jTree.getRowCount(); i++) { jTree.expandRow(i); } } else { jTree.expandRow(0); } jTree.setSelectionPath(jTree.getPathForRow(1)); FocusRequester.requestFocus(jTree); return isTestPlan; } /** * Inserts the tree into the GUI. * Does not check if the previous tree has been saved. * Clears the existing GUI test plan if we are inserting a complete plan. * @param id the id for the ActionEvent that is created * @param tree the tree to load * @return true if the loaded tree was a full test plan * @throws IllegalUserActionException if the tree cannot be merged at the selected position or the tree is empty */ // Called by JMeter#startGui() public static boolean insertLoadedTree(final int id, final HashTree tree) throws IllegalUserActionException { return insertLoadedTree(id, tree, false); } // Helper method to simplify code private static void reportError(final String reason, final Throwable ex, final boolean stackTrace) { if (stackTrace) { log.warn(reason, ex); } else { log.warn(reason + ex); } String msg = ex.getMessage(); if (msg == null) { msg = "Unexpected error - see log for details"; } JMeterUtils.reportErrorToUser(msg); } }
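/*
 * Illustrative usage sketch (not part of the original sources): loading a test plan
 * programmatically and inserting it into the GUI tree, mirroring what Load#loadProjectFile
 * does above. Error handling is reduced to rethrowing, and the event id value passed to
 * insertLoadedTree is arbitrary here; SaveService.loadTree and JOrphanUtils.closeQuietly
 * are used exactly as in the class above.
 */
package org.apache.jmeter.gui.action;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.jmeter.save.SaveService;
import org.apache.jorphan.collections.HashTree;
import org.apache.jorphan.util.JOrphanUtils;

public class LoadExample {

    /** Replaces the current GUI tree with the plan read from the given .jmx file. */
    public static boolean openTestPlan(File jmxFile) throws Exception {
        InputStream reader = new FileInputStream(jmxFile);
        try {
            HashTree tree = SaveService.loadTree(reader);
            // false = replace the current tree rather than merging into it
            return Load.insertLoadedTree(1, tree, false);
        } finally {
            JOrphanUtils.closeQuietly(reader);
        }
    }
}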
package seedu.taskmaster.ui; import java.io.IOException; import javafx.fxml.FXML; import javafx.scene.Node; import javafx.scene.Scene; import javafx.scene.control.MenuItem; import javafx.scene.input.KeyCombination; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.VBox; import javafx.stage.Stage; import seedu.taskmaster.commons.core.Config; import seedu.taskmaster.commons.core.GuiSettings; import seedu.taskmaster.commons.events.ui.ExitAppRequestEvent; import seedu.taskmaster.logic.Logic; import seedu.taskmaster.model.UserPrefs; import seedu.taskmaster.model.task.ReadOnlyTask; /** * The Main Window. Provides the basic application layout containing * a menu bar and space where other JavaFX elements can be placed. */ public class MainWindow extends UiPart { private static final String ICON = "/images/address_book_32.png"; private static final String FXML = "MainWindow.fxml"; public static final int MIN_HEIGHT = 870; public static final int MIN_WIDTH = 1545; private final String DARK_THEME = getClass().getResource("/view/DarkTheme.css").toExternalForm(); private final String AGENDA = getClass().getResource("/view/MyAgenda.css").toExternalForm(); private Logic logic; // Independent Ui parts residing in this Ui container private BrowserPanel browserPanel; private NavbarPanel navbarPanel; private TaskListPanel taskListPanel; private ResultDisplay resultDisplay; private CommandBox commandBox; private Config config; // Handles to elements of this Ui container private VBox rootLayout; private Scene scene; @FXML private AnchorPane browserPlaceholder; @FXML private AnchorPane commandBoxPlaceholder; @FXML private MenuItem helpMenuItem; @FXML private AnchorPane navbarPanelPlaceholder; @FXML private AnchorPane taskListPanelPlaceholder; @FXML private AnchorPane resultDisplayPlaceholder; @FXML private AnchorPane statusbarPlaceholder; public MainWindow() { super(); } @Override public void setNode(Node node) { rootLayout = (VBox) node; } @Override public String getFxmlPath() { return FXML; } public static MainWindow load(Stage primaryStage, Config config, UserPrefs prefs, Logic logic) { MainWindow mainWindow = UiPartLoader.loadUiPart(primaryStage, new MainWindow()); mainWindow.configure(config.getAppTitle(), config.getTaskListName(), config, prefs, logic); return mainWindow; } private void configure(String appTitle, String taskListName, Config config, UserPrefs prefs, Logic logic) { //Set dependencies this.logic = logic; this.config = config; //Configure the UI setTitle(appTitle); setIcon(ICON); setWindowMinSize(); setWindowDefaultSize(prefs); scene = new Scene(rootLayout); scene.getStylesheets().add(DARK_THEME); scene.getStylesheets().add(AGENDA); primaryStage.setScene(scene); setAccelerators(); } private void setAccelerators() { helpMenuItem.setAccelerator(KeyCombination.valueOf("F1")); } void fillInnerParts() { browserPanel = BrowserPanel.load(primaryStage, getBrowserPanelPlaceholder(), logic.getFilteredTaskList()); navbarPanel = NavbarPanel.load(primaryStage, getNavbarPlaceholder()); taskListPanel = TaskListPanel.load(primaryStage, getTaskListPlaceholder(), logic.getFilteredTaskList()); resultDisplay = ResultDisplay.load(primaryStage, getResultDisplayPlaceholder()); StatusBarFooter.load(primaryStage, getStatusbarPlaceholder(), config.getTaskListFilePath()); commandBox = CommandBox.load(primaryStage, getCommandBoxPlaceholder(), resultDisplay, logic); } private AnchorPane getBrowserPanelPlaceholder() { return browserPlaceholder; } private AnchorPane getCommandBoxPlaceholder() { return 
commandBoxPlaceholder; } private AnchorPane getStatusbarPlaceholder() { return statusbarPlaceholder; } private AnchorPane getResultDisplayPlaceholder() { return resultDisplayPlaceholder; } public AnchorPane getNavbarPlaceholder() { return navbarPanelPlaceholder; } public AnchorPane getTaskListPlaceholder() { return taskListPanelPlaceholder; } public void hide() { primaryStage.hide(); } private void setTitle(String appTitle) { primaryStage.setTitle(appTitle); } /** * Sets the default size based on user preferences. */ protected void setWindowDefaultSize(UserPrefs prefs) { primaryStage.setHeight(prefs.getGuiSettings().getWindowHeight()); primaryStage.setWidth(prefs.getGuiSettings().getWindowWidth()); if (prefs.getGuiSettings().getWindowCoordinates() != null) { primaryStage.setX(prefs.getGuiSettings().getWindowCoordinates().getX()); primaryStage.setY(prefs.getGuiSettings().getWindowCoordinates().getY()); } } private void setWindowMinSize() { primaryStage.setMinHeight(MIN_HEIGHT); primaryStage.setMinWidth(MIN_WIDTH); } /** * Returns the current size and the position of the main Window. */ public GuiSettings getCurrentGuiSetting() { return new GuiSettings(primaryStage.getWidth(), primaryStage.getHeight(), (int) primaryStage.getX(), (int) primaryStage.getY()); } @FXML public void handleHelp() throws IOException { HelpWindow helpWindow = HelpWindow.load(primaryStage); helpWindow.show(); } public void show() { primaryStage.show(); } /** * Closes the application. */ @FXML private void handleExit() { raise(new ExitAppRequestEvent()); } public NavbarPanel getNavbarPanel() { return this.navbarPanel; } public CommandBox getCommandBox() { return this.commandBox; } public TaskListPanel getTaskListPanel() { return this.taskListPanel; } public BrowserPanel getBrowserPanel() { return this.browserPanel; } public void loadTaskPage(ReadOnlyTask task) { browserPanel.loadTaskPage(task); } public void releaseResources() { browserPanel.freeResources(); } //@@author A0147967J public void switchToInitialTab() { logic.execute("view today"); logic.initializeUndoRedoManager(); } }
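/*
 * Hedged startup sketch, not part of MainWindow: the order in which the window is
 * typically wired up by a UI bootstrap class assumed to live in the same package
 * (so that the package-private fillInnerParts() is reachable). Only calls defined
 * in MainWindow above are used; the surrounding class name and call ordering are
 * assumptions for illustration.
 */
import javafx.stage.Stage;
import seedu.taskmaster.commons.core.Config;
import seedu.taskmaster.logic.Logic;
import seedu.taskmaster.model.UserPrefs;

class MainWindowStartupSketch {
    MainWindow start(Stage primaryStage, Config config, UserPrefs prefs, Logic logic) {
        // Load the FXML-backed window and inject its dependencies.
        MainWindow mainWindow = MainWindow.load(primaryStage, config, prefs, logic);
        mainWindow.show();
        // Attach the browser, navbar, task list, result display, status bar and command box.
        mainWindow.fillInnerParts();
        // "view today" plus undo/redo initialisation, as defined above.
        mainWindow.switchToInitialTab();
        return mainWindow;
    }
}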
/* * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.alg.transform.wavelet; import boofcv.core.image.border.BorderIndex1D; import boofcv.core.image.border.BorderIndex1D_Reflect; import boofcv.core.image.border.BorderIndex1D_Wrap; import boofcv.core.image.border.BorderType; import boofcv.struct.image.*; import boofcv.struct.wavelet.WlBorderCoef; import boofcv.struct.wavelet.WlCoef; /** * Various functions which are useful when working with or computing wavelet transforms. * * @author Peter Abeles */ public class UtilWavelet { /** * The original image can have an even or odd number of width/height. While the transformed * image must have an even number of pixels. If the original image is even then the sames * are the same, otherwise the transformed image's shape is rounded up. * * @param original Original input image. * @param transformed Image which has been transformed. */ public static void checkShape( ImageSingleBand original , ImageSingleBand transformed ) { if( transformed.width % 2 == 1 || transformed.height % 2 == 1 ) throw new IllegalArgumentException("Image containing the wavelet transform must have an even width and height."); int w = original.width + original.width%2; int h = original.height + original.height%2; if( transformed.width < w || transformed.height < h) throw new IllegalArgumentException("Transformed image must be larger than the original image. " + "("+w+","+h+") vs ("+transformed.width+","+transformed.height+")"); } public static void checkShape( WlCoef desc , ImageSingleBand original , ImageSingleBand transformed , int level ) { ImageDimension tranDim = UtilWavelet.transformDimension(original,level); if( transformed.width != tranDim.width || transformed.height != tranDim.height ) { throw new IllegalArgumentException("Image containing the wavelet transform must be "+tranDim.width+" x "+tranDim.height); } if( original.width < desc.getScalingLength() || original.height < desc.getScalingLength() ) throw new IllegalArgumentException("Original image's width and height must be large enough the number of scaling coefficients."); if( original.width < desc.getWaveletLength() || original.height < desc.getWaveletLength() ) throw new IllegalArgumentException("Original image's width and height must be large enough the number of wavelet coefficients."); } public static int computeScale( int level ) { if( level <= 1 ) return 1; return (int)Math.pow(2,level-1); } /** * Returns the number that the output image needs to be divisible by. */ public static int computeDiv( int level ) { if( level <= 1 ) return 2; return (int)Math.pow(2,level-1); } /** * Returns dimension which is required for the transformed image in a multilevel * wavelet transform. 
*/ public static ImageDimension transformDimension( ImageBase orig , int level ) { return transformDimension(orig.width,orig.height,level); } public static ImageDimension transformDimension( int width , int height , int level ) { int div = computeDiv(level); int w = width%div; int h = height%div; width += w > 0 ? div-w : 0; height += h > 0 ? div-h : 0; return new ImageDimension(width,height); } /** * <p> * Compute the energy of the specified array. * </p> * * <p> * E = sum( i=1..N , a[i]*a[i] ) * </p> */ public static double computeEnergy( float []array ) { double total = 0; for( int i = 0; i < array.length; i++ ) { total += array[i]*array[i]; } return total; } /** * <p> * Compute the energy of the specified array. * </p> * * <p> * E = sum( i=1..N , a[i]*a[i] ) / (N*d*d) * </p> */ public static double computeEnergy( int []array , int denominator) { double total = 0; for( int i = 0; i < array.length; i++ ) { total += array[i]*array[i]; } total /= denominator*denominator; return total; } public static double sumCoefficients( float []array ) { double total = 0; for( int i = 0; i < array.length; i++ ) { total += array[i]; } return total; } public static int sumCoefficients( int []array ) { int total = 0; for( int i = 0; i < array.length; i++ ) { total += array[i]; } return total; } /** * Returns the lower border for a forward wavelet transform. */ public static int borderForwardLower( WlCoef desc ) { int ret = -Math.min(desc.offsetScaling,desc.offsetWavelet); return ret + (ret % 2); } /** * Returns the upper border (offset from image edge) for a forward wavelet transform. */ public static int borderForwardUpper( WlCoef desc , int dataLength) { int w = Math.max( desc.offsetScaling+desc.getScalingLength() , desc.offsetWavelet+desc.getWaveletLength()); int a = dataLength%2; w -= a; return Math.max((w + (w%2))-2,0)+a; } /** * Returns the lower border for an inverse wavelet transform. */ public static int borderInverseLower( WlBorderCoef<?> desc, BorderIndex1D border ) { WlCoef inner = desc.getInnerCoefficients(); int borderSize = borderForwardLower(inner); WlCoef ll = borderSize > 0 ? inner : null; WlCoef lu = ll; WlCoef uu = inner; int indexLU = 0; if( desc.getLowerLength() > 0 ) { ll = desc.getBorderCoefficients(0); indexLU = desc.getLowerLength()*2-2; lu = desc.getBorderCoefficients(indexLU); } if( desc.getUpperLength() > 0 ) { uu = desc.getBorderCoefficients(-2); } border.setLength(2000); borderSize = checkInverseLower(ll,0,border,borderSize); borderSize = checkInverseLower(lu,indexLU,border,borderSize); borderSize = checkInverseLower(uu,1998,border,borderSize); return borderSize; } public static int checkInverseLower( WlCoef coef, int index , BorderIndex1D border , int current ) { if( coef == null ) return current; // how far up and down the coefficients go int a = index + Math.max( coef.getScalingLength()+coef.offsetScaling,coef.getWaveletLength()+coef.offsetScaling); int b = index + Math.min( coef.offsetScaling , coef.offsetWavelet ) -1; // the above -1 is needed because the lower bound is becoming an upper bound. // lower bounds are inclusive and upper bounds are exclusive // take in account the border a = border.getIndex(a); b = border.getIndex(b); if( a > 1000 ) a = -1; if( b > 1000 ) b = -1; a = Math.max(a,b); a += a%2; return Math.max(a,current); } /** * Returns the upper border (offset from image edge) for an inverse wavelet transform. 
*/ public static int borderInverseUpper( WlBorderCoef<?> desc , BorderIndex1D border, int dataLength ) { WlCoef inner = desc.getInnerCoefficients(); int borderSize = borderForwardUpper(inner,dataLength); borderSize += borderSize%2; WlCoef uu = borderSize > 0 ? inner : null; WlCoef ul = uu; WlCoef ll = inner; int indexUL = 1998; if( desc.getUpperLength() > 0 ) { uu = desc.getBorderCoefficients(-2); indexUL = 2000-desc.getUpperLength()*2; ul = desc.getBorderCoefficients(2000-indexUL); } if( desc.getLowerLength() > 0 ) { ll = desc.getBorderCoefficients(0); } border.setLength(2000); borderSize = checkInverseUpper(uu,2000-borderSize,border,borderSize); borderSize = checkInverseUpper(ul,indexUL,border,borderSize); borderSize = checkInverseUpper(ll,0,border,borderSize); return borderSize; } public static int checkInverseUpper( WlCoef coef, int index , BorderIndex1D border , int current ) { if( coef == null ) return current; // how far up and down the coefficients go int a = index + Math.max( coef.getScalingLength()+coef.offsetScaling,coef.getWaveletLength()+coef.offsetScaling)-1; int b = index + Math.min( coef.offsetScaling , coef.offsetWavelet ); // the plus 1 for 'a' is needed because the lower bound is inclusive not exclusive // take in account the border a = border.getIndex(a); b = border.getIndex(b); if( a < 1000 ) a = 10000; if( b < 1000 ) b = 10000; a = 2000-Math.min(a,b); a += a%2; return Math.max(a,current); } /** * Specialized rounding for use with integer wavelet transform. * * return (top +- div2) / divisor; * * @param top Top part of the equation. * @param div2 The divisor divided by two. * @param divisor The divisor. * @return */ public static int round( int top , int div2 , int divisor ) { if( top > 0 ) return (top + div2)/divisor; else return (top - div2)/divisor; } public static BorderType convertToType( BorderIndex1D b ) { if( b instanceof BorderIndex1D_Reflect) { return BorderType.REFLECT; } else if( b instanceof BorderIndex1D_Wrap) { return BorderType.WRAP; } else { throw new RuntimeException("Unknown border type: "+b.getClass().getSimpleName()); } } /** * Adjusts the values inside a wavelet transform to make it easier to view. 
* * @param transform * @param numLevels Number of levels in the transform */ public static void adjustForDisplay( ImageSingleBand transform , int numLevels , double valueRange ) { if( transform instanceof ImageFloat32 ) adjustForDisplay((ImageFloat32)transform,numLevels,(float)valueRange); else adjustForDisplay((ImageInteger)transform,numLevels,(int)valueRange); } private static void adjustForDisplay( ImageFloat32 transform , int numLevels , float valueRange ) { int div = (int)Math.pow(2,numLevels); int minX = 0; int minY = 0; while( div >= 1 ) { int maxX = transform.width/div; int maxY = transform.height/div; float max = 0; for( int y = 0; y < maxY; y++ ) { for( int x = 0; x < maxX; x++ ) { if( x >= minX || y >= minY ) { float val = Math.abs(transform.data[ transform.getIndex(x,y) ]); max = Math.max(val,max); } } } for( int y = 0; y < maxY; y++ ) { for( int x = 0; x < maxX; x++ ) { if( x >= minX || y >= minY ) { transform.data[ transform.getIndex(x,y) ] *= valueRange/max; } } } minX = maxX; minY = maxY; div /= 2; } } private static void adjustForDisplay( ImageInteger transform , int numLevels , int valueRange ) { int div = (int)Math.pow(2,numLevels); int minX = 0; int minY = 0; while( div >= 1 ) { int maxX = transform.width/div; int maxY = transform.height/div; int max = 0; for( int y = 0; y < maxY; y++ ) { for( int x = 0; x < maxX; x++ ) { if( x >= minX || y >= minY ) { int val = Math.abs(transform.get(x,y)); max = Math.max(val,max); } } } for( int y = 0; y < maxY; y++ ) { for( int x = 0; x < maxX; x++ ) { if( x >= minX || y >= minY ) { int val = transform.get(x,y); transform.set( x,y,val * valueRange/max); } } } minX = maxX; minY = maxY; div /= 2; } } }
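/*
 * Hedged usage sketch, not part of UtilWavelet: allocating an output image large
 * enough to hold a multi-level transform of an odd-sized input, using only
 * transformDimension() from the class above. The 101x77 / 3-level sizes are
 * illustrative; ImageFloat32 and ImageDimension are the standard BoofCV image
 * types already used by this file.
 */
import boofcv.alg.transform.wavelet.UtilWavelet;
import boofcv.struct.image.ImageDimension;
import boofcv.struct.image.ImageFloat32;

class WaveletShapeSketch {
    static ImageFloat32 allocateTransform(int width, int height, int numLevels) {
        // Rounds width/height up to the nearest multiple of computeDiv(numLevels).
        ImageDimension d = UtilWavelet.transformDimension(width, height, numLevels);
        return new ImageFloat32(d.width, d.height);
    }

    public static void main(String[] args) {
        // computeDiv(3) == 4, so a 101x77 input needs a 104x80 transform image.
        ImageFloat32 transformed = allocateTransform(101, 77, 3);
        System.out.println(transformed.width + "x" + transformed.height);
    }
}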
/* * #%L * ACS AEM Commons Bundle * %% * Copyright (C) 2019 Adobe * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.adobe.acs.commons.mcp.impl.processes; import com.adobe.acs.commons.fam.ActionManager; import com.adobe.acs.commons.mcp.ProcessDefinition; import com.adobe.acs.commons.mcp.ProcessInstance; import com.adobe.acs.commons.mcp.form.FormField; import com.adobe.acs.commons.mcp.form.SelectComponent; import com.adobe.acs.commons.mcp.form.TextareaComponent; import com.adobe.acs.commons.mcp.form.workflow.WorkflowModelSelector; import com.adobe.acs.commons.mcp.model.GenericReport; import com.adobe.acs.commons.mcp.util.StringUtil; import com.adobe.acs.commons.util.QueryHelper; import com.adobe.acs.commons.util.impl.QueryHelperImpl; import com.adobe.acs.commons.workflow.synthetic.SyntheticWorkflowModel; import com.adobe.acs.commons.workflow.synthetic.SyntheticWorkflowRunner; import com.day.cq.workflow.WorkflowException; import org.apache.sling.api.resource.LoginException; import org.apache.sling.api.resource.PersistenceException; import org.apache.sling.api.resource.Resource; import org.apache.sling.api.resource.ResourceResolver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jcr.RepositoryException; import javax.jcr.Session; import javax.jcr.query.Query; import java.io.Serializable; import java.util.ArrayList; import java.util.EnumMap; import java.util.List; public class BulkWorkflow extends ProcessDefinition implements Serializable { private static final Logger log = LoggerFactory.getLogger(BulkWorkflow.class); public static final String PROCESS_NAME = "Bulk Workflow"; private final transient QueryHelper queryHelper; private final transient SyntheticWorkflowRunner syntheticWorkflowRunner; public enum ItemStatus { SUCCESS, FAILURE } public enum QueryLanguage { QUERY_BUILDER(QueryHelperImpl.QUERY_BUILDER), LIST(QueryHelperImpl.LIST), @SuppressWarnings("deprecation") XPATH(Query.XPATH), JCR_SQL2(Query.JCR_SQL2), JCR_SQL("JCR-SQL"); private String value; QueryLanguage(String value) { this.value = value; } public String getValue() { return this.value; } } public enum ReportColumns { PAYLOAD_PATH, TIME_TAKEN_IN_MILLISECONDS, STATUS } @FormField( name = "Workflow model", description = "The workflow model to execute. 
This workflow model MUST be compatible with ACS AEM Commons Synthetic Workflow.", component = WorkflowModelSelector.class, options = {"required"} ) public String workflowId = ""; @FormField( name = "Query language", description = "", component = SelectComponent.EnumerationSelector.class, options = {"default=QUERY_BUILDER", "required"} ) public QueryLanguage queryLanguage = QueryLanguage.QUERY_BUILDER; @FormField( name = "Query statement", description = "Ensure that this query is correct prior to submitting form as it will collect the resources for processing which can be an expensive operation for large bulk workflow processes.", component = TextareaComponent.class, options = {"required"} ) public String queryStatement = ""; @FormField( name = "Relative path", description = "This can be used to select otherwise difficult to search for resources. Examples: jcr:content/renditions/original OR ../renditions/original" ) public String relativePayloadPath = ""; private final transient GenericReport report = new GenericReport(); private final transient List<EnumMap<ReportColumns, Object>> reportRows = new ArrayList<>(); private transient List<Resource> payloads; private transient SyntheticWorkflowModel syntheticWorkflowModel; public BulkWorkflow(final QueryHelper queryHelper, final SyntheticWorkflowRunner syntheticWorkflowRunner) { this.queryHelper = queryHelper; this.syntheticWorkflowRunner = syntheticWorkflowRunner; } @Override public void buildProcess(ProcessInstance instance, ResourceResolver rr) throws LoginException { report.setName(instance.getName()); instance.getInfo().setDescription("Bulk process payloads using synthetic workflow"); instance.defineCriticalAction("Process payloads with synthetic workflow", rr, this::processPayloads); } protected void queryPayloads(ActionManager manager) throws Exception { manager.withResolver(resourceResolver -> { payloads = queryHelper.findResources(resourceResolver, queryLanguage.getValue(), queryStatement, relativePayloadPath); }); } protected void prepareSyntheticWorkflowModel(ActionManager manager) throws Exception { manager.withResolver(resourceResolver -> { syntheticWorkflowModel = syntheticWorkflowRunner.getSyntheticWorkflowModel( resourceResolver, workflowId, true); }); } public void processPayloads(ActionManager manager) throws Exception { prepareSyntheticWorkflowModel(manager); queryPayloads(manager); log.info("Executing synthetic workflow [ {} ] against [ {} ] payloads via Bulk Workflow MCP process.", workflowId, payloads.size()); payloads.stream() .map((resource) -> resource.getPath()) .forEach((path) -> manager.deferredWithResolver((ResourceResolver resourceResolver) -> { final long start = System.currentTimeMillis(); resourceResolver.adaptTo(Session.class).getWorkspace().getObservationManager().setUserData("changedByWorkflowProcess"); try { syntheticWorkflowRunner.execute(resourceResolver, path, syntheticWorkflowModel, false, true); final long duration = System.currentTimeMillis() - start; record(path, ItemStatus.SUCCESS, duration); log.debug("Successfully processed payload [ {} ] with synthetic workflow [ {} ] in [ {} ] milliseconds.", path, workflowId, duration); } catch (WorkflowException e) { final long duration = System.currentTimeMillis() - start; record(path, ItemStatus.FAILURE, duration); log.warn("Failed to process payload [ {} ] with synthetic workflow [ {} ] in [ {} ] milliseconds.", path, workflowId, duration); } })); } public GenericReport getReport() { return report; } @Override public void init() throws RepositoryException { 
// nothing to do here } protected void record(String path, ItemStatus status, long timeTaken) { final EnumMap<ReportColumns, Object> row = new EnumMap<>(ReportColumns.class); row.put(ReportColumns.PAYLOAD_PATH, path); row.put(ReportColumns.STATUS, StringUtil.getFriendlyName(status.name())); row.put(ReportColumns.TIME_TAKEN_IN_MILLISECONDS, timeTaken); reportRows.add(row); } @Override public void storeReport(ProcessInstance instance, ResourceResolver resourceResolver) throws RepositoryException, PersistenceException { report.setRows(reportRows, ReportColumns.class); report.persist(resourceResolver, instance.getPath() + "/jcr:content/report"); } }
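/*
 * Hedged configuration sketch, not part of the process definition above: how the
 * public form-backed fields of BulkWorkflow map onto a concrete run. The
 * QueryHelper and SyntheticWorkflowRunner are the services normally injected by
 * the MCP factory; the workflow model path and QueryBuilder statement below are
 * illustrative values, not taken from the code above.
 */
import com.adobe.acs.commons.mcp.impl.processes.BulkWorkflow;
import com.adobe.acs.commons.util.QueryHelper;
import com.adobe.acs.commons.workflow.synthetic.SyntheticWorkflowRunner;

class BulkWorkflowConfigSketch {
    static BulkWorkflow configure(QueryHelper queryHelper, SyntheticWorkflowRunner runner) {
        BulkWorkflow definition = new BulkWorkflow(queryHelper, runner);
        // Must be a model compatible with ACS AEM Commons Synthetic Workflow.
        definition.workflowId = "/var/workflow/models/request_for_activation";
        definition.queryLanguage = BulkWorkflow.QueryLanguage.QUERY_BUILDER;
        // Collects every dam:Asset under the illustrative path.
        definition.queryStatement = "path=/content/dam/my-site\ntype=dam:Asset";
        // Run the workflow against the original rendition rather than the asset node itself.
        definition.relativePayloadPath = "jcr:content/renditions/original";
        return definition;
    }
}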
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.integration; import org.apache.kafka.clients.admin.Admin; import org.apache.kafka.clients.admin.Config; import org.apache.kafka.clients.admin.LogDirDescription; import org.apache.kafka.clients.admin.ReplicaInfo; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.config.ConfigResource; import org.apache.kafka.common.config.TopicConfig; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.utils.Time; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster; import org.apache.kafka.streams.integration.utils.IntegrationTestUtils; import org.apache.kafka.test.IntegrationTest; import org.apache.kafka.test.MockMapper; import org.apache.kafka.test.TestCondition; import org.apache.kafka.test.TestUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.Set; @Category({IntegrationTest.class}) public class PurgeRepartitionTopicIntegrationTest { private static final int NUM_BROKERS = 1; private static final String INPUT_TOPIC = "input-stream"; private static final String APPLICATION_ID = "restore-test"; private static final String REPARTITION_TOPIC = APPLICATION_ID + "-KSTREAM-AGGREGATE-STATE-STORE-0000000002-repartition"; private static Admin adminClient; private static KafkaStreams kafkaStreams; private static final Integer PURGE_INTERVAL_MS = 10; private static final Integer PURGE_SEGMENT_BYTES = 2000; public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS, new Properties() { { put("log.retention.check.interval.ms", PURGE_INTERVAL_MS); put(TopicConfig.FILE_DELETE_DELAY_MS_CONFIG, 0); } }); @BeforeClass public static void startCluster() throws IOException, InterruptedException { CLUSTER.start(); CLUSTER.createTopic(INPUT_TOPIC, 1, 1); } @AfterClass public static void closeCluster() { CLUSTER.stop(); } private final Time time = CLUSTER.time; private class RepartitionTopicCreatedWithExpectedConfigs implements TestCondition { @Override final public boolean conditionMet() { try { final Set<String> topics = adminClient.listTopics().names().get(); if 
(!topics.contains(REPARTITION_TOPIC)) { return false; } } catch (final Exception e) { return false; } try { final ConfigResource resource = new ConfigResource(ConfigResource.Type.TOPIC, REPARTITION_TOPIC); final Config config = adminClient .describeConfigs(Collections.singleton(resource)) .values() .get(resource) .get(); return config.get(TopicConfig.CLEANUP_POLICY_CONFIG).value().equals(TopicConfig.CLEANUP_POLICY_DELETE) && config.get(TopicConfig.SEGMENT_MS_CONFIG).value().equals(PURGE_INTERVAL_MS.toString()) && config.get(TopicConfig.SEGMENT_BYTES_CONFIG).value().equals(PURGE_SEGMENT_BYTES.toString()); } catch (final Exception e) { return false; } } } private interface TopicSizeVerifier { boolean verify(long currentSize); } private class RepartitionTopicVerified implements TestCondition { private final TopicSizeVerifier verifier; RepartitionTopicVerified(final TopicSizeVerifier verifier) { this.verifier = verifier; } @Override public final boolean conditionMet() { time.sleep(PURGE_INTERVAL_MS); try { final Collection<LogDirDescription> logDirInfo = adminClient.describeLogDirs(Collections.singleton(0)).descriptions().get(0).get().values(); for (final LogDirDescription partitionInfo : logDirInfo) { final ReplicaInfo replicaInfo = partitionInfo.replicaInfos().get(new TopicPartition(REPARTITION_TOPIC, 0)); if (replicaInfo != null && verifier.verify(replicaInfo.size())) { return true; } } } catch (final Exception e) { // swallow } return false; } } @Before public void setup() { // create admin client for verification final Properties adminConfig = new Properties(); adminConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); adminClient = Admin.create(adminConfig); final Properties streamsConfiguration = new Properties(); streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, APPLICATION_ID); streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, PURGE_INTERVAL_MS); streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory(APPLICATION_ID).getPath()); streamsConfiguration.put(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_MS_CONFIG), PURGE_INTERVAL_MS); streamsConfiguration.put(StreamsConfig.topicPrefix(TopicConfig.SEGMENT_BYTES_CONFIG), PURGE_SEGMENT_BYTES); streamsConfiguration.put(StreamsConfig.producerPrefix(ProducerConfig.BATCH_SIZE_CONFIG), PURGE_SEGMENT_BYTES / 2); // we cannot allow batch size larger than segment size final StreamsBuilder builder = new StreamsBuilder(); builder.stream(INPUT_TOPIC) .groupBy(MockMapper.selectKeyKeyValueMapper()) .count(); kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration, time); } @After public void shutdown() { if (kafkaStreams != null) { kafkaStreams.close(Duration.ofSeconds(30)); } } @Test public void shouldRestoreState() throws Exception { // produce some data to input topic final List<KeyValue<Integer, Integer>> messages = new ArrayList<>(); for (int i = 0; i < 1000; i++) { messages.add(new KeyValue<>(i, i)); } IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(INPUT_TOPIC, messages, TestUtils.producerConfig(CLUSTER.bootstrapServers(), IntegerSerializer.class, IntegerSerializer.class), time.milliseconds()); kafkaStreams.start(); TestUtils.waitForCondition(new 
RepartitionTopicCreatedWithExpectedConfigs(), 60000,
            "Repartition topic " + REPARTITION_TOPIC + " was not created with the expected configs after 60000 ms.");

        TestUtils.waitForCondition(
            new RepartitionTopicVerified(currentSize -> currentSize > 0),
            60000,
            "Repartition topic " + REPARTITION_TOPIC + " did not receive data after 60000 ms."
        );

        // the timeout must be long enough to bypass the log manager's InitialTaskDelayMs, which is hard-coded on the server side
        TestUtils.waitForCondition(
            new RepartitionTopicVerified(currentSize -> currentSize <= PURGE_SEGMENT_BYTES),
            60000,
            "Repartition topic " + REPARTITION_TOPIC + " was not purged after 60000 ms."
        );
    }
}
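/*
 * Hedged helper sketch, not part of the test above: querying the on-disk size of
 * the repartition topic's single partition on broker 0 with the Admin client,
 * mirroring what RepartitionTopicVerified does internally. Broker id 0 matches
 * the single-broker cluster used by this test; a return value of -1 means the
 * partition was not found in any log dir.
 */
import java.util.Collections;

import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.LogDirDescription;
import org.apache.kafka.clients.admin.ReplicaInfo;
import org.apache.kafka.common.TopicPartition;

class RepartitionTopicSizeSketch {
    static long sizeOnBroker0(Admin admin, String topic) throws Exception {
        for (LogDirDescription dir : admin.describeLogDirs(Collections.singleton(0))
                .descriptions().get(0).get().values()) {
            ReplicaInfo replica = dir.replicaInfos().get(new TopicPartition(topic, 0));
            if (replica != null) {
                return replica.size();
            }
        }
        return -1L; // partition not hosted in any log dir on this broker
    }
}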
package com.roylaurie.subcomm; import static org.junit.Assert.*; import java.io.IOException; import org.junit.Test; import com.roylaurie.subcomm.client.SubcommException; import com.roylaurie.subcomm.client.SubcommMessage; import com.roylaurie.subcomm.client.exception.SubcommIOException; import com.roylaurie.subcomm.client.message.SubcommChangeFrequencyMessage; import com.roylaurie.subcomm.client.message.SubcommChannelChatMessage; import com.roylaurie.subcomm.client.message.SubcommCommandMessage; import com.roylaurie.subcomm.client.message.SubcommFrequencyChatMessage; import com.roylaurie.subcomm.client.message.SubcommJoinArenaMessage; import com.roylaurie.subcomm.client.message.SubcommLoginMessage; import com.roylaurie.subcomm.client.message.SubcommLoginOkMessage; import com.roylaurie.subcomm.client.message.SubcommModeratorsChatMessage; import com.roylaurie.subcomm.client.message.SubcommNoOpMessage; import com.roylaurie.subcomm.client.message.SubcommPrivateChatMessage; import com.roylaurie.subcomm.client.message.SubcommPrivateCommandMessage; import com.roylaurie.subcomm.client.message.SubcommPublicChatMessage; import com.roylaurie.subcomm.client.message.SubcommSquadChatMessage; import com.roylaurie.subcomm.client.netchat.SubcommNetchatClient; import com.roylaurie.subcomm.client.netchat.SubcommNetchatMessageParser; import com.roylaurie.subcomm.test.DataGenerator; import com.roylaurie.subcomm.test.TestServer; /** * Performs tests against the SubcommClient class. * @author Roy Laurie <roy.laurie@gmail.com> */ public final class SubcommClientTest { private static final class ConnectResult { public final SubcommNetchatClient client; public final TestServer server; public ConnectResult(SubcommNetchatClient client, TestServer server) { this.client = client; this.server = server; } public void close() { client.disconnect(); server.close(); } } private static final long CONNECT_THREAD_TIMEOUT = 10000; private static final String HOST = "127.0.0.1"; private static final int PORT = 5555; private static final String TEST_CONNECT = "TestConnect"; private static final int NUM_RECEIVES = 20; private static final long WAIT_FOR_INPUT_TIMEOUT = 5000; /** * Tests connect setup and teardown as well as health reporting. * @throws IOException */ @Test public void testConnection() throws IOException { ConnectResult result = connect(); try { boolean wasConnected = result.client.connected(); result.close(); assertTrue(wasConnected); assertFalse(result.client.connected()); } finally { result.close(); } } /** * Tests sending all of the standard commands. 
* @throws IOException * @throws SubcommException */ @Test public void testSend() throws IOException, SubcommException { ConnectResult result = connect(); SubcommNetchatClient client = result.client; TestServer server = result.server; SubcommMessage expected; // sorted by classname try { // change frequency String frequency = DataGenerator.generate(); client.changeFrequency(frequency); expected = new SubcommChangeFrequencyMessage(frequency); assertEquals(expected, server.nextMesssageReceived()); // channel chat String channel = DataGenerator.generate(); String message = DataGenerator.generate(); client.chatChannel(channel, message); expected = new SubcommChannelChatMessage(channel, message); assertEquals(expected, server.nextMesssageReceived()); // command String command = DataGenerator.generate(); client.command(command); expected = new SubcommCommandMessage(command); assertEquals(expected, server.nextMesssageReceived()); // frequency chat frequency = DataGenerator.generate(); message = DataGenerator.generate(); client.chatFrequency(frequency, message); expected = new SubcommFrequencyChatMessage(frequency, message); assertEquals(expected, server.nextMesssageReceived()); // join arena String arena = DataGenerator.generate(); client.joinArena(arena); expected = new SubcommJoinArenaMessage(arena); assertEquals(expected, server.nextMesssageReceived()); // login - test indirectly String username = DataGenerator.generate(); String password = DataGenerator.generate(); expected = new SubcommLoginMessage(username, password); assertEquals(expected, SubcommNetchatMessageParser.parse(expected.getNetchatMessage())); // login ok - test indirectly expected = new SubcommLoginOkMessage(); assertEquals(expected, SubcommNetchatMessageParser.parse(expected.getNetchatMessage())); // moderators chat message = DataGenerator.generate(); client.chatModerators(message); expected = new SubcommModeratorsChatMessage(message); assertEquals(expected, server.nextMesssageReceived()); // no op - test indirectly expected = new SubcommNoOpMessage(); assertEquals(expected, SubcommNetchatMessageParser.parse(expected.getNetchatMessage())); // private chat username = DataGenerator.generate(); message = DataGenerator.generate(); client.chatPrivate(username, message); expected = new SubcommPrivateChatMessage(username, message); assertEquals(expected, server.nextMesssageReceived()); // public chat message = DataGenerator.generate(); client.chatPublic(message); expected = new SubcommPublicChatMessage(message); assertEquals(expected, server.nextMesssageReceived()); // private command username = DataGenerator.generate(); command = DataGenerator.generate(); client.commandPrivate(username, command); expected = new SubcommPrivateCommandMessage(username, command); assertEquals(expected, server.nextMesssageReceived()); // squad chat String squad = DataGenerator.generate(); message = DataGenerator.generate(); client.chatSquad(squad, message); expected = new SubcommSquadChatMessage(squad, message); assertEquals(expected, server.nextMesssageReceived()); } finally { result.close(); } } /** * Tests receiving commands. 
* @throws IOException * @throws SubcommIOException */ @Test public void testReceive() throws IOException, SubcommIOException { ConnectResult result = connect(); SubcommNetchatClient client = result.client; TestServer server = result.server; try { for (int i = 0; i < NUM_RECEIVES; ++i) { String username = DataGenerator.generate(); String message = DataGenerator.generate(); SubcommMessage expected = new SubcommPrivateChatMessage(username, message); server.send(expected); assertEquals(expected, waitForInput(client)); } } finally { result.close(); } } private SubcommMessage waitForInput(SubcommNetchatClient client) throws SubcommIOException { long timeoutTime = System.currentTimeMillis() + WAIT_FOR_INPUT_TIMEOUT; SubcommMessage message = client.nextReceivedMessage(); while (message == null && System.currentTimeMillis() < timeoutTime) { message = client.nextReceivedMessage(); } if (message == null) throw new SubcommIOException("No IO received"); return message; } /** * Starts a TestServer and SubcommClient and connects the two. Tests the login() phase using dummy data. * @return ConnectResult With two interconnected connected clients * @throws IOException Will close() connections before bubbling exceptions up */ private ConnectResult connect() throws IOException { TestServer server = new TestServer(HOST, PORT); try { server.bind(); final String username = DataGenerator.generate(); final String password = DataGenerator.generate(); final SubcommNetchatClient client = new SubcommNetchatClient(HOST, PORT, username, password); Thread connectThread = new Thread(new Runnable() { @Override public void run() { try { client.connect(); } catch (Exception e) { throw new RuntimeException(e); } synchronized(this) { this.notifyAll(); } } }); connectThread.setName(TEST_CONNECT); connectThread.start(); server.accept(); SubcommMessage expected = new SubcommLoginMessage(username, password); assertEquals(expected, server.nextMesssageReceived()); server.send(new SubcommLoginOkMessage()); try { synchronized(connectThread) { connectThread.wait(CONNECT_THREAD_TIMEOUT); } } catch (InterruptedException e) { /* do nothing */ } return new ConnectResult(client, server); } catch (RuntimeException e) { server.close(); throw e; } catch (IOException e) { server.close(); throw e; } } }
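/*
 * Hedged usage sketch, not part of the test above: connecting a
 * SubcommNetchatClient against a live server and sending a public chat message,
 * using only methods exercised by testConnection() and testSend(). The host,
 * port, credentials and arena name are illustrative placeholders.
 */
import com.roylaurie.subcomm.client.netchat.SubcommNetchatClient;

class SubcommUsageSketch {
    static void greet() throws Exception {
        SubcommNetchatClient client =
                new SubcommNetchatClient("chat.example.net", 5005, "bot", "secret");
        try {
            client.connect();              // performs the login exchange tested above
            client.joinArena("#general");  // illustrative arena name
            client.chatPublic("hello, arena");
        } finally {
            client.disconnect();
        }
    }
}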
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2021 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.ascanrulesAlpha; import static fi.iki.elonen.NanoHTTPD.newFixedLengthResponse; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.junit.jupiter.api.Assertions.assertEquals; import fi.iki.elonen.NanoHTTPD; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.junit.jupiter.api.Test; import org.parosproxy.paros.core.scanner.Alert; import org.parosproxy.paros.network.HttpMessage; import org.parosproxy.paros.network.HttpRequestHeader; import org.zaproxy.addon.commonlib.CommonAlertTag; import org.zaproxy.zap.testutils.NanoServerHandler; class WebCacheDeceptionScanRuleUnitTest extends ActiveScannerTest<WebCacheDeceptionScanRule> { private static final String AUTHORISED_RESPONSE = "<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n" + "<html><head></head><body>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec mattis ex ac orci consectetur viverra. Aenean porttitor tincidunt ligula. Suspendisse et ornare justo. Fusce vel maximus est. Donec id arcu nec justo egestas hendrerit. Sed pulvinar ultrices ultricies. Mauris ultrices odio non tellus mattis, id pharetra justo porta. Donec venenatis ante ac nisi blandit gravida. 
Nunc tellus dolor, finibus nec placerat ac, ullamcorper sit amet tellus.</body></html>"; private static final String UNAUTHORISED_RESPONSE = "<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n" + "<html><head></head><body>Lorem ipsum</body></html>"; private static final String NOT_FOUND_RESPONSE = "<!DOCTYPE HTML PUBLIC \"-//IETF//DTD HTML 2.0//EN\">\n" + "<html><head></head><body>404 NOT FOUND</body></html>"; @Override protected WebCacheDeceptionScanRule createScanner() { return new WebCacheDeceptionScanRule(); } @Test void shouldAlertIfResponseGetsCached() throws Exception { // Given HttpMessage message = this.getHttpMessage("/private"); HttpRequestHeader headers = message.getRequestHeader(); headers.addHeader("authorization", "Basic YWxhZGRpbjpvcGVuc2VzYW1l"); message.setRequestHeader(headers); nano.addHandler(new CachedTestResponse("/private", "authorization")); rule.init(message, this.parent); // When rule.scan(); // Then assertThat(alertsRaised, hasSize(1)); Alert alert = alertsRaised.get(0); assertEquals("/test.css,/test.js,/test.gif,/test.png,/test.svg,", alert.getAttack()); } @Test void shouldNotAlertIfInitialAuthorisedAndUnauthorisedResponseSame() throws Exception { // Given HttpMessage message = this.getHttpMessage("/private"); nano.addHandler(new FirstInitialTestResponse("/private")); rule.init(message, this.parent); // When rule.scan(); // Then assertThat(alertsRaised, hasSize(0)); assertEquals(1, httpMessagesSent.size()); } @Test void shouldNotAlertIfStaticPathAppendedGives404() throws Exception { // Given HttpMessage message = this.getHttpMessage("/private"); HttpRequestHeader headers = message.getRequestHeader(); headers.addHeader("authorization", "Basic YWxhZGRpbjpvcGVuc2VzYW1l"); message.setRequestHeader(headers); nano.addHandler(new SecondInitialTestResponse("/private", "authorization")); rule.init(message, this.parent); // When rule.scan(); // Then assertThat(alertsRaised, hasSize(0)); assertEquals(2, httpMessagesSent.size()); } @Test void shouldNotAlertIfResponseDoesNotGetsCached() throws Exception { // Given HttpMessage message = this.getHttpMessage("/private"); HttpRequestHeader headers = message.getRequestHeader(); headers.addHeader("authorization", "Basic YWxhZGRpbjpvcGVuc2VzYW1l"); message.setRequestHeader(headers); nano.addHandler(new NotCachedTestResponse("/private", "authorization")); rule.init(message, this.parent); // When rule.scan(); // Then assertThat(alertsRaised, hasSize(0)); assertEquals(15, httpMessagesSent.size()); } @Test void shouldReturnExpectedMappings() { // Given / When Map<String, String> tags = rule.getAlertTags(); // Then assertThat(tags.size(), is(equalTo(3))); assertThat( tags.containsKey(CommonAlertTag.OWASP_2021_A05_SEC_MISCONFIG.getTag()), is(equalTo(true))); assertThat( tags.containsKey(CommonAlertTag.OWASP_2017_A06_SEC_MISCONFIG.getTag()), is(equalTo(true))); assertThat( tags.containsKey(CommonAlertTag.WSTG_V42_ATHN_06_CACHE_WEAKNESS.getTag()), is(equalTo(true))); assertThat( tags.get(CommonAlertTag.OWASP_2021_A05_SEC_MISCONFIG.getTag()), is(equalTo(CommonAlertTag.OWASP_2021_A05_SEC_MISCONFIG.getValue()))); assertThat( tags.get(CommonAlertTag.OWASP_2017_A06_SEC_MISCONFIG.getTag()), is(equalTo(CommonAlertTag.OWASP_2017_A06_SEC_MISCONFIG.getValue()))); assertThat( tags.get(CommonAlertTag.WSTG_V42_ATHN_06_CACHE_WEAKNESS.getTag()), is(equalTo(CommonAlertTag.WSTG_V42_ATHN_06_CACHE_WEAKNESS.getValue()))); } private static class CachedTestResponse extends NanoServerHandler { private final String header; CachedTestResponse(String path, 
String header) { super(path); this.header = header; } @Override protected NanoHTTPD.Response serve(NanoHTTPD.IHTTPSession session) { Pattern pattern = Pattern.compile("/test.(css|js|gif|png|svg)", Pattern.CASE_INSENSITIVE); Matcher matcher = pattern.matcher(session.getUri()); boolean matchFound = matcher.find(); if (matchFound) { return newFixedLengthResponse( NanoHTTPD.Response.Status.OK, "text/html", AUTHORISED_RESPONSE); } if (session.getHeaders().get(header) != null) { return newFixedLengthResponse( NanoHTTPD.Response.Status.OK, "text/html", AUTHORISED_RESPONSE); } return newFixedLengthResponse( NanoHTTPD.Response.Status.UNAUTHORIZED, "text/html", UNAUTHORISED_RESPONSE); } } private static class NotCachedTestResponse extends NanoServerHandler { private final String header; NotCachedTestResponse(String path, String header) { super(path); this.header = header; } @Override protected NanoHTTPD.Response serve(NanoHTTPD.IHTTPSession session) { if (session.getHeaders().get(header) != null) { return newFixedLengthResponse( NanoHTTPD.Response.Status.OK, "text/html", AUTHORISED_RESPONSE); } return newFixedLengthResponse( NanoHTTPD.Response.Status.UNAUTHORIZED, "text/html", UNAUTHORISED_RESPONSE); } } private static class FirstInitialTestResponse extends NanoServerHandler { FirstInitialTestResponse(String path) { super(path); } @Override protected NanoHTTPD.Response serve(NanoHTTPD.IHTTPSession session) { return newFixedLengthResponse( NanoHTTPD.Response.Status.OK, "text/html", AUTHORISED_RESPONSE); } } private static class SecondInitialTestResponse extends NanoServerHandler { private final String header; SecondInitialTestResponse(String path, String header) { super(path); this.header = header; } @Override protected NanoHTTPD.Response serve(NanoHTTPD.IHTTPSession session) { Pattern pattern = Pattern.compile("/test", Pattern.CASE_INSENSITIVE); Matcher matcher = pattern.matcher(session.getUri()); boolean matchFound = matcher.find(); if (matchFound) { return newFixedLengthResponse( NanoHTTPD.Response.Status.NOT_FOUND, "text/html", NOT_FOUND_RESPONSE); } if (session.getHeaders().get(header) != null) { return newFixedLengthResponse( NanoHTTPD.Response.Status.OK, "text/html", AUTHORISED_RESPONSE); } return newFixedLengthResponse( NanoHTTPD.Response.Status.UNAUTHORIZED, "text/html", UNAUTHORISED_RESPONSE); } } }
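/*
 * Hedged helper sketch, not part of the test above: how the hard-coded
 * authorization value "Basic YWxhZGRpbjpvcGVuc2VzYW1l" used in these tests is
 * derived; it is simply base64("aladdin:opensesame"). The credentials are test
 * fixtures only and the helper class name is an assumption for illustration.
 */
import java.nio.charset.StandardCharsets;
import java.util.Base64;

class BasicAuthHeaderSketch {
    static String basicAuth(String user, String password) {
        String token = Base64.getEncoder()
                .encodeToString((user + ":" + password).getBytes(StandardCharsets.UTF_8));
        return "Basic " + token;
    }

    public static void main(String[] args) {
        // Prints "Basic YWxhZGRpbjpvcGVuc2VzYW1l", matching the header set in the tests above.
        System.out.println(basicAuth("aladdin", "opensesame"));
    }
}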
// Copyright (c) 2014, Facebook, Inc. All rights reserved. // This source code is licensed under the BSD-style license found in the // LICENSE file in the root directory of this source tree. An additional grant // of patent rights can be found in the PATENTS file in the same directory. package org.rocksdb; import java.util.ArrayList; import java.util.List; /** * Options to control the behavior of a database. It will be used * during the creation of a {@link org.rocksdb.RocksDB} (i.e., RocksDB.open()). * * If {@link #dispose()} function is not called, then it will be GC'd automatically * and native resources will be released as part of the process. */ public class Options extends RocksObject implements DBOptionsInterface, ColumnFamilyOptionsInterface { static { RocksDB.loadLibrary(); } /** * Construct options for opening a RocksDB. * * This constructor will create (by allocating a block of memory) * an {@code rocksdb::Options} in the c++ side. */ public Options() { super(); newOptions(); env_ = Env.getDefault(); } /** * Construct options for opening a RocksDB. Reusing database options * and column family options. * * @param dbOptions {@link org.rocksdb.DBOptions} instance * @param columnFamilyOptions {@link org.rocksdb.ColumnFamilyOptions} * instance */ public Options(final DBOptions dbOptions, final ColumnFamilyOptions columnFamilyOptions) { super(); newOptions(dbOptions.nativeHandle_, columnFamilyOptions.nativeHandle_); env_ = Env.getDefault(); } @Override public Options setIncreaseParallelism(final int totalThreads) { assert(isInitialized()); setIncreaseParallelism(nativeHandle_, totalThreads); return this; } @Override public Options setCreateIfMissing(final boolean flag) { assert(isInitialized()); setCreateIfMissing(nativeHandle_, flag); return this; } @Override public Options setCreateMissingColumnFamilies(final boolean flag) { assert(isInitialized()); setCreateMissingColumnFamilies(nativeHandle_, flag); return this; } /** * Use the specified object to interact with the environment, * e.g. to read/write files, schedule background work, etc. * Default: {@link Env#getDefault()} * * @param env {@link Env} instance. * @return the instance of the current Options. */ public Options setEnv(final Env env) { assert(isInitialized()); setEnv(nativeHandle_, env.nativeHandle_); env_ = env; return this; } /** * Returns the set RocksEnv instance. * * @return {@link RocksEnv} instance set in the Options. */ public Env getEnv() { return env_; } /** * <p>Set appropriate parameters for bulk loading. * The reason that this is a function that returns "this" instead of a * constructor is to enable chaining of multiple similar calls in the future. * </p> * * <p>All data will be in level 0 without any automatic compaction. * It's recommended to manually call CompactRange(NULL, NULL) before reading * from the database, because otherwise the read can be very slow.</p> * * @return the instance of the current Options. 
*/ public Options prepareForBulkLoad() { prepareForBulkLoad(nativeHandle_); return this; } @Override public boolean createIfMissing() { assert(isInitialized()); return createIfMissing(nativeHandle_); } @Override public boolean createMissingColumnFamilies() { assert(isInitialized()); return createMissingColumnFamilies(nativeHandle_); } @Override public Options optimizeForPointLookup( long blockCacheSizeMb) { optimizeForPointLookup(nativeHandle_, blockCacheSizeMb); return this; } @Override public Options optimizeLevelStyleCompaction() { optimizeLevelStyleCompaction(nativeHandle_, DEFAULT_COMPACTION_MEMTABLE_MEMORY_BUDGET); return this; } @Override public Options optimizeLevelStyleCompaction( long memtableMemoryBudget) { optimizeLevelStyleCompaction(nativeHandle_, memtableMemoryBudget); return this; } @Override public Options optimizeUniversalStyleCompaction() { optimizeUniversalStyleCompaction(nativeHandle_, DEFAULT_COMPACTION_MEMTABLE_MEMORY_BUDGET); return this; } @Override public Options optimizeUniversalStyleCompaction( final long memtableMemoryBudget) { optimizeUniversalStyleCompaction(nativeHandle_, memtableMemoryBudget); return this; } @Override public Options setComparator(final BuiltinComparator builtinComparator) { assert(isInitialized()); setComparatorHandle(nativeHandle_, builtinComparator.ordinal()); return this; } @Override public Options setComparator( final AbstractComparator<? extends AbstractSlice<?>> comparator) { assert (isInitialized()); setComparatorHandle(nativeHandle_, comparator.nativeHandle_); comparator_ = comparator; return this; } @Override public Options setMergeOperatorName(final String name) { assert (isInitialized()); if (name == null) { throw new IllegalArgumentException( "Merge operator name must not be null."); } setMergeOperatorName(nativeHandle_, name); return this; } @Override public Options setMergeOperator(final MergeOperator mergeOperator) { setMergeOperator(nativeHandle_, mergeOperator.newMergeOperatorHandle()); return this; } @Override public Options setWriteBufferSize(final long writeBufferSize) { assert(isInitialized()); setWriteBufferSize(nativeHandle_, writeBufferSize); return this; } @Override public long writeBufferSize() { assert(isInitialized()); return writeBufferSize(nativeHandle_); } @Override public Options setMaxWriteBufferNumber(final int maxWriteBufferNumber) { assert(isInitialized()); setMaxWriteBufferNumber(nativeHandle_, maxWriteBufferNumber); return this; } @Override public int maxWriteBufferNumber() { assert(isInitialized()); return maxWriteBufferNumber(nativeHandle_); } @Override public boolean errorIfExists() { assert(isInitialized()); return errorIfExists(nativeHandle_); } @Override public Options setErrorIfExists(final boolean errorIfExists) { assert(isInitialized()); setErrorIfExists(nativeHandle_, errorIfExists); return this; } @Override public boolean paranoidChecks() { assert(isInitialized()); return paranoidChecks(nativeHandle_); } @Override public Options setParanoidChecks(final boolean paranoidChecks) { assert(isInitialized()); setParanoidChecks(nativeHandle_, paranoidChecks); return this; } @Override public int maxOpenFiles() { assert(isInitialized()); return maxOpenFiles(nativeHandle_); } @Override public Options setMaxTotalWalSize(final long maxTotalWalSize) { assert(isInitialized()); setMaxTotalWalSize(nativeHandle_, maxTotalWalSize); return this; } @Override public long maxTotalWalSize() { assert(isInitialized()); return maxTotalWalSize(nativeHandle_); } @Override public Options setMaxOpenFiles(final int 
maxOpenFiles) { assert(isInitialized()); setMaxOpenFiles(nativeHandle_, maxOpenFiles); return this; } @Override public boolean disableDataSync() { assert(isInitialized()); return disableDataSync(nativeHandle_); } @Override public Options setDisableDataSync(final boolean disableDataSync) { assert(isInitialized()); setDisableDataSync(nativeHandle_, disableDataSync); return this; } @Override public boolean useFsync() { assert(isInitialized()); return useFsync(nativeHandle_); } @Override public Options setUseFsync(final boolean useFsync) { assert(isInitialized()); setUseFsync(nativeHandle_, useFsync); return this; } @Override public String dbLogDir() { assert(isInitialized()); return dbLogDir(nativeHandle_); } @Override public Options setDbLogDir(final String dbLogDir) { assert(isInitialized()); setDbLogDir(nativeHandle_, dbLogDir); return this; } @Override public String walDir() { assert(isInitialized()); return walDir(nativeHandle_); } @Override public Options setWalDir(final String walDir) { assert(isInitialized()); setWalDir(nativeHandle_, walDir); return this; } @Override public long deleteObsoleteFilesPeriodMicros() { assert(isInitialized()); return deleteObsoleteFilesPeriodMicros(nativeHandle_); } @Override public Options setDeleteObsoleteFilesPeriodMicros( final long micros) { assert(isInitialized()); setDeleteObsoleteFilesPeriodMicros(nativeHandle_, micros); return this; } @Override public int maxBackgroundCompactions() { assert(isInitialized()); return maxBackgroundCompactions(nativeHandle_); } @Override public Options createStatistics() { assert(isInitialized()); createStatistics(nativeHandle_); return this; } @Override public Statistics statisticsPtr() { assert(isInitialized()); long statsPtr = statisticsPtr(nativeHandle_); if(statsPtr == 0) { createStatistics(); statsPtr = statisticsPtr(nativeHandle_); } return new Statistics(statsPtr); } @Override public Options setMaxBackgroundCompactions( final int maxBackgroundCompactions) { assert(isInitialized()); setMaxBackgroundCompactions(nativeHandle_, maxBackgroundCompactions); return this; } @Override public int maxBackgroundFlushes() { assert(isInitialized()); return maxBackgroundFlushes(nativeHandle_); } @Override public Options setMaxBackgroundFlushes( final int maxBackgroundFlushes) { assert(isInitialized()); setMaxBackgroundFlushes(nativeHandle_, maxBackgroundFlushes); return this; } @Override public long maxLogFileSize() { assert(isInitialized()); return maxLogFileSize(nativeHandle_); } @Override public Options setMaxLogFileSize(final long maxLogFileSize) { assert(isInitialized()); setMaxLogFileSize(nativeHandle_, maxLogFileSize); return this; } @Override public long logFileTimeToRoll() { assert(isInitialized()); return logFileTimeToRoll(nativeHandle_); } @Override public Options setLogFileTimeToRoll(final long logFileTimeToRoll) { assert(isInitialized()); setLogFileTimeToRoll(nativeHandle_, logFileTimeToRoll); return this; } @Override public long keepLogFileNum() { assert(isInitialized()); return keepLogFileNum(nativeHandle_); } @Override public Options setKeepLogFileNum(final long keepLogFileNum) { assert(isInitialized()); setKeepLogFileNum(nativeHandle_, keepLogFileNum); return this; } @Override public long maxManifestFileSize() { assert(isInitialized()); return maxManifestFileSize(nativeHandle_); } @Override public Options setMaxManifestFileSize( final long maxManifestFileSize) { assert(isInitialized()); setMaxManifestFileSize(nativeHandle_, maxManifestFileSize); return this; } @Override public Options 
setMaxTableFilesSizeFIFO( final long maxTableFilesSize) { assert(maxTableFilesSize > 0); // unsigned native type assert(isInitialized()); setMaxTableFilesSizeFIFO(nativeHandle_, maxTableFilesSize); return this; } @Override public long maxTableFilesSizeFIFO() { return maxTableFilesSizeFIFO(nativeHandle_); } @Override public int tableCacheNumshardbits() { assert(isInitialized()); return tableCacheNumshardbits(nativeHandle_); } @Override public Options setTableCacheNumshardbits( final int tableCacheNumshardbits) { assert(isInitialized()); setTableCacheNumshardbits(nativeHandle_, tableCacheNumshardbits); return this; } @Override public long walTtlSeconds() { assert(isInitialized()); return walTtlSeconds(nativeHandle_); } @Override public Options setWalTtlSeconds(final long walTtlSeconds) { assert(isInitialized()); setWalTtlSeconds(nativeHandle_, walTtlSeconds); return this; } @Override public long walSizeLimitMB() { assert(isInitialized()); return walSizeLimitMB(nativeHandle_); } @Override public Options setWalSizeLimitMB(final long sizeLimitMB) { assert(isInitialized()); setWalSizeLimitMB(nativeHandle_, sizeLimitMB); return this; } @Override public long manifestPreallocationSize() { assert(isInitialized()); return manifestPreallocationSize(nativeHandle_); } @Override public Options setManifestPreallocationSize(final long size) { assert(isInitialized()); setManifestPreallocationSize(nativeHandle_, size); return this; } @Override public boolean allowOsBuffer() { assert(isInitialized()); return allowOsBuffer(nativeHandle_); } @Override public Options setAllowOsBuffer(final boolean allowOsBuffer) { assert(isInitialized()); setAllowOsBuffer(nativeHandle_, allowOsBuffer); return this; } @Override public boolean allowMmapReads() { assert(isInitialized()); return allowMmapReads(nativeHandle_); } @Override public Options setAllowMmapReads(final boolean allowMmapReads) { assert(isInitialized()); setAllowMmapReads(nativeHandle_, allowMmapReads); return this; } @Override public boolean allowMmapWrites() { assert(isInitialized()); return allowMmapWrites(nativeHandle_); } @Override public Options setAllowMmapWrites(final boolean allowMmapWrites) { assert(isInitialized()); setAllowMmapWrites(nativeHandle_, allowMmapWrites); return this; } @Override public boolean isFdCloseOnExec() { assert(isInitialized()); return isFdCloseOnExec(nativeHandle_); } @Override public Options setIsFdCloseOnExec(final boolean isFdCloseOnExec) { assert(isInitialized()); setIsFdCloseOnExec(nativeHandle_, isFdCloseOnExec); return this; } @Override public int statsDumpPeriodSec() { assert(isInitialized()); return statsDumpPeriodSec(nativeHandle_); } @Override public Options setStatsDumpPeriodSec(final int statsDumpPeriodSec) { assert(isInitialized()); setStatsDumpPeriodSec(nativeHandle_, statsDumpPeriodSec); return this; } @Override public boolean adviseRandomOnOpen() { return adviseRandomOnOpen(nativeHandle_); } @Override public Options setAdviseRandomOnOpen(final boolean adviseRandomOnOpen) { assert(isInitialized()); setAdviseRandomOnOpen(nativeHandle_, adviseRandomOnOpen); return this; } @Override public boolean useAdaptiveMutex() { assert(isInitialized()); return useAdaptiveMutex(nativeHandle_); } @Override public Options setUseAdaptiveMutex(final boolean useAdaptiveMutex) { assert(isInitialized()); setUseAdaptiveMutex(nativeHandle_, useAdaptiveMutex); return this; } @Override public long bytesPerSync() { return bytesPerSync(nativeHandle_); } @Override public Options setBytesPerSync(final long bytesPerSync) { 
assert(isInitialized()); setBytesPerSync(nativeHandle_, bytesPerSync); return this; } @Override public Options setMemTableConfig(final MemTableConfig config) { memTableConfig_ = config; setMemTableFactory(nativeHandle_, config.newMemTableFactoryHandle()); return this; } @Override public Options setRateLimiterConfig(final RateLimiterConfig config) { rateLimiterConfig_ = config; setRateLimiter(nativeHandle_, config.newRateLimiterHandle()); return this; } @Override public Options setLogger(final Logger logger) { assert(isInitialized()); setLogger(nativeHandle_, logger.nativeHandle_); return this; } @Override public Options setInfoLogLevel(final InfoLogLevel infoLogLevel) { assert(isInitialized()); setInfoLogLevel(nativeHandle_, infoLogLevel.getValue()); return this; } @Override public InfoLogLevel infoLogLevel() { assert(isInitialized()); return InfoLogLevel.getInfoLogLevel( infoLogLevel(nativeHandle_)); } @Override public String memTableFactoryName() { assert(isInitialized()); return memTableFactoryName(nativeHandle_); } @Override public Options setTableFormatConfig(final TableFormatConfig config) { tableFormatConfig_ = config; setTableFactory(nativeHandle_, config.newTableFactoryHandle()); return this; } @Override public String tableFactoryName() { assert(isInitialized()); return tableFactoryName(nativeHandle_); } @Override public Options useFixedLengthPrefixExtractor(final int n) { assert(isInitialized()); useFixedLengthPrefixExtractor(nativeHandle_, n); return this; } @Override public Options useCappedPrefixExtractor(final int n) { assert(isInitialized()); useCappedPrefixExtractor(nativeHandle_, n); return this; } @Override public CompressionType compressionType() { return CompressionType.values()[compressionType(nativeHandle_)]; } @Override public Options setCompressionPerLevel(final List<CompressionType> compressionLevels) { final List<Byte> byteCompressionTypes = new ArrayList<>( compressionLevels.size()); for (final CompressionType compressionLevel : compressionLevels) { byteCompressionTypes.add(compressionLevel.getValue()); } setCompressionPerLevel(nativeHandle_, byteCompressionTypes); return this; } @Override public List<CompressionType> compressionPerLevel() { final List<Byte> byteCompressionTypes = compressionPerLevel(nativeHandle_); final List<CompressionType> compressionLevels = new ArrayList<>(); for (final Byte byteCompressionType : byteCompressionTypes) { compressionLevels.add(CompressionType.getCompressionType( byteCompressionType)); } return compressionLevels; } @Override public Options setCompressionType(CompressionType compressionType) { setCompressionType(nativeHandle_, compressionType.getValue()); return this; } @Override public CompactionStyle compactionStyle() { return CompactionStyle.values()[compactionStyle(nativeHandle_)]; } @Override public Options setCompactionStyle( final CompactionStyle compactionStyle) { setCompactionStyle(nativeHandle_, compactionStyle.getValue()); return this; } @Override public int numLevels() { return numLevels(nativeHandle_); } @Override public Options setNumLevels(int numLevels) { setNumLevels(nativeHandle_, numLevels); return this; } @Override public int levelZeroFileNumCompactionTrigger() { return levelZeroFileNumCompactionTrigger(nativeHandle_); } @Override public Options setLevelZeroFileNumCompactionTrigger( final int numFiles) { setLevelZeroFileNumCompactionTrigger( nativeHandle_, numFiles); return this; } @Override public int levelZeroSlowdownWritesTrigger() { return levelZeroSlowdownWritesTrigger(nativeHandle_); } @Override public 
Options setLevelZeroSlowdownWritesTrigger( final int numFiles) { setLevelZeroSlowdownWritesTrigger(nativeHandle_, numFiles); return this; } @Override public int levelZeroStopWritesTrigger() { return levelZeroStopWritesTrigger(nativeHandle_); } @Override public Options setLevelZeroStopWritesTrigger( final int numFiles) { setLevelZeroStopWritesTrigger(nativeHandle_, numFiles); return this; } @Override public int maxMemCompactionLevel() { return 0; } @Override public Options setMaxMemCompactionLevel( final int maxMemCompactionLevel) { return this; } @Override public long targetFileSizeBase() { return targetFileSizeBase(nativeHandle_); } @Override public Options setTargetFileSizeBase(long targetFileSizeBase) { setTargetFileSizeBase(nativeHandle_, targetFileSizeBase); return this; } @Override public int targetFileSizeMultiplier() { return targetFileSizeMultiplier(nativeHandle_); } @Override public Options setTargetFileSizeMultiplier(int multiplier) { setTargetFileSizeMultiplier(nativeHandle_, multiplier); return this; } @Override public Options setMaxBytesForLevelBase(final long maxBytesForLevelBase) { setMaxBytesForLevelBase(nativeHandle_, maxBytesForLevelBase); return this; } @Override public long maxBytesForLevelBase() { return maxBytesForLevelBase(nativeHandle_); } @Override public Options setLevelCompactionDynamicLevelBytes( final boolean enableLevelCompactionDynamicLevelBytes) { setLevelCompactionDynamicLevelBytes(nativeHandle_, enableLevelCompactionDynamicLevelBytes); return this; } @Override public boolean levelCompactionDynamicLevelBytes() { return levelCompactionDynamicLevelBytes(nativeHandle_); } @Override public int maxBytesForLevelMultiplier() { return maxBytesForLevelMultiplier(nativeHandle_); } @Override public Options setMaxBytesForLevelMultiplier(final int multiplier) { setMaxBytesForLevelMultiplier(nativeHandle_, multiplier); return this; } @Override public int expandedCompactionFactor() { return expandedCompactionFactor(nativeHandle_); } @Override public Options setExpandedCompactionFactor( final int expandedCompactionFactor) { setExpandedCompactionFactor(nativeHandle_, expandedCompactionFactor); return this; } @Override public int sourceCompactionFactor() { return sourceCompactionFactor(nativeHandle_); } @Override public Options setSourceCompactionFactor(int sourceCompactionFactor) { setSourceCompactionFactor(nativeHandle_, sourceCompactionFactor); return this; } @Override public int maxGrandparentOverlapFactor() { return maxGrandparentOverlapFactor(nativeHandle_); } @Override public Options setMaxGrandparentOverlapFactor( final int maxGrandparentOverlapFactor) { setMaxGrandparentOverlapFactor(nativeHandle_, maxGrandparentOverlapFactor); return this; } @Override public double softRateLimit() { return softRateLimit(nativeHandle_); } @Override public Options setSoftRateLimit(final double softRateLimit) { setSoftRateLimit(nativeHandle_, softRateLimit); return this; } @Override public double hardRateLimit() { return hardRateLimit(nativeHandle_); } @Override public Options setHardRateLimit(double hardRateLimit) { setHardRateLimit(nativeHandle_, hardRateLimit); return this; } @Override public int rateLimitDelayMaxMilliseconds() { return rateLimitDelayMaxMilliseconds(nativeHandle_); } @Override public Options setRateLimitDelayMaxMilliseconds( final int rateLimitDelayMaxMilliseconds) { setRateLimitDelayMaxMilliseconds( nativeHandle_, rateLimitDelayMaxMilliseconds); return this; } @Override public long arenaBlockSize() { return arenaBlockSize(nativeHandle_); } @Override public 
Options setArenaBlockSize(final long arenaBlockSize) { setArenaBlockSize(nativeHandle_, arenaBlockSize); return this; } @Override public boolean disableAutoCompactions() { return disableAutoCompactions(nativeHandle_); } @Override public Options setDisableAutoCompactions( final boolean disableAutoCompactions) { setDisableAutoCompactions(nativeHandle_, disableAutoCompactions); return this; } @Override public boolean purgeRedundantKvsWhileFlush() { return purgeRedundantKvsWhileFlush(nativeHandle_); } @Override public Options setPurgeRedundantKvsWhileFlush( final boolean purgeRedundantKvsWhileFlush) { setPurgeRedundantKvsWhileFlush( nativeHandle_, purgeRedundantKvsWhileFlush); return this; } @Override public boolean verifyChecksumsInCompaction() { return verifyChecksumsInCompaction(nativeHandle_); } @Override public Options setVerifyChecksumsInCompaction( final boolean verifyChecksumsInCompaction) { setVerifyChecksumsInCompaction( nativeHandle_, verifyChecksumsInCompaction); return this; } @Override public boolean filterDeletes() { return filterDeletes(nativeHandle_); } @Override public Options setFilterDeletes( final boolean filterDeletes) { setFilterDeletes(nativeHandle_, filterDeletes); return this; } @Override public long maxSequentialSkipInIterations() { return maxSequentialSkipInIterations(nativeHandle_); } @Override public Options setMaxSequentialSkipInIterations( final long maxSequentialSkipInIterations) { setMaxSequentialSkipInIterations(nativeHandle_, maxSequentialSkipInIterations); return this; } @Override public boolean inplaceUpdateSupport() { return inplaceUpdateSupport(nativeHandle_); } @Override public Options setInplaceUpdateSupport( final boolean inplaceUpdateSupport) { setInplaceUpdateSupport(nativeHandle_, inplaceUpdateSupport); return this; } @Override public long inplaceUpdateNumLocks() { return inplaceUpdateNumLocks(nativeHandle_); } @Override public Options setInplaceUpdateNumLocks( final long inplaceUpdateNumLocks) { setInplaceUpdateNumLocks(nativeHandle_, inplaceUpdateNumLocks); return this; } @Override public int memtablePrefixBloomBits() { return memtablePrefixBloomBits(nativeHandle_); } @Override public Options setMemtablePrefixBloomBits( final int memtablePrefixBloomBits) { setMemtablePrefixBloomBits(nativeHandle_, memtablePrefixBloomBits); return this; } @Override public int memtablePrefixBloomProbes() { return memtablePrefixBloomProbes(nativeHandle_); } @Override public Options setMemtablePrefixBloomProbes( final int memtablePrefixBloomProbes) { setMemtablePrefixBloomProbes(nativeHandle_, memtablePrefixBloomProbes); return this; } @Override public int bloomLocality() { return bloomLocality(nativeHandle_); } @Override public Options setBloomLocality(final int bloomLocality) { setBloomLocality(nativeHandle_, bloomLocality); return this; } @Override public long maxSuccessiveMerges() { return maxSuccessiveMerges(nativeHandle_); } @Override public Options setMaxSuccessiveMerges(long maxSuccessiveMerges) { setMaxSuccessiveMerges(nativeHandle_, maxSuccessiveMerges); return this; } @Override public int minWriteBufferNumberToMerge() { return minWriteBufferNumberToMerge(nativeHandle_); } @Override public Options setMinWriteBufferNumberToMerge( final int minWriteBufferNumberToMerge) { setMinWriteBufferNumberToMerge(nativeHandle_, minWriteBufferNumberToMerge); return this; } @Override public int minPartialMergeOperands() { return minPartialMergeOperands(nativeHandle_); } @Override public Options setMinPartialMergeOperands( final int minPartialMergeOperands) { 
setMinPartialMergeOperands(nativeHandle_, minPartialMergeOperands); return this; } @Override public Options setOptimizeFiltersForHits( final boolean optimizeFiltersForHits) { setOptimizeFiltersForHits(nativeHandle_, optimizeFiltersForHits); return this; } @Override public boolean optimizeFiltersForHits() { return optimizeFiltersForHits(nativeHandle_); } /** * Release the memory allocated for the current instance * in the c++ side. */ @Override protected void disposeInternal() { assert(isInitialized()); disposeInternal(nativeHandle_); } private native void newOptions(); private native void newOptions(long dbOptHandle, long cfOptHandle); private native void disposeInternal(long handle); private native void setEnv(long optHandle, long envHandle); private native void prepareForBulkLoad(long handle); // DB native handles private native void setIncreaseParallelism(long handle, int totalThreads); private native void setCreateIfMissing(long handle, boolean flag); private native boolean createIfMissing(long handle); private native void setCreateMissingColumnFamilies( long handle, boolean flag); private native boolean createMissingColumnFamilies(long handle); private native void setErrorIfExists(long handle, boolean errorIfExists); private native boolean errorIfExists(long handle); private native void setParanoidChecks( long handle, boolean paranoidChecks); private native boolean paranoidChecks(long handle); private native void setRateLimiter(long handle, long rateLimiterHandle); private native void setLogger(long handle, long loggerHandle); private native void setInfoLogLevel(long handle, byte logLevel); private native byte infoLogLevel(long handle); private native void setMaxOpenFiles(long handle, int maxOpenFiles); private native int maxOpenFiles(long handle); private native void setMaxTotalWalSize(long handle, long maxTotalWalSize); private native long maxTotalWalSize(long handle); private native void createStatistics(long optHandle); private native long statisticsPtr(long optHandle); private native void setDisableDataSync(long handle, boolean disableDataSync); private native boolean disableDataSync(long handle); private native boolean useFsync(long handle); private native void setUseFsync(long handle, boolean useFsync); private native void setDbLogDir(long handle, String dbLogDir); private native String dbLogDir(long handle); private native void setWalDir(long handle, String walDir); private native String walDir(long handle); private native void setDeleteObsoleteFilesPeriodMicros( long handle, long micros); private native long deleteObsoleteFilesPeriodMicros(long handle); private native void setMaxBackgroundCompactions( long handle, int maxBackgroundCompactions); private native int maxBackgroundCompactions(long handle); private native void setMaxBackgroundFlushes( long handle, int maxBackgroundFlushes); private native int maxBackgroundFlushes(long handle); private native void setMaxLogFileSize(long handle, long maxLogFileSize) throws IllegalArgumentException; private native long maxLogFileSize(long handle); private native void setLogFileTimeToRoll( long handle, long logFileTimeToRoll) throws IllegalArgumentException; private native long logFileTimeToRoll(long handle); private native void setKeepLogFileNum(long handle, long keepLogFileNum) throws IllegalArgumentException; private native long keepLogFileNum(long handle); private native void setMaxManifestFileSize( long handle, long maxManifestFileSize); private native long maxManifestFileSize(long handle); private native void 
setMaxTableFilesSizeFIFO( long handle, long maxTableFilesSize); private native long maxTableFilesSizeFIFO(long handle); private native void setTableCacheNumshardbits( long handle, int tableCacheNumshardbits); private native int tableCacheNumshardbits(long handle); private native void setWalTtlSeconds(long handle, long walTtlSeconds); private native long walTtlSeconds(long handle); private native void setWalSizeLimitMB(long handle, long sizeLimitMB); private native long walSizeLimitMB(long handle); private native void setManifestPreallocationSize( long handle, long size) throws IllegalArgumentException; private native long manifestPreallocationSize(long handle); private native void setAllowOsBuffer( long handle, boolean allowOsBuffer); private native boolean allowOsBuffer(long handle); private native void setAllowMmapReads( long handle, boolean allowMmapReads); private native boolean allowMmapReads(long handle); private native void setAllowMmapWrites( long handle, boolean allowMmapWrites); private native boolean allowMmapWrites(long handle); private native void setIsFdCloseOnExec( long handle, boolean isFdCloseOnExec); private native boolean isFdCloseOnExec(long handle); private native void setStatsDumpPeriodSec( long handle, int statsDumpPeriodSec); private native int statsDumpPeriodSec(long handle); private native void setAdviseRandomOnOpen( long handle, boolean adviseRandomOnOpen); private native boolean adviseRandomOnOpen(long handle); private native void setUseAdaptiveMutex( long handle, boolean useAdaptiveMutex); private native boolean useAdaptiveMutex(long handle); private native void setBytesPerSync( long handle, long bytesPerSync); private native long bytesPerSync(long handle); // CF native handles private native void optimizeForPointLookup(long handle, long blockCacheSizeMb); private native void optimizeLevelStyleCompaction(long handle, long memtableMemoryBudget); private native void optimizeUniversalStyleCompaction(long handle, long memtableMemoryBudget); private native void setComparatorHandle(long handle, int builtinComparator); private native void setComparatorHandle(long optHandle, long comparatorHandle); private native void setMergeOperatorName( long handle, String name); private native void setMergeOperator( long handle, long mergeOperatorHandle); private native void setWriteBufferSize(long handle, long writeBufferSize) throws IllegalArgumentException; private native long writeBufferSize(long handle); private native void setMaxWriteBufferNumber( long handle, int maxWriteBufferNumber); private native int maxWriteBufferNumber(long handle); private native void setMinWriteBufferNumberToMerge( long handle, int minWriteBufferNumberToMerge); private native int minWriteBufferNumberToMerge(long handle); private native void setCompressionType(long handle, byte compressionType); private native byte compressionType(long handle); private native void setCompressionPerLevel(long handle, List<Byte> compressionLevels); private native List<Byte> compressionPerLevel(long handle); private native void useFixedLengthPrefixExtractor( long handle, int prefixLength); private native void useCappedPrefixExtractor( long handle, int prefixLength); private native void setNumLevels( long handle, int numLevels); private native int numLevels(long handle); private native void setLevelZeroFileNumCompactionTrigger( long handle, int numFiles); private native int levelZeroFileNumCompactionTrigger(long handle); private native void setLevelZeroSlowdownWritesTrigger( long handle, int numFiles); private native int 
levelZeroSlowdownWritesTrigger(long handle); private native void setLevelZeroStopWritesTrigger( long handle, int numFiles); private native int levelZeroStopWritesTrigger(long handle); private native void setTargetFileSizeBase( long handle, long targetFileSizeBase); private native long targetFileSizeBase(long handle); private native void setTargetFileSizeMultiplier( long handle, int multiplier); private native int targetFileSizeMultiplier(long handle); private native void setMaxBytesForLevelBase( long handle, long maxBytesForLevelBase); private native long maxBytesForLevelBase(long handle); private native void setLevelCompactionDynamicLevelBytes( long handle, boolean enableLevelCompactionDynamicLevelBytes); private native boolean levelCompactionDynamicLevelBytes( long handle); private native void setMaxBytesForLevelMultiplier( long handle, int multiplier); private native int maxBytesForLevelMultiplier(long handle); private native void setExpandedCompactionFactor( long handle, int expandedCompactionFactor); private native int expandedCompactionFactor(long handle); private native void setSourceCompactionFactor( long handle, int sourceCompactionFactor); private native int sourceCompactionFactor(long handle); private native void setMaxGrandparentOverlapFactor( long handle, int maxGrandparentOverlapFactor); private native int maxGrandparentOverlapFactor(long handle); private native void setSoftRateLimit( long handle, double softRateLimit); private native double softRateLimit(long handle); private native void setHardRateLimit( long handle, double hardRateLimit); private native double hardRateLimit(long handle); private native void setRateLimitDelayMaxMilliseconds( long handle, int rateLimitDelayMaxMilliseconds); private native int rateLimitDelayMaxMilliseconds(long handle); private native void setArenaBlockSize( long handle, long arenaBlockSize) throws IllegalArgumentException; private native long arenaBlockSize(long handle); private native void setDisableAutoCompactions( long handle, boolean disableAutoCompactions); private native boolean disableAutoCompactions(long handle); private native void setCompactionStyle(long handle, byte compactionStyle); private native byte compactionStyle(long handle); private native void setPurgeRedundantKvsWhileFlush( long handle, boolean purgeRedundantKvsWhileFlush); private native boolean purgeRedundantKvsWhileFlush(long handle); private native void setVerifyChecksumsInCompaction( long handle, boolean verifyChecksumsInCompaction); private native boolean verifyChecksumsInCompaction(long handle); private native void setFilterDeletes( long handle, boolean filterDeletes); private native boolean filterDeletes(long handle); private native void setMaxSequentialSkipInIterations( long handle, long maxSequentialSkipInIterations); private native long maxSequentialSkipInIterations(long handle); private native void setMemTableFactory(long handle, long factoryHandle); private native String memTableFactoryName(long handle); private native void setTableFactory(long handle, long factoryHandle); private native String tableFactoryName(long handle); private native void setInplaceUpdateSupport( long handle, boolean inplaceUpdateSupport); private native boolean inplaceUpdateSupport(long handle); private native void setInplaceUpdateNumLocks( long handle, long inplaceUpdateNumLocks) throws IllegalArgumentException; private native long inplaceUpdateNumLocks(long handle); private native void setMemtablePrefixBloomBits( long handle, int memtablePrefixBloomBits); private native int 
memtablePrefixBloomBits(long handle); private native void setMemtablePrefixBloomProbes( long handle, int memtablePrefixBloomProbes); private native int memtablePrefixBloomProbes(long handle); private native void setBloomLocality( long handle, int bloomLocality); private native int bloomLocality(long handle); private native void setMaxSuccessiveMerges( long handle, long maxSuccessiveMerges) throws IllegalArgumentException; private native long maxSuccessiveMerges(long handle); private native void setMinPartialMergeOperands( long handle, int minPartialMergeOperands); private native int minPartialMergeOperands(long handle); private native void setOptimizeFiltersForHits(long handle, boolean optimizeFiltersForHits); private native boolean optimizeFiltersForHits(long handle); // instance variables Env env_; MemTableConfig memTableConfig_; TableFormatConfig tableFormatConfig_; RateLimiterConfig rateLimiterConfig_; AbstractComparator<? extends AbstractSlice<?>> comparator_; }
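A minimal usage sketch (not part of the RocksDB sources above), assuming the standard org.rocksdb entry points; the database path and every option value below are illustrative assumptions, and the fluent setters used are those declared by the Options class above.

import org.rocksdb.CompactionStyle;
import org.rocksdb.CompressionType;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;

public class OptionsUsageSketch {
    public static void main(String[] args) throws RocksDBException {
        RocksDB.loadLibrary();
        // Every setter returns `this`, so the calls chain; each one forwards to the
        // corresponding private native method on nativeHandle_ shown above.
        Options options = new Options()
                .setCreateIfMissing(true)
                .setWriteBufferSize(64 * 1024 * 1024)               // 64 MB memtable (illustrative)
                .setMaxWriteBufferNumber(3)
                .setMaxBackgroundCompactions(4)
                .setCompressionType(CompressionType.SNAPPY_COMPRESSION)
                .setCompactionStyle(CompactionStyle.LEVEL);
        RocksDB db = RocksDB.open(options, "/tmp/rocksdb-options-sketch");
        try {
            db.put("key".getBytes(), "value".getBytes());
        } finally {
            // Release the native handles (see disposeInternal() above);
            // newer RocksDB Java releases use close() instead of dispose().
            db.dispose();
            options.dispose();
        }
    }
}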
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * A geohash cell filter that filters {@link GeoPoint}s by their geohashes. Basically, a * geohash prefix is defined by the filter and all geohashes matching this * prefix will be returned. The <code>neighbors</code> flag makes the filter also match * the geohashes that surround the given geohash. In general the neighborhood of a * geohash is defined by its eight adjacent cells.<br /> * The structure of the {@link GeohashCellQuery} is defined as: * <pre> * &quot;geohash_bbox&quot; { * &quot;field&quot;:&quot;location&quot;, * &quot;geohash&quot;:&quot;u33d8u5dkx8k&quot;, * &quot;neighbors&quot;:false * } * </pre> */ public class GeohashCellQuery { public static final String NAME = "geohash_cell"; public static final String NEIGHBORS = "neighbors"; public static final String PRECISION = "precision"; /** * Create a new geohash filter for a given set of geohashes. In general this method * returns a boolean filter combining the geohashes OR-wise. * * @param context Context of the filter * @param fieldType field mapper for geopoints * @param geohash mandatory geohash * @param geohashes optional array of additional geohashes * @return a new query matching the given geohashes */ public static Query create(QueryParseContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) { MappedFieldType geoHashMapper = fieldType.geohashFieldType(); if (geoHashMapper == null) { throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled"); } if (geohashes == null || geohashes.size() == 0) { return geoHashMapper.termQuery(geohash, context); } else { geohashes.add(geohash); return geoHashMapper.termsQuery(geohashes, context); } } /** * Builder for a geohash filter. It needs the fields <code>fieldname</code> and * <code>geohash</code> to be set.
The default for neighbor filtering is * <code>false</code>. */ public static class Builder extends QueryBuilder { // we need to store the geohash rather than the corresponding point, // because a transformation from a geohash to a point and back to the // geohash will extend the accuracy of the hash to max precision // i.e. by filling up with z's. private String field; private String geohash; private int levels = -1; private boolean neighbors; public Builder(String field) { this(field, null, false); } public Builder(String field, GeoPoint point) { this(field, point.geohash(), false); } public Builder(String field, String geohash) { this(field, geohash, false); } public Builder(String field, String geohash, boolean neighbors) { super(); this.field = field; this.geohash = geohash; this.neighbors = neighbors; } public Builder point(GeoPoint point) { this.geohash = point.getGeohash(); return this; } public Builder point(double lat, double lon) { this.geohash = GeoHashUtils.encode(lat, lon); return this; } public Builder geohash(String geohash) { this.geohash = geohash; return this; } public Builder precision(int levels) { this.levels = levels; return this; } public Builder precision(String precision) { double meters = DistanceUnit.parse(precision, DistanceUnit.DEFAULT, DistanceUnit.METERS); return precision(GeoUtils.geoHashLevelsForPrecision(meters)); } public Builder neighbors(boolean neighbors) { this.neighbors = neighbors; return this; } public Builder field(String field) { this.field = field; return this; } @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); if (neighbors) { builder.field(NEIGHBORS, neighbors); } if(levels > 0) { builder.field(PRECISION, levels); } builder.field(field, geohash); builder.endObject(); } } public static class Parser implements QueryParser { @Inject public Parser() { } @Override public String[] names() { return new String[]{NAME, Strings.toCamelCase(NAME)}; } @Override public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException { XContentParser parser = parseContext.parser(); String fieldName = null; String geohash = null; int levels = -1; boolean neighbors = false; XContentParser.Token token; if ((token = parser.currentToken()) != Token.START_OBJECT) { throw new ElasticsearchParseException("failed to parse [{}] query. expected an object but found [{}] instead", NAME, token); } while ((token = parser.nextToken()) != Token.END_OBJECT) { if (token == Token.FIELD_NAME) { String field = parser.text(); if (parseContext.isDeprecatedSetting(field)) { // skip } else if (PRECISION.equals(field)) { token = parser.nextToken(); if(token == Token.VALUE_NUMBER) { levels = parser.intValue(); } else if(token == Token.VALUE_STRING) { double meters = DistanceUnit.parse(parser.text(), DistanceUnit.DEFAULT, DistanceUnit.METERS); levels = GeoUtils.geoHashLevelsForPrecision(meters); } } else if (NEIGHBORS.equals(field)) { parser.nextToken(); neighbors = parser.booleanValue(); } else { fieldName = field; token = parser.nextToken(); if(token == Token.VALUE_STRING) { // A string indicates either a geohash or a lat/lon string String location = parser.text(); if(location.indexOf(",")>0) { geohash = GeoUtils.parseGeoPoint(parser).geohash(); } else { geohash = location; } } else { geohash = GeoUtils.parseGeoPoint(parser).geohash(); } } } else { throw new ElasticsearchParseException("failed to parse [{}] query.
unexpected token [{}]", NAME, token); } } if (geohash == null) { throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing geohash value", NAME); } MappedFieldType fieldType = parseContext.fieldMapper(fieldName); if (fieldType == null) { throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName); } if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) { throw new QueryParsingException(parseContext, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName); } GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType); if (!geoFieldType.isGeohashPrefixEnabled()) { throw new QueryParsingException(parseContext, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME, fieldName); } if(levels > 0) { int len = Math.min(levels, geohash.length()); geohash = geohash.substring(0, len); } Query filter; if (neighbors) { filter = create(parseContext, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8))); } else { filter = create(parseContext, geoFieldType, geohash, null); } return filter; } } }
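A hypothetical usage sketch for the builder defined above; the index name, field name, coordinates and the Client instance are assumptions for illustration, not part of the Elasticsearch sources. The field must be a geo_point with geohash_prefix enabled, as the parser above enforces.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.GeohashCellQuery;

public class GeohashCellQueryExample {
    // Builds a geohash_cell query on an assumed geo_point field "location".
    public static SearchResponse search(Client client) {
        GeohashCellQuery.Builder query = new GeohashCellQuery.Builder("location")
                .point(52.3760, 4.8940)   // encoded into a geohash by the builder
                .precision("50m")         // distance string converted to geohash levels
                .neighbors(true);         // also match the eight adjacent cells
        return client.prepareSearch("places")
                .setQuery(query)
                .get();
    }
}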
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandra.tools; import org.apache.cassandra.schema.TableMetadata; import org.apache.cassandra.schema.Schema; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.Directories; import org.apache.cassandra.db.lifecycle.LifecycleTransaction; import org.apache.cassandra.utils.OutputHandler; import org.apache.commons.cli.*; import java.io.File; import java.io.IOException; import java.util.function.BiFunction; import java.util.function.BiPredicate; import static org.apache.cassandra.tools.BulkLoader.CmdLineOptions; public class StandaloneSSTableUtil { private static final String TOOL_NAME = "sstableutil"; private static final String TYPE_OPTION = "type"; private static final String OP_LOG_OPTION = "oplog"; private static final String VERBOSE_OPTION = "verbose"; private static final String DEBUG_OPTION = "debug"; private static final String HELP_OPTION = "help"; private static final String CLEANUP_OPTION = "cleanup"; public static void main(String args[]) { Options options = Options.parseArgs(args); try { // load keyspace descriptions. 
Util.initDatabaseDescriptor(); Schema.instance.loadFromDisk(false); TableMetadata metadata = Schema.instance.getTableMetadata(options.keyspaceName, options.cfName); if (metadata == null) throw new IllegalArgumentException(String.format("Unknown keyspace/table %s.%s", options.keyspaceName, options.cfName)); OutputHandler handler = new OutputHandler.SystemOutput(options.verbose, options.debug); if (options.cleanup) { handler.output("Cleaning up..."); LifecycleTransaction.removeUnfinishedLeftovers(metadata); } else { handler.output("Listing files..."); listFiles(options, metadata, handler); } System.exit(0); } catch (Exception e) { System.err.println(e.getMessage()); if (options.debug) e.printStackTrace(System.err); System.exit(1); } } private static void listFiles(Options options, TableMetadata metadata, OutputHandler handler) throws IOException { Directories directories = new Directories(metadata); for (File dir : directories.getCFDirectories()) { for (File file : LifecycleTransaction.getFiles(dir.toPath(), getFilter(options), Directories.OnTxnErr.THROW)) handler.output(file.getCanonicalPath()); } } private static BiPredicate<File, Directories.FileType> getFilter(Options options) { return (file, type) -> { switch(type) { case FINAL: return options.type != Options.FileType.TMP; case TEMPORARY: return options.type != Options.FileType.FINAL; case TXN_LOG: return options.oplogs; default: throw new AssertionError(); } }; } private static class Options { public enum FileType { ALL("all", "list all files, final or temporary"), TMP("tmp", "list temporary files only"), FINAL("final", "list final files only"); public String option; public String descr; FileType(String option, String descr) { this.option = option; this.descr = descr; } static FileType fromOption(String option) { for (FileType fileType : FileType.values()) { if (fileType.option.equals(option)) return fileType; } return FileType.ALL; } static String descr() { StringBuilder str = new StringBuilder(); for (FileType fileType : FileType.values()) { str.append(fileType.option); str.append(" ("); str.append(fileType.descr); str.append("), "); } return str.toString(); } } public final String keyspaceName; public final String cfName; public boolean debug; public boolean verbose; public boolean oplogs; public boolean cleanup; public FileType type; private Options(String keyspaceName, String cfName) { this.keyspaceName = keyspaceName; this.cfName = cfName; } public static Options parseArgs(String cmdArgs[]) { CommandLineParser parser = new GnuParser(); CmdLineOptions options = getCmdLineOptions(); try { CommandLine cmd = parser.parse(options, cmdArgs, false); if (cmd.hasOption(HELP_OPTION)) { printUsage(options); System.exit(0); } String[] args = cmd.getArgs(); if (args.length != 2) { String msg = args.length < 2 ?
"Missing arguments" : "Too many arguments"; System.err.println(msg); printUsage(options); System.exit(1); } String keyspaceName = args[0]; String cfName = args[1]; Options opts = new Options(keyspaceName, cfName); opts.debug = cmd.hasOption(DEBUG_OPTION); opts.verbose = cmd.hasOption(VERBOSE_OPTION); opts.type = FileType.fromOption(cmd.getOptionValue(TYPE_OPTION)); opts.oplogs = cmd.hasOption(OP_LOG_OPTION); opts.cleanup = cmd.hasOption(CLEANUP_OPTION); return opts; } catch (ParseException e) { errorMsg(e.getMessage(), options); return null; } } private static void errorMsg(String msg, CmdLineOptions options) { System.err.println(msg); printUsage(options); System.exit(1); } private static CmdLineOptions getCmdLineOptions() { CmdLineOptions options = new CmdLineOptions(); options.addOption("c", CLEANUP_OPTION, "clean-up any outstanding transactions"); options.addOption("d", DEBUG_OPTION, "display stack traces"); options.addOption("h", HELP_OPTION, "display this help message"); options.addOption("o", OP_LOG_OPTION, "include operation logs"); options.addOption("t", TYPE_OPTION, true, FileType.descr()); options.addOption("v", VERBOSE_OPTION, "verbose output"); return options; } public static void printUsage(CmdLineOptions options) { String usage = String.format("%s [options] <keyspace> <column_family>", TOOL_NAME); StringBuilder header = new StringBuilder(); header.append("--\n"); header.append("List sstable files for the provided table." ); header.append("\n--\n"); header.append("Options are:"); new HelpFormatter().printHelp(usage, header.toString(), options, ""); } } }
/******************************************************************************* * Copyright (c) 2006, 2012 Oracle Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Oracle Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.bpel.ui.editors.xpath; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import org.eclipse.bpel.ui.editors.xpath.rules.AxisRule; import org.eclipse.bpel.ui.editors.xpath.rules.FloatRule; import org.eclipse.bpel.ui.editors.xpath.rules.FunctionRule; import org.eclipse.bpel.ui.editors.xpath.rules.ITokenContext; import org.eclipse.bpel.ui.editors.xpath.rules.SingleCharRule; import org.eclipse.bpel.ui.editors.xpath.rules.SingleOperatorRule; import org.eclipse.bpel.ui.editors.xpath.rules.StringRule; import org.eclipse.bpel.ui.editors.xpath.rules.WordRule; import org.eclipse.bpel.ui.preferences.PreferenceConstants; import org.eclipse.jface.text.TextAttribute; import org.eclipse.jface.text.rules.BufferedRuleBasedScanner; import org.eclipse.jface.text.rules.ICharacterScanner; import org.eclipse.jface.text.rules.IRule; import org.eclipse.jface.text.rules.IToken; import org.eclipse.jface.text.rules.IWordDetector; import org.eclipse.jface.text.rules.SingleLineRule; import org.eclipse.jface.text.rules.Token; import org.eclipse.jface.text.rules.WhitespaceRule; import org.eclipse.swt.SWT; /** * @author Michal Chmielewski (michal.chmielewski@oracle.com) * @date Oct 25, 2006 */ public class XPathSourceScanner extends BufferedRuleBasedScanner { IWordDetector fNCNameDetector = new XPathWordDetector.NCNameWordDetector (); IWordDetector fWordDetector = new XPathWordDetector(); IWordDetector fVariableNameDetector = new XPathWordDetector.VariableDetector(); IWordDetector fQNameDetector = new XPathWordDetector.QNameDetector (); LinkedList<IToken> tokenWindow = new LinkedList<IToken>(); /** * The scanner for the XPath source editor, which provides * syntax coloring based on the default damager and repairer. 
* * @param manager */ @SuppressWarnings("nls") public XPathSourceScanner( ColorManager manager ) { IToken defToken = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.DEFAULT ) ) ); final IToken operatorToken = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.OPERAND ), null, SWT.BOLD ) ); IToken number = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.NUMBER ), null, SWT.BOLD ) ); IToken string = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.STRING ) ) ) ; IToken brackets = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.BRACKET ), null, SWT.BOLD ) ); IToken axis = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.AXIS ), null, SWT.ITALIC ) ); IToken pathSep = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.PATH_SEPARATOR ), null, SWT.BOLD ) ); IToken functionsDefault = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.FUNCTIONS_XPATH ), null, SWT.ITALIC ) ); IToken functions = new Token ( new TextAttribute ( manager.getColor( PreferenceConstants.FUNCTIONS_XPATH ), null, SWT.BOLD) ) ; final IToken variableToken = new Token ( new TextAttribute( manager.getColor( PreferenceConstants.VARIABLES ), null, SWT.BOLD )); IToken partToken = new Token ( new TextAttribute( manager.getColor( PreferenceConstants.VARIABLE_PART ), null, SWT.BOLD )); // The list of rules for this scanner. List<IRule> rules = new ArrayList<IRule>(24); // Add rule for double quotes string rules.add( new SingleLineRule("\"", "\"", string , '\\') ); //$NON-NLS-1$ //$NON-NLS-2$ // Add a rule for single quotes string rules.add( new SingleLineRule("'", "'", string , '\\') ); //$NON-NLS-1$ //$NON-NLS-2$ // Add function calls ... // Add generic whitespace rule. rules.add( new WhitespaceRule(new XPathWhitespaceDetector()) ); // numbers rules.add ( new FloatRule ( number )) ; WordRule wordRule; // variable rule wordRule = new WordRule ( fVariableNameDetector ); wordRule.addWord ( WordRule.ANY , variableToken ); rules.add (wordRule); // Variable part rule wordRule = new WordRule ( new XPathWordDetector.MessagePartDetector() ); wordRule.addWord ( WordRule.ANY , partToken ); wordRule.setTokenContextCheck( new TokenContext () { @Override public boolean checkSeenTokens(XPathSourceScanner scanner) { return (scanner.lastToken(0) == variableToken); } }); rules.add (wordRule); // Some operators. rules.add ( new SingleOperatorRule ( operatorToken, "+-*=|/<>" ) ); // Operators of sorts ... rules.add ( new StringRule ( operatorToken, "!=") ); rules.add ( new StringRule ( operatorToken, ">=") ); rules.add ( new StringRule ( operatorToken, "<=") ); rules.add ( new StringRule ( operatorToken, ">=") ); rules.add ( new SingleCharRule ( brackets, "[]().@," ) ); rules.add ( new StringRule ( operatorToken, "//") ); rules.add ( new StringRule ( pathSep, "::") ); // rule for operators ... wordRule = new WordRule( new XPathWordDetector () ); wordRule.addWord ("mod",operatorToken); wordRule.addWord ("div",operatorToken); wordRule.addWord ("and",operatorToken); wordRule.addWord ("or",operatorToken); wordRule.setTokenContextCheck( new TokenContext() { @Override public boolean checkSeenTokens(XPathSourceScanner scanner) { int idx = (scanner.lastToken(0) == Token.WHITESPACE ? 
1 : 0); return scanner.lastToken(idx) != operatorToken; } }); rules.add( wordRule ); AxisRule axisRule = new AxisRule ( fNCNameDetector ); axisRule.addWords(AXIS, axis); rules.add(axisRule); // The basic XPath functions FunctionRule functionRule = new FunctionRule ( fQNameDetector ); functionRule.addWords(XPATH_FUNCTIONS,functionsDefault); rules.add(functionRule); // All other functions functionRule = new FunctionRule ( fQNameDetector ); functionRule.addWord( WordRule.ANY,functions); rules.add(functionRule); wordRule = new WordRule( fWordDetector ); wordRule.addWord ( WordRule.ANY, defToken ); rules.add( wordRule ); setDefaultReturnToken( defToken ) ; setRules ( rules.toArray(new IRule[]{} )); } /** * * @see org.eclipse.jface.text.rules.RuleBasedScanner#nextToken() */ @Override public IToken nextToken() { IToken next = super.nextToken(); tokenWindow.addFirst(next); if (tokenWindow.size() > 4) { tokenWindow.removeLast(); } return next; } /** * Returns the last token with the index of offset. Index 0 means the last token seen, * 1 means the one before the last token seen. * * @param offset * @return the token requested or undefined. */ public IToken lastToken ( int offset ) { try { return tokenWindow.get(offset); } catch (Throwable t) { return Token.UNDEFINED; } } static private final String[] XPATH_FUNCTIONS = { "last","position","count","id","local-name","namespace-uri","name", "string","concat","starts-with","contains","substring-before","substring-after", "substring","string-length","normalize-space","translate", "boolean","not","true","false","lang", "number","sum","floor","ceiling","round" }; static private final String[] AXIS = { "ancestor", "ancestor-or-self", "attribute", "child", "descendant", "descendant-or-self", "following", "following-sibling", "namespace", "parent", "preceding", "preceding-sibling", "self" }; /** * The TokenContext class allows us to see what tokens we have seen * so far. In some syntax coloring constructs we need to have a memory * (albeit a simple one) of where we have been. * * @author Michal Chmielewski (michal.chmielewski@oracle.com) * @date Nov 27, 2006 */ abstract class TokenContext implements ITokenContext { /** (non-Javadoc) * @see org.eclipse.bpel.ui.editors.xpath.rules.ITokenContext#check(org.eclipse.jface.text.rules.ICharacterScanner) */ public boolean check (ICharacterScanner scanner) { if (scanner instanceof XPathSourceScanner) { return checkSeenTokens ( (XPathSourceScanner) scanner); } return false; } /** * * @param scanner * @return true if the right tokens have been seen so far, false * otherwise. * */ public abstract boolean checkSeenTokens ( XPathSourceScanner scanner ) ; } }
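A sketch of the usual JFace wiring for a rule-based scanner such as the one above, assuming the ColorManager class from the same package; this is how the "default damager and repairer" mentioned in the class comment are typically attached from a SourceViewerConfiguration.

import org.eclipse.bpel.ui.editors.xpath.ColorManager;
import org.eclipse.bpel.ui.editors.xpath.XPathSourceScanner;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.presentation.PresentationReconciler;
import org.eclipse.jface.text.rules.DefaultDamagerRepairer;

public class XPathReconcilerSketch {
    // Hooks the scanner into a presentation reconciler so its tokens drive syntax coloring.
    public static PresentationReconciler createReconciler(ColorManager colorManager) {
        PresentationReconciler reconciler = new PresentationReconciler();
        DefaultDamagerRepairer dr = new DefaultDamagerRepairer(new XPathSourceScanner(colorManager));
        reconciler.setDamager(dr, IDocument.DEFAULT_CONTENT_TYPE);
        reconciler.setRepairer(dr, IDocument.DEFAULT_CONTENT_TYPE);
        return reconciler;
    }
}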
package uk.co.jemos.podam.typeManufacturers; import org.apache.commons.lang3.ArrayUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import uk.co.jemos.podam.api.DataProviderStrategy; import uk.co.jemos.podam.api.ObjectStrategy; import uk.co.jemos.podam.api.PodamUtils; import uk.co.jemos.podam.common.*; import javax.validation.Constraint; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import javax.xml.ws.Holder; import java.lang.annotation.Annotation; import java.lang.reflect.*; import java.util.*; import java.util.concurrent.atomic.AtomicReference; /** * Type Manufacturer utility class. * * Created by tedonema on 01/07/2015. * * @since 6.0.0.RELEASE */ public abstract class TypeManufacturerUtil { /** The application logger */ private static final Logger LOG = LoggerFactory.getLogger(TypeManufacturerUtil.class); /** * It returns a {@link AttributeStrategy} if one was specified in * annotations, or {@code null} otherwise. * * @param strategy * The data provider strategy * @param annotations * The list of annotations, irrelevant annotations will be removed * @param attributeType * Type of attribute expected to be returned * @return {@link AttributeStrategy}, if {@link PodamStrategyValue} or bean * validation constraint annotation was found among annotations * @throws IllegalAccessException * if attribute strategy cannot be instantiated * @throws InstantiationException * if attribute strategy cannot be instantiated * @throws SecurityException * if access security is violated * @throws InvocationTargetException * if invocation failed * @throws IllegalArgumentException * if illegal argument provided to a constructor */ public static AttributeStrategy<?> findAttributeStrategy(DataProviderStrategy strategy, List<Annotation> annotations, Class<?> attributeType) throws InstantiationException, IllegalAccessException, SecurityException, IllegalArgumentException, InvocationTargetException { List<Annotation> localAnnotations = new ArrayList<Annotation>(annotations); Iterator<Annotation> iter = localAnnotations.iterator(); while (iter.hasNext()) { Annotation annotation = iter.next(); if (annotation instanceof PodamStrategyValue) { PodamStrategyValue strategyAnnotation = (PodamStrategyValue) annotation; return strategyAnnotation.value().newInstance(); } /* Podam annotation is present, this will be handled later by type manufacturers */ if (annotation.annotationType().getAnnotation(PodamAnnotation.class) != null) { return null; } /* Find real class out of proxy */ Class<? extends Annotation> annotationClass = annotation.getClass(); if (Proxy.isProxyClass(annotationClass)) { Class<?>[] interfaces = annotationClass.getInterfaces(); if (interfaces.length == 1) { @SuppressWarnings("unchecked") Class<? extends Annotation> tmp = (Class<? 
extends Annotation>) interfaces[0]; annotationClass = tmp; } } AttributeStrategy<?> attrStrategy = strategy.getStrategyForAnnotation(annotationClass); if (null != attrStrategy) { return attrStrategy; } if (annotation.annotationType().getAnnotation(Constraint.class) != null) { if (annotation instanceof NotNull || annotation.annotationType().getName().equals("org.hibernate.validator.constraints.NotEmpty") || annotation.annotationType().getName().equals("org.hibernate.validator.constraints.NotBlank")) { /* We don't need to do anything for NotNull constraint */ iter.remove(); } else if (!NotNull.class.getPackage().equals(annotationClass.getPackage())) { LOG.warn("Please, register AttributeStrategy for custom " + "constraint {}, in DataProviderStrategy! Value " + "will be left to null", annotation); } } else { iter.remove(); } } AttributeStrategy<?> retValue = null; if (!localAnnotations.isEmpty() && !Collection.class.isAssignableFrom(attributeType) && !Map.class.isAssignableFrom(attributeType) && !attributeType.isArray()) { retValue = new BeanValidationStrategy(attributeType); } return retValue; } /** * Finds suitable factory methods for POJO instantiation * <p> * This method scans the factory class for static methods (or any method, when a * separate factory class is used) whose return type is the POJO class, e.g. * <code>getInstance()</code>; these can be used when no publicly accessible * constructor is available. * </p> * * @param factoryClass * Factory class to produce the POJO * @param pojoClass * Typed class * @return an array of suitable factory methods found */ public static Method[] findSuitableConstructors(final Class<?> factoryClass, final Class<?> pojoClass) { // If no publicly accessible constructors are available, // the best we can do is to find a constructor (e.g. // getInstance()) Method[] declaredMethods = factoryClass.getDeclaredMethods(); List<Method> constructors = new ArrayList<Method>(); // A candidate factory method is a method which returns the // Class type for (Method candidateConstructor : declaredMethods) { if (candidateConstructor.getReturnType().equals(pojoClass)) { if (Modifier.isStatic(candidateConstructor.getModifiers()) || !factoryClass.equals(pojoClass)) { constructors.add(candidateConstructor); } } } return constructors.toArray(new Method[constructors.size()]); } /** * Fills type arguments map * <p> * This method places required and provided types for object creation into a * map, which will be used for type mapping.
* </p> * * @param typeArgsMap * a map to fill * @param pojoClass * Typed class * @param genericTypeArgs * Type arguments provided for a generics object by caller * @return Array of unused provided generic type arguments * @throws IllegalStateException * If number of typed parameters doesn't match number of * provided generic types */ public static Type[] fillTypeArgMap(final Map<String, Type> typeArgsMap, final Class<?> pojoClass, final Type[] genericTypeArgs) { TypeVariable<?>[] array = pojoClass.getTypeParameters(); List<TypeVariable<?>> typeParameters = new ArrayList<TypeVariable<?>>(Arrays.asList(array)); Iterator<TypeVariable<?>> iterator = typeParameters.iterator(); /* Removing types, which are already in typeArgsMap */ while (iterator.hasNext()) { if (typeArgsMap.containsKey(iterator.next().getName())) { iterator.remove(); } } List<Type> genericTypes = new ArrayList<Type>(Arrays.asList(genericTypeArgs)); Iterator<Type> iterator2 = genericTypes.iterator(); /* Removing types, which are type variables */ while (iterator2.hasNext()) { if (iterator2.next() instanceof TypeVariable) { iterator2.remove(); } } if (typeParameters.size() > genericTypes.size()) { String msg = pojoClass.getCanonicalName() + " is missing generic type arguments, expected " + typeParameters + ", provided " + Arrays.toString(genericTypeArgs); throw new IllegalArgumentException(msg); } final Method[] suitableConstructors = TypeManufacturerUtil.findSuitableConstructors(pojoClass, pojoClass); for (Method constructor : suitableConstructors) { TypeVariable<Method>[] ctorTypeParams = constructor.getTypeParameters(); if (ctorTypeParams.length == genericTypes.size()) { for (int i = 0; i < ctorTypeParams.length; i++) { Type foundType = genericTypes.get(i); typeArgsMap.put(ctorTypeParams[i].getName(), foundType); } } } for (int i = 0; i < typeParameters.size(); i++) { Type foundType = genericTypes.remove(0); typeArgsMap.put(typeParameters.get(i).getName(), foundType); } Type[] genericTypeArgsExtra; if (genericTypes.size() > 0) { genericTypeArgsExtra = genericTypes.toArray(new Type[genericTypes.size()]); } else { genericTypeArgsExtra = PodamConstants.NO_TYPES; } /* Adding types, which were specified during inheritance */ Class<?> clazz = pojoClass; while (clazz != null) { Type superType = clazz.getGenericSuperclass(); clazz = clazz.getSuperclass(); if (superType instanceof ParameterizedType) { ParameterizedType paramType = (ParameterizedType) superType; Type[] actualParamTypes = paramType.getActualTypeArguments(); TypeVariable<?>[] paramTypes = clazz.getTypeParameters(); for (int i = 0; i < actualParamTypes.length && i < paramTypes.length; i++) { if (actualParamTypes[i] instanceof Class) { typeArgsMap.put(paramTypes[i].getName(), actualParamTypes[i]); } } } } return genericTypeArgsExtra; } /** * Searches for annotation with information about collection/map size * and filling strategies * * @param strategy * a data provider strategy * @param annotations * a list of annotations to inspect * @param collectionElementType * a collection element type * @param elementStrategyHolder * a holder to pass found element strategy back to the caller, * can be null * @param keyStrategyHolder * a holder to pass found key strategy back to the caller, * can be null * @return * A number of element in collection or null, if no annotation was * found * @throws InstantiationException * A strategy cannot be instantiated * @throws IllegalAccessException * A strategy cannot be instantiated */ public static Integer findCollectionSize( 
DataProviderStrategy strategy, List<Annotation> annotations, Class<?> collectionElementType, Holder<AttributeStrategy<?>> elementStrategyHolder, Holder<AttributeStrategy<?>> keyStrategyHolder) throws InstantiationException, IllegalAccessException { // If the user defined a strategy to fill the collection elements, // we use it Size size = null; for (Annotation annotation : annotations) { if (annotation instanceof PodamCollection) { PodamCollection collectionAnnotation = (PodamCollection) annotation; if (null != elementStrategyHolder) { Class<? extends AttributeStrategy<?>> attributeStrategy = collectionAnnotation.collectionElementStrategy(); if (null == attributeStrategy || ObjectStrategy.class.isAssignableFrom(attributeStrategy)) { attributeStrategy = collectionAnnotation.mapElementStrategy(); } if (null != attributeStrategy) { elementStrategyHolder.value = attributeStrategy.newInstance(); } } if (null != keyStrategyHolder) { Class<? extends AttributeStrategy<?>> attributeStrategy = collectionAnnotation.mapKeyStrategy(); if (null != attributeStrategy) { keyStrategyHolder.value = attributeStrategy.newInstance(); } } return collectionAnnotation.nbrElements(); } else if (annotation instanceof Size) { size = (Size) annotation; } } Integer nbrElements = strategy .getNumberOfCollectionElements(collectionElementType); if (null != size) { if (nbrElements > size.max()) { nbrElements = size.max(); } if (nbrElements < size.min()) { nbrElements = size.min(); } } return nbrElements; } /** * Utility to merge actual types with supplied array of generic type * substitutions * * @param attributeType * actual type of object * @param genericAttributeType * generic type of object * @param suppliedTypes * an array of supplied types for generic type substitution * @param typeArgsMap * a map relating the generic class arguments ("&lt;T, V&gt;" for * example) with their actual types * @return An array of merged actual and supplied types with generic types * resolved */ public static Type[] mergeActualAndSuppliedGenericTypes( Class<?> attributeType, Type genericAttributeType, Type[] suppliedTypes, Map<String, Type> typeArgsMap) { TypeVariable<?>[] actualTypes = attributeType.getTypeParameters(); if (actualTypes.length <= suppliedTypes.length) { return suppliedTypes; } Type[] genericTypes = null; if (genericAttributeType instanceof ParameterizedType) { ParameterizedType paramType = (ParameterizedType) genericAttributeType; genericTypes = paramType.getActualTypeArguments(); } else if (genericAttributeType instanceof WildcardType) { WildcardType wildcardType = (WildcardType) genericAttributeType; genericTypes = wildcardType.getLowerBounds(); if (ArrayUtils.isEmpty(genericTypes)) { genericTypes = wildcardType.getUpperBounds(); } } List<Type> resolvedTypes = new ArrayList<Type>(); List<Type> substitutionTypes = new ArrayList<Type>(Arrays.asList(suppliedTypes)); for (int i = 0; i < actualTypes.length; i++) { Type type = null; if (actualTypes[i] instanceof TypeVariable) { type = typeArgsMap.get(((TypeVariable<?>)actualTypes[i]).getName()); } else if (actualTypes[i] instanceof WildcardType) { AtomicReference<Type[]> methodGenericTypeArgs = new AtomicReference<Type[]>(PodamConstants.NO_TYPES); type = TypeManufacturerUtil.resolveGenericParameter(actualTypes[i], typeArgsMap, methodGenericTypeArgs); } if ((type == null) && (genericTypes != null)) { if (genericTypes[i] instanceof Class) { type = genericTypes[i]; } else if (genericTypes[i] instanceof WildcardType) { AtomicReference<Type[]> methodGenericTypeArgs = new 
							AtomicReference<Type[]>(PodamConstants.NO_TYPES);
					type = resolveGenericParameter(genericTypes[i], typeArgsMap, methodGenericTypeArgs);
				} else if (genericTypes[i] instanceof ParameterizedType) {
					type = genericTypes[i];
				} else {
					LOG.debug("Skipping type {} {}", actualTypes[i], genericTypes[i]);
				}
			}
			if (type != null) {
				resolvedTypes.add(type);
				if (!substitutionTypes.isEmpty() && substitutionTypes.get(0).equals(type)) {
					substitutionTypes.remove(0);
				}
			}
		}
		Type[] resolved = resolvedTypes.toArray(new Type[resolvedTypes.size()]);
		Type[] supplied = substitutionTypes.toArray(new Type[substitutionTypes.size()]);
		return ArrayUtils.addAll(resolved, supplied);
	}

	/**
	 * Resolves a generic parameter type
	 *
	 * @param paramType
	 *            The generic parameter type
	 * @param typeArgsMap
	 *            A map of resolved types
	 * @param methodGenericTypeArgs
	 *            Return value: possible generic types of the generic parameter
	 *            type
	 * @return the class representing the generic parameter type
	 */
	public static Class<?> resolveGenericParameter(Type paramType, Map<String, Type> typeArgsMap,
			AtomicReference<Type[]> methodGenericTypeArgs) {

		Class<?> parameterType = null;
		//Safe copy
		Map<String, Type> localMap = new HashMap<String, Type>(typeArgsMap);
		methodGenericTypeArgs.set(PodamConstants.NO_TYPES);
		if (paramType instanceof Class) {
			parameterType = (Class<?>) paramType;
		} else if (paramType instanceof TypeVariable<?>) {
			final TypeVariable<?> typeVariable = (TypeVariable<?>) paramType;
			final Type type = localMap.get(typeVariable.getName());
			if (type != null) {
				parameterType = resolveGenericParameter(type, localMap, methodGenericTypeArgs);
			}
		} else if (paramType instanceof ParameterizedType) {
			ParameterizedType pType = (ParameterizedType) paramType;
			parameterType = (Class<?>) pType.getRawType();
			Type[] actualTypeArgs = pType.getActualTypeArguments();
			if (!typeArgsMap.isEmpty()) {
				for (int i = 0; i < actualTypeArgs.length; i++) {
					Class<?> tmp = resolveGenericParameter(actualTypeArgs[i], localMap, methodGenericTypeArgs);
					if (tmp != actualTypeArgs[i]) {
						/* If the actual type argument has its own arguments,
						 * we would lose them now, so we leave the type unresolved
						 * until lower levels of type resolution */
						if (ArrayUtils.isEmpty(methodGenericTypeArgs.get())) {
							actualTypeArgs[i] = tmp;
						}
					}
				}
			}
			methodGenericTypeArgs.set(actualTypeArgs);
		} else if (paramType instanceof WildcardType) {
			WildcardType wType = (WildcardType) paramType;
			Type[] bounds = wType.getLowerBounds();
			String msg;
			if (ArrayUtils.isNotEmpty(bounds)) {
				msg = "Lower bounds:";
			} else {
				bounds = wType.getUpperBounds();
				msg = "Upper bounds:";
			}
			if (ArrayUtils.isNotEmpty(bounds)) {
				LOG.debug(msg + Arrays.toString(bounds));
				parameterType = resolveGenericParameter(bounds[0], localMap, methodGenericTypeArgs);
			}
		}
		if (parameterType == null) {
			LOG.warn("Unrecognized type {}. Will use Object instead", paramType);
			parameterType = Object.class;
		}
		return parameterType;
	}

	/**
	 * It retrieves the value for the {@link PodamStrategyValue} annotation with
	 * which the attribute was annotated
	 *
	 * @param attributeType
	 *            The attribute type, used for type checking
	 * @param annotations
	 *            Annotations attached to the attribute
	 * @param attributeStrategy
	 *            The {@link AttributeStrategy} to use
	 * @return The value for the {@link PodamStrategyValue} annotation with
	 *         which the attribute was annotated
	 * @throws IllegalArgumentException
	 *             If the type of the data strategy defined for the
	 *             {@link PodamStrategyValue} annotation is not assignable to
	 *             the annotated attribute. This de facto guarantees type
	 *             safety.
*/ public static Object returnAttributeDataStrategyValue(Class<?> attributeType, List<Annotation> annotations, AttributeStrategy<?> attributeStrategy) throws IllegalArgumentException { if (null == attributeStrategy) { return null; } Object retValue = attributeStrategy.getValue(attributeType, annotations); if (retValue != null) { Class<?> desiredType = attributeType.isPrimitive() ? PodamUtils.primitiveToBoxedType(attributeType) : attributeType; if (!desiredType.isAssignableFrom(retValue.getClass())) { String errMsg = "The AttributeStrategy " + attributeStrategy.getClass().getName() + " produced value of type " + retValue.getClass().getName() + " incompatible with attribute type " + attributeType.getName(); throw new IllegalArgumentException(errMsg); } else { LOG.debug("The parameter {} will be filled using the following strategy {}", attributeType, attributeStrategy); } } return retValue; } }
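/*
 * Illustrative sketch (not part of the PODAM sources above): utilities such as
 * fillTypeArgMap and resolveGenericParameter are built on the JDK generic-reflection
 * API. This minimal, self-contained example shows the core calls they rely on:
 * reading the ParameterizedType of a generic superclass, mapping type-variable names
 * to the actual type arguments, and resolving a field's element type through that map.
 * All names below (GenericTypeResolutionSketch, Holder, StringHolder) are hypothetical
 * and exist only for this sketch.
 */
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GenericTypeResolutionSketch {

    /** A toy generic holder: V is declared on the class and used by the field. */
    static class Holder<V> {
        List<V> values;
    }

    /** A concrete subclass that pins V to String via its generic superclass. */
    static class StringHolder extends Holder<String> {
    }

    public static void main(String[] args) throws Exception {
        // Map type-variable names ("V") to the actual types supplied by the
        // subclass, which is what a typeArgsMap is used for above.
        Map<String, Type> typeArgsMap = new HashMap<String, Type>();
        Type superType = StringHolder.class.getGenericSuperclass();
        if (superType instanceof ParameterizedType) {
            ParameterizedType parameterized = (ParameterizedType) superType;
            TypeVariable<?>[] declared = Holder.class.getTypeParameters();
            Type[] actual = parameterized.getActualTypeArguments();
            for (int i = 0; i < declared.length && i < actual.length; i++) {
                typeArgsMap.put(declared[i].getName(), actual[i]);
            }
        }

        // Resolve the field's element type: List<V> -> V -> String.
        Field field = Holder.class.getDeclaredField("values");
        ParameterizedType fieldType = (ParameterizedType) field.getGenericType();
        Type elementType = fieldType.getActualTypeArguments()[0];
        if (elementType instanceof TypeVariable) {
            elementType = typeArgsMap.get(((TypeVariable<?>) elementType).getName());
        }
        // Prints "class java.lang.String"
        System.out.println("Resolved element type: " + elementType);
    }
}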
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.network; import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.activemq.broker.BrokerService; import org.apache.activemq.broker.jmx.ManagementContext; import org.apache.activemq.util.TestUtils; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jms.Connection; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Session; import javax.management.MBeanServer; import javax.management.ObjectInstance; import javax.management.ObjectName; import java.net.MalformedURLException; import java.util.List; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assume.assumeNotNull; public class DuplexNetworkMBeanTest { protected static final Logger LOG = LoggerFactory.getLogger(DuplexNetworkMBeanTest.class); protected final int numRestarts = 3; private int primaryBrokerPort; private int secondaryBrokerPort; private MBeanServer mBeanServer = new ManagementContext().getMBeanServer(); @Before public void setUp() throws Exception { List<Integer> ports = TestUtils.findOpenPorts(2); primaryBrokerPort = ports.get(0); secondaryBrokerPort = ports.get(1); } protected BrokerService createBroker() throws Exception { BrokerService broker = new BrokerService(); broker.setBrokerName("broker"); broker.getManagementContext().setCreateConnector(false); broker.addConnector("tcp://localhost:" + primaryBrokerPort + "?transport.reuseAddress=true"); return broker; } protected BrokerService createNetworkedBroker() throws Exception { BrokerService broker = new BrokerService(); broker.setBrokerName("networkedBroker"); broker.addConnector("tcp://localhost:" + secondaryBrokerPort + "?transport.reuseAddress=true"); broker.getManagementContext().setCreateConnector(false); NetworkConnector networkConnector = broker.addNetworkConnector("static:(tcp://localhost:" + primaryBrokerPort + "?wireFormat.maxInactivityDuration=500)?useExponentialBackOff=false"); networkConnector.setDuplex(true); return broker; } @Test public void testMbeanPresenceOnNetworkBrokerRestart() throws Exception { BrokerService broker = createBroker(); try { broker.start(); assertEquals(1, countMbeans(broker, "connector", 30000)); assertEquals(0, countMbeans(broker, "connectionName")); BrokerService networkedBroker = null; for (int i=0; i<numRestarts; i++) { networkedBroker = createNetworkedBroker(); try { networkedBroker.start(); assertEquals(1, countMbeans(networkedBroker, "networkBridge", 2000)); assertEquals(1, countMbeans(broker, "networkBridge", 2000)); assertEquals(2, countMbeans(broker, "connectionName")); } finally { networkedBroker.stop(); networkedBroker.waitUntilStopped(); } assertEquals(0, 
countMbeans(networkedBroker, "stopped")); assertEquals(0, countMbeans(broker, "networkBridge")); } assertEquals(0, countMbeans(networkedBroker, "networkBridge")); assertEquals(0, countMbeans(networkedBroker, "connector")); assertEquals(0, countMbeans(networkedBroker, "connectionName")); assertEquals(1, countMbeans(broker, "connector")); } finally { broker.stop(); broker.waitUntilStopped(); } } @Test public void testMbeanPresenceOnBrokerRestart() throws Exception { BrokerService networkedBroker = createNetworkedBroker(); try { networkedBroker.start(); assertEquals(1, countMbeans(networkedBroker, "connector=networkConnectors", 30000)); assertEquals(0, countMbeans(networkedBroker, "connectionName")); BrokerService broker = null; for (int i=0; i<numRestarts; i++) { broker = createBroker(); try { broker.start(); assertEquals(1, countMbeans(networkedBroker, "networkBridge", 5000)); assertEquals("restart number: " + i, 2, countMbeans(broker, "connectionName", 10000)); } finally { broker.stop(); broker.waitUntilStopped(); } assertEquals(0, countMbeans(broker, "stopped")); } assertEquals(1, countMbeans(networkedBroker, "connector=networkConnectors")); assertEquals(0, countMbeans(networkedBroker, "connectionName")); assertEquals(0, countMbeans(broker, "connectionName")); } finally { networkedBroker.stop(); networkedBroker.waitUntilStopped(); } } @Test public void testMBeansNotOverwrittenOnCleanup() throws Exception { BrokerService broker = createBroker(); BrokerService networkedBroker = createNetworkedBroker(); MessageProducer producerBroker = null; MessageConsumer consumerBroker = null; Session sessionNetworkBroker = null; Session sessionBroker = null; MessageProducer producerNetworkBroker = null; MessageConsumer consumerNetworkBroker = null; try { broker.start(); broker.waitUntilStarted(); networkedBroker.start(); try { assertEquals(2, countMbeans(networkedBroker, "connector=networkConnectors", 10000)); assertEquals(1, countMbeans(broker, "connector=duplexNetworkConnectors", 10000)); Connection brokerConnection = new ActiveMQConnectionFactory(broker.getVmConnectorURI()).createConnection(); brokerConnection.start(); sessionBroker = brokerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); producerBroker = sessionBroker.createProducer(sessionBroker.createTopic("testTopic")); consumerBroker = sessionBroker.createConsumer(sessionBroker.createTopic("testTopic")); Connection netWorkBrokerConnection = new ActiveMQConnectionFactory(networkedBroker.getVmConnectorURI()).createConnection(); netWorkBrokerConnection.start(); sessionNetworkBroker = netWorkBrokerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); producerNetworkBroker = sessionNetworkBroker.createProducer(sessionBroker.createTopic("testTopic")); consumerNetworkBroker = sessionNetworkBroker.createConsumer(sessionBroker.createTopic("testTopic")); assertEquals(4, countMbeans(broker, "destinationType=Topic,destinationName=testTopic", 15000)); assertEquals(4, countMbeans(networkedBroker, "destinationType=Topic,destinationName=testTopic", 15000)); producerBroker.send(sessionBroker.createTextMessage("test1")); producerNetworkBroker.send(sessionNetworkBroker.createTextMessage("test2")); assertEquals(2, countMbeans(networkedBroker, "destinationName=testTopic,direction=*", 10000)); assertEquals(2, countMbeans(broker, "destinationName=testTopic,direction=*", 10000)); } finally { if (producerBroker != null) { producerBroker.close(); } if (consumerBroker != null) { consumerBroker.close(); } if (sessionBroker != null) { sessionBroker.close(); } 
if (sessionNetworkBroker != null) { sessionNetworkBroker.close(); } if (producerNetworkBroker != null) { producerNetworkBroker.close(); } if (consumerNetworkBroker != null) { consumerNetworkBroker.close(); } networkedBroker.stop(); networkedBroker.waitUntilStopped(); } assertEquals(0, countMbeans(broker, "destinationName=testTopic,direction=*", 1500)); } finally { broker.stop(); broker.waitUntilStopped(); } } private int countMbeans(BrokerService broker, String type) throws Exception { return countMbeans(broker, type, 0); } private int countMbeans(BrokerService broker, String type, int timeout) throws Exception { final long expiryTime = System.currentTimeMillis() + timeout; if (!type.contains("=")) { type = type + "=*"; } final ObjectName beanName = new ObjectName("org.apache.activemq:type=Broker,brokerName=" + broker.getBrokerName() + "," + type +",*"); Set<ObjectName> mbeans = null; int count = 0; do { if (timeout > 0) { Thread.sleep(100); } LOG.info("Query name: " + beanName); mbeans = mBeanServer.queryNames(beanName, null); if (mbeans != null) { count = mbeans.size(); } else { logAllMbeans(broker); } } while ((mbeans == null || mbeans.isEmpty()) && expiryTime > System.currentTimeMillis()); // If port 1099 is in use when the Broker starts, starting the jmx connector // will fail. So, if we have no mbsc to query, skip the test. if (timeout > 0) { assumeNotNull(mbeans); } return count; } private void logAllMbeans(BrokerService broker) throws MalformedURLException { try { // trace all existing MBeans Set<?> all = mBeanServer.queryNames(null, null); LOG.info("Total MBean count=" + all.size()); for (Object o : all) { ObjectInstance bean = (ObjectInstance)o; LOG.info(bean.getObjectName().toString()); } } catch (Exception ignored) { LOG.warn("getMBeanServer ex: " + ignored); } } }
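/*
 * Illustrative sketch (separate from the ActiveMQ test above): the countMbeans
 * helper boils down to building an ObjectName query pattern with wildcards and
 * calling MBeanServer.queryNames. This minimal, self-contained example does the
 * same against the platform MBeanServer using the JDK's built-in java.lang domain;
 * the class name MBeanQuerySketch is hypothetical.
 */
import java.lang.management.ManagementFactory;
import java.util.Set;

import javax.management.MBeanServer;
import javax.management.ObjectName;

public class MBeanQuerySketch {

    public static void main(String[] args) throws Exception {
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();

        // A pattern ObjectName: the trailing '*' matches any MemoryPool MBean,
        // much like the test's "type=Broker,brokerName=...,<type>,*" patterns.
        ObjectName pattern = new ObjectName("java.lang:type=MemoryPool,name=*");

        // queryNames returns every registered MBean whose name matches the pattern.
        Set<ObjectName> matches = server.queryNames(pattern, null);
        System.out.println("Matching MBeans: " + matches.size());
        for (ObjectName name : matches) {
            System.out.println("  " + name);
        }
    }
}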
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.security; import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; import java.io.IOException; import java.net.InetAddress; import java.net.Socket; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.util.Base64; import java.util.Locale; import java.util.Map; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.sasl.AuthorizeCallback; import javax.security.sasl.RealmCallback; import javax.security.sasl.RealmChoiceCallback; import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.security.SaslRpcServer; import org.apache.hadoop.security.SaslRpcServer.AuthMethod; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.thrift.TException; import org.apache.thrift.TProcessor; import org.apache.thrift.protocol.TProtocol; import org.apache.thrift.transport.TSaslClientTransport; import org.apache.thrift.transport.TSaslServerTransport; import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; import org.apache.thrift.transport.TTransportFactory; /** * Functions that bridge Thrift's SASL transports to Hadoop's * SASL callback handlers and authentication classes. * HIVE-11378 This class is not directly used anymore. It now exists only as a shell to be * extended by HadoopThriftAuthBridge23 in 0.23 shims. I have made it abstract * to avoid maintenance errors. */ public abstract class HadoopThriftAuthBridge { private static final Logger LOG = LoggerFactory.getLogger(HadoopThriftAuthBridge.class); // We want to have only one auth bridge. In the past this was handled by ShimLoader, but since // we're no longer using that we'll do it here. 
private static volatile HadoopThriftAuthBridge self = null; public static HadoopThriftAuthBridge getBridge() { if (self == null) { synchronized (HadoopThriftAuthBridge.class) { if (self == null) self = new HadoopThriftAuthBridge23(); } } return self; } public Client createClient() { return new Client(); } public Client createClientWithConf(String authMethod) { UserGroupInformation ugi; try { ugi = UserGroupInformation.getLoginUser(); } catch(IOException e) { throw new IllegalStateException("Unable to get current login user: " + e, e); } if (loginUserHasCurrentAuthMethod(ugi, authMethod)) { LOG.debug("Not setting UGI conf as passed-in authMethod of {} = current", authMethod); return new Client(); } else { LOG.debug("Setting UGI conf as passed-in authMethod of {} != current", authMethod); Configuration conf = new Configuration(); conf.set(HADOOP_SECURITY_AUTHENTICATION, authMethod); UserGroupInformation.setConfiguration(conf); return new Client(); } } public Server createServer(String keytabFile, String principalConf, String clientConf) throws TTransportException { return new Server(keytabFile, principalConf, clientConf); } public String getServerPrincipal(String principalConfig, String host) throws IOException { String serverPrincipal = SecurityUtil.getServerPrincipal(principalConfig, host); String names[] = SaslRpcServer.splitKerberosName(serverPrincipal); if (names.length != 3) { throw new IOException( "Kerberos principal name does NOT have the expected hostname part: " + serverPrincipal); } return serverPrincipal; } /** * Method to get canonical-ized hostname, given a hostname (possibly a CNAME). * This should allow for service-principals to use simplified CNAMEs. * @param hostName The hostname to be canonical-ized. * @return Given a CNAME, the canonical-ized hostname is returned. If not found, the original hostname is returned. */ public String getCanonicalHostName(String hostName) { try { return InetAddress.getByName(hostName).getCanonicalHostName(); } catch(UnknownHostException exception) { LOG.warn("Could not retrieve canonical hostname for " + hostName, exception); return hostName; } } public UserGroupInformation getCurrentUGIWithConf(String authMethod) throws IOException { UserGroupInformation ugi; try { ugi = UserGroupInformation.getCurrentUser(); } catch(IOException e) { throw new IllegalStateException("Unable to get current user: " + e, e); } if (loginUserHasCurrentAuthMethod(ugi, authMethod)) { LOG.debug("Not setting UGI conf as passed-in authMethod of {} = current", authMethod); return ugi; } else { LOG.debug("Setting UGI conf as passed-in authMethod of {} != current", authMethod); Configuration conf = new Configuration(); conf.set(HADOOP_SECURITY_AUTHENTICATION, authMethod); UserGroupInformation.setConfiguration(conf); return UserGroupInformation.getCurrentUser(); } } /** * Return true if the current login user is already using the given authMethod. * * Used above to ensure we do not create a new Configuration object and as such * lose other settings such as the cluster to which the JVM is connected. 
Required * for oozie since it does not have a core-site.xml see HIVE-7682 */ private boolean loginUserHasCurrentAuthMethod(UserGroupInformation ugi, String sAuthMethod) { AuthenticationMethod authMethod; try { // based on SecurityUtil.getAuthenticationMethod() authMethod = Enum.valueOf(AuthenticationMethod.class, sAuthMethod.toUpperCase(Locale.ENGLISH)); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("Invalid attribute value for " + HADOOP_SECURITY_AUTHENTICATION + " of " + sAuthMethod, iae); } LOG.debug("Current authMethod = {}", ugi.getAuthenticationMethod()); return ugi.getAuthenticationMethod().equals(authMethod); } /** * Read and return Hadoop SASL configuration which can be configured using * "hadoop.rpc.protection" * @param conf * @return Hadoop SASL configuration */ public abstract Map<String, String> getHadoopSaslProperties(Configuration conf); public static class Client { /** * Create a client-side SASL transport that wraps an underlying transport. * * @param methodStr The authentication method to use. Currently only KERBEROS is * supported. * @param principalConfig The Kerberos principal of the target server. * @param underlyingTransport The underlying transport mechanism, usually a TSocket. * @param saslProps the sasl properties to create the client with */ public TTransport createClientTransport( String principalConfig, String host, String methodStr, String tokenStrForm, final TTransport underlyingTransport, final Map<String, String> saslProps) throws IOException { final AuthMethod method = AuthMethod.valueOf(AuthMethod.class, methodStr); TTransport saslTransport = null; switch (method) { case DIGEST: Token<DelegationTokenIdentifier> t= new Token<>(); t.decodeFromUrlString(tokenStrForm); try { saslTransport = new TSaslClientTransport( method.getMechanismName(), null, null, SaslRpcServer.SASL_DEFAULT_REALM, saslProps, new SaslClientCallbackHandler(t), underlyingTransport); } catch (TTransportException e) { e.printStackTrace(); } return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser()); case KERBEROS: String serverPrincipal = SecurityUtil.getServerPrincipal(principalConfig, host); final String names[] = SaslRpcServer.splitKerberosName(serverPrincipal); if (names.length != 3) { throw new IOException( "Kerberos principal name does NOT have the expected hostname part: " + serverPrincipal); } try { return UserGroupInformation.getCurrentUser().doAs( new PrivilegedExceptionAction<TUGIAssumingTransport>() { @Override public TUGIAssumingTransport run() throws IOException, TTransportException { TTransport saslTransport = new TSaslClientTransport( method.getMechanismName(), null, names[0], names[1], saslProps, null, underlyingTransport); return new TUGIAssumingTransport(saslTransport, UserGroupInformation.getCurrentUser()); } }); } catch (InterruptedException | SaslException se) { throw new IOException("Could not instantiate SASL transport", se); } default: throw new IOException("Unsupported authentication method: " + method); } } private static class SaslClientCallbackHandler implements CallbackHandler { private final String userName; private final char[] userPassword; public SaslClientCallbackHandler(Token<? 
extends TokenIdentifier> token) { this.userName = encodeIdentifier(token.getIdentifier()); this.userPassword = encodePassword(token.getPassword()); } @Override public void handle(Callback[] callbacks) throws UnsupportedCallbackException { NameCallback nc = null; PasswordCallback pc = null; RealmCallback rc = null; for (Callback callback : callbacks) { if (callback instanceof RealmChoiceCallback) { continue; } else if (callback instanceof NameCallback) { nc = (NameCallback) callback; } else if (callback instanceof PasswordCallback) { pc = (PasswordCallback) callback; } else if (callback instanceof RealmCallback) { rc = (RealmCallback) callback; } else { throw new UnsupportedCallbackException(callback, "Unrecognized SASL client callback"); } } if (nc != null) { LOG.debug("SASL client callback: setting username: {}", userName); nc.setName(userName); } if (pc != null) { LOG.debug("SASL client callback: setting userPassword"); pc.setPassword(userPassword); } if (rc != null) { LOG.debug("SASL client callback: setting realm: {}", rc.getDefaultText()); rc.setText(rc.getDefaultText()); } } static String encodeIdentifier(byte[] identifier) { return new String(Base64.getEncoder().encode(identifier), StandardCharsets.UTF_8); } static char[] encodePassword(byte[] password) { return Base64.getEncoder().encodeToString(password).toCharArray(); } } } public static class Server { public enum ServerMode { HIVESERVER2, METASTORE }; protected final UserGroupInformation realUgi; protected final UserGroupInformation clientValidationUGI; protected DelegationTokenSecretManager secretManager; public Server() throws TTransportException { try { realUgi = UserGroupInformation.getCurrentUser(); clientValidationUGI = UserGroupInformation.getCurrentUser(); } catch (IOException ioe) { throw new TTransportException(ioe); } } /** * Create a server with a kerberos keytab/principal. */ protected Server(String keytabFile, String principalConf, String clientConf) throws TTransportException { if (keytabFile == null || keytabFile.isEmpty()) { throw new TTransportException("No keytab specified"); } if (principalConf == null || principalConf.isEmpty()) { throw new TTransportException("No principal specified"); } if (clientConf == null || clientConf.isEmpty()) { // Don't bust existing setups. LOG.warn("Client-facing principal not set. Using server-side setting: " + principalConf); clientConf = principalConf; } // Login from the keytab String kerberosName; try { LOG.info("Logging in via CLIENT based principal"); kerberosName = SecurityUtil.getServerPrincipal(clientConf, "0.0.0.0"); UserGroupInformation.loginUserFromKeytab( kerberosName, keytabFile); clientValidationUGI = UserGroupInformation.getLoginUser(); assert clientValidationUGI.isFromKeytab(); LOG.info("Logging in via SERVER based principal"); kerberosName = SecurityUtil.getServerPrincipal(principalConf, "0.0.0.0"); UserGroupInformation.loginUserFromKeytab( kerberosName, keytabFile); realUgi = UserGroupInformation.getLoginUser(); assert realUgi.isFromKeytab(); } catch (IOException ioe) { throw new TTransportException(ioe); } } public void setSecretManager(DelegationTokenSecretManager secretManager) { this.secretManager = secretManager; } /** * Create a TTransportFactory that, upon connection of a client socket, * negotiates a Kerberized SASL transport. The resulting TTransportFactory * can be passed as both the input and output transport factory when * instantiating a TThreadPoolServer, for example. 
* * @param saslProps Map of SASL properties */ public TTransportFactory createTransportFactory(Map<String, String> saslProps) throws TTransportException { TSaslServerTransport.Factory transFactory = createSaslServerTransportFactory(saslProps); return new TUGIAssumingTransportFactory(transFactory, clientValidationUGI); } /** * Create a TSaslServerTransport.Factory that, upon connection of a client * socket, negotiates a Kerberized SASL transport. * * @param saslProps Map of SASL properties */ public TSaslServerTransport.Factory createSaslServerTransportFactory( Map<String, String> saslProps) throws TTransportException { // Parse out the kerberos principal, host, realm. String kerberosName = clientValidationUGI.getUserName(); final String names[] = SaslRpcServer.splitKerberosName(kerberosName); if (names.length != 3) { throw new TTransportException("Kerberos principal should have 3 parts: " + kerberosName); } TSaslServerTransport.Factory transFactory = new TSaslServerTransport.Factory(); transFactory.addServerDefinition( AuthMethod.KERBEROS.getMechanismName(), names[0], names[1], // two parts of kerberos principal saslProps, new SaslRpcServer.SaslGssCallbackHandler()); transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM, saslProps, new SaslDigestCallbackHandler(secretManager)); return transFactory; } /** * Wrap a TTransportFactory in such a way that, before processing any RPC, it * assumes the UserGroupInformation of the user authenticated by * the SASL transport. */ public TTransportFactory wrapTransportFactory(TTransportFactory transFactory) { return new TUGIAssumingTransportFactory(transFactory, realUgi); } /** * Similar to the above function, except use client facing UGI. */ public TTransportFactory wrapTransportFactoryInClientUGI(TTransportFactory transFactory) { return new TUGIAssumingTransportFactory(transFactory, clientValidationUGI); } /** * Wrap a TProcessor in such a way that, before processing any RPC, it * assumes the UserGroupInformation of the user authenticated by * the SASL transport. 
*/ public TProcessor wrapProcessor(TProcessor processor) { return new TUGIAssumingProcessor(processor, secretManager, true); } /** * Wrap a TProcessor to capture the client information like connecting userid, ip etc */ public TProcessor wrapNonAssumingProcessor(TProcessor processor) { return new TUGIAssumingProcessor(processor, secretManager, false); } final static ThreadLocal<InetAddress> remoteAddress = new ThreadLocal<InetAddress>() { @Override protected InetAddress initialValue() { return null; } }; public InetAddress getRemoteAddress() { return remoteAddress.get(); } final static ThreadLocal<AuthenticationMethod> authenticationMethod = new ThreadLocal<AuthenticationMethod>() { @Override protected AuthenticationMethod initialValue() { return AuthenticationMethod.TOKEN; } }; private static ThreadLocal<String> remoteUser = new ThreadLocal<String> () { @Override protected String initialValue() { return null; } }; public String getRemoteUser() { return remoteUser.get(); } private final static ThreadLocal<String> userAuthMechanism = new ThreadLocal<String>() { @Override protected String initialValue() { return AuthMethod.KERBEROS.getMechanismName(); } }; public String getUserAuthMechanism() { return userAuthMechanism.get(); } /** CallbackHandler for SASL DIGEST-MD5 mechanism */ // This code is pretty much completely based on Hadoop's // SaslRpcServer.SaslDigestCallbackHandler - the only reason we could not // use that Hadoop class as-is was because it needs a Server.Connection object // which is relevant in hadoop rpc but not here in the metastore - so the // code below does not deal with the Connection Server.object. static class SaslDigestCallbackHandler implements CallbackHandler { private final DelegationTokenSecretManager secretManager; public SaslDigestCallbackHandler( DelegationTokenSecretManager secretManager) { this.secretManager = secretManager; } private char[] getPassword(DelegationTokenIdentifier tokenid) throws InvalidToken { return encodePassword(secretManager.retrievePassword(tokenid)); } private char[] encodePassword(byte[] password) { return Base64.getEncoder().encodeToString(password).toCharArray(); } /** {@inheritDoc} */ @Override public void handle(Callback[] callbacks) throws InvalidToken, UnsupportedCallbackException { NameCallback nc = null; PasswordCallback pc = null; AuthorizeCallback ac = null; for (Callback callback : callbacks) { if (callback instanceof AuthorizeCallback) { ac = (AuthorizeCallback) callback; } else if (callback instanceof NameCallback) { nc = (NameCallback) callback; } else if (callback instanceof PasswordCallback) { pc = (PasswordCallback) callback; } else if (callback instanceof RealmCallback) { continue; // realm is ignored } else { throw new UnsupportedCallbackException(callback, "Unrecognized SASL DIGEST-MD5 Callback"); } } if (pc != null) { DelegationTokenIdentifier tokenIdentifier = SaslRpcServer. 
getIdentifier(nc.getDefaultName(), secretManager); char[] password = getPassword(tokenIdentifier); LOG.debug("SASL server DIGEST-MD5 callback: setting password " + "for client:{}", tokenIdentifier.getUser()); pc.setPassword(password); } if (ac != null) { String authid = ac.getAuthenticationID(); String authzid = ac.getAuthorizationID(); if (authid.equals(authzid)) { ac.setAuthorized(true); } else { ac.setAuthorized(false); } if (ac.isAuthorized()) { if (LOG.isDebugEnabled()) { String username = SaslRpcServer.getIdentifier(authzid, secretManager).getUser().getUserName(); LOG.debug("SASL server DIGEST-MD5 callback: setting " + "canonicalized client ID: " + username); } ac.setAuthorizedID(authzid); } } } } /** * Processor that pulls the SaslServer object out of the transport, and * assumes the remote user's UGI before calling through to the original * processor. * * This is used on the server side to set the UGI for each specific call. */ protected static class TUGIAssumingProcessor implements TProcessor { final TProcessor wrapped; DelegationTokenSecretManager secretManager; boolean useProxy; TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager, boolean useProxy) { this.wrapped = wrapped; this.secretManager = secretManager; this.useProxy = useProxy; } @Override public void process(final TProtocol inProt, final TProtocol outProt) throws TException { TTransport trans = inProt.getTransport(); if (!(trans instanceof TSaslServerTransport)) { throw new TException("Unexpected non-SASL transport " + trans.getClass()); } TSaslServerTransport saslTrans = (TSaslServerTransport)trans; SaslServer saslServer = saslTrans.getSaslServer(); String authId = saslServer.getAuthorizationID(); LOG.debug("Sasl Server AUTH ID: {}", authId); String endUser = authId; Socket socket = ((TSocket)(saslTrans.getUnderlyingTransport())).getSocket(); remoteAddress.set(socket.getInetAddress()); String mechanismName = saslServer.getMechanismName(); userAuthMechanism.set(mechanismName); if (AuthMethod.PLAIN.getMechanismName().equalsIgnoreCase(mechanismName)) { remoteUser.set(endUser); wrapped.process(inProt, outProt); return; } authenticationMethod.set(AuthenticationMethod.KERBEROS); if(AuthMethod.TOKEN.getMechanismName().equalsIgnoreCase(mechanismName)) { try { TokenIdentifier tokenId = SaslRpcServer.getIdentifier(authId, secretManager); endUser = tokenId.getUser().getUserName(); authenticationMethod.set(AuthenticationMethod.TOKEN); } catch (InvalidToken e) { throw new TException(e.getMessage()); } } UserGroupInformation clientUgi = null; try { if (useProxy) { clientUgi = UserGroupInformation.createProxyUser( endUser, UserGroupInformation.getLoginUser()); remoteUser.set(clientUgi.getShortUserName()); LOG.debug("Set remoteUser: {}", remoteUser.get()); clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() { @Override public Boolean run() { try { wrapped.process(inProt, outProt); return true; } catch (TException te) { throw new RuntimeException(te); } } }); return; } else { // use the short user name for the request UserGroupInformation endUserUgi = UserGroupInformation.createRemoteUser(endUser); remoteUser.set(endUserUgi.getShortUserName()); LOG.debug("Set remoteUser: {}, from endUser: {}", remoteUser.get(), endUser); wrapped.process(inProt, outProt); return; } } catch (RuntimeException rte) { if (rte.getCause() instanceof TException) { throw (TException)rte.getCause(); } throw rte; } catch (InterruptedException ie) { throw new RuntimeException(ie); // unexpected! 
} catch (IOException ioe) { throw new RuntimeException(ioe); // unexpected! } finally { if (clientUgi != null) { try { FileSystem.closeAllForUGI(clientUgi); } catch (IOException exception) { LOG.error("Could not clean up file-system handles for UGI: " + clientUgi, exception); } } } } } /** * A TransportFactory that wraps another one, but assumes a specified UGI * before calling through. * * This is used on the server side to assume the server's Principal when accepting * clients. */ static class TUGIAssumingTransportFactory extends TTransportFactory { private final UserGroupInformation ugi; private final TTransportFactory wrapped; public TUGIAssumingTransportFactory(TTransportFactory wrapped, UserGroupInformation ugi) { assert wrapped != null; assert ugi != null; this.wrapped = wrapped; this.ugi = ugi; } @Override public TTransport getTransport(final TTransport trans) { return ugi.doAs(new PrivilegedAction<TTransport>() { @Override public TTransport run() { try { return wrapped.getTransport(trans); } catch (TTransportException e) { e.printStackTrace(); } return null; } }); } } } }
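/*
 * Illustrative sketch (separate from the Hive class above): HadoopThriftAuthBridge
 * combines two idioms that are easy to miss in the full listing: a lazily created
 * singleton guarded by double-checked locking on a volatile field (getBridge), and
 * per-connection data (remote user, remote address) published to downstream code
 * through ThreadLocals that a wrapping processor sets before delegating. The toy
 * names below (LazyBridgeSketch, process) are hypothetical and only demonstrate
 * those two idioms.
 */
public class LazyBridgeSketch {

    // Lazily created singleton: the volatile field makes the double-checked
    // locking below safe, mirroring HadoopThriftAuthBridge.getBridge().
    private static volatile LazyBridgeSketch instance;

    public static LazyBridgeSketch getInstance() {
        if (instance == null) {
            synchronized (LazyBridgeSketch.class) {
                if (instance == null) {
                    instance = new LazyBridgeSketch();
                }
            }
        }
        return instance;
    }

    // Per-request state exposed via a ThreadLocal, the same way the bridge's
    // Server exposes remoteUser/remoteAddress to whatever the processor wraps.
    private static final ThreadLocal<String> remoteUser = new ThreadLocal<String>();

    public static String getRemoteUser() {
        return remoteUser.get();
    }

    public void process(String authenticatedUser, Runnable wrapped) {
        remoteUser.set(authenticatedUser);    // set before delegating
        try {
            wrapped.run();                    // the wrapped work can call getRemoteUser()
        } finally {
            remoteUser.remove();              // avoid leaking state across pooled threads
        }
    }

    public static void main(String[] args) {
        getInstance().process("alice",
                () -> System.out.println("remote user = " + getRemoteUser()));
    }
}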
/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.vending.expansion.downloader.impl; import com.google.android.vending.expansion.downloader.Constants; import com.google.android.vending.expansion.downloader.DownloadProgressInfo; import com.google.android.vending.expansion.downloader.DownloaderServiceMarshaller; import com.google.android.vending.expansion.downloader.Helpers; import com.google.android.vending.expansion.downloader.IDownloaderClient; import com.google.android.vending.expansion.downloader.IDownloaderService; import com.google.android.vending.expansion.downloader.IStub; import com.google.android.vending.licensing.AESObfuscator; import com.google.android.vending.licensing.APKExpansionPolicy; import com.google.android.vending.licensing.LicenseChecker; import com.google.android.vending.licensing.LicenseCheckerCallback; import com.google.android.vending.licensing.Policy; import android.app.AlarmManager; import android.app.PendingIntent; import android.app.Service; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.ApplicationInfo; import android.content.pm.PackageInfo; import android.content.pm.PackageManager.NameNotFoundException; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.net.wifi.WifiManager; import android.os.Handler; import android.os.IBinder; import android.os.Messenger; import android.os.SystemClock; import android.provider.Settings.Secure; import android.telephony.TelephonyManager; import android.util.Log; import java.io.File; /** * Performs the background downloads requested by applications that use the * Downloads provider. This service does not run as a foreground task, so * Android may kill it off at will, but it will try to restart itself if it can. * Note that Android by default will kill off any process that has an open file * handle on the shared (SD Card) partition if the partition is unmounted. */ public abstract class DownloaderService extends CustomIntentService implements IDownloaderService { public DownloaderService() { super("LVLDownloadService"); } private static final String LOG_TAG = "LVLDL"; // the following NETWORK_* constants are used to indicates specific reasons // for disallowing a // download from using a network, since specific causes can require special // handling /** * The network is usable for the given download. */ public static final int NETWORK_OK = 1; /** * There is no network connectivity. */ public static final int NETWORK_NO_CONNECTION = 2; /** * The download exceeds the maximum size for this network. */ public static final int NETWORK_UNUSABLE_DUE_TO_SIZE = 3; /** * The download exceeds the recommended maximum size for this network, the * user must confirm for this download to proceed without WiFi. 
     */
    public static final int NETWORK_RECOMMENDED_UNUSABLE_DUE_TO_SIZE = 4;

    /**
     * The current connection is roaming, and the download can't proceed over a
     * roaming connection.
     */
    public static final int NETWORK_CANNOT_USE_ROAMING = 5;

    /**
     * The app requesting the download specified that it can't use the current
     * network connection.
     */
    public static final int NETWORK_TYPE_DISALLOWED_BY_REQUESTOR = 6;

    /**
     * For intents used to notify the user that a download exceeds a size
     * threshold, if this extra is true, WiFi is required for this download
     * size; otherwise, it is only recommended.
     */
    public static final String EXTRA_IS_WIFI_REQUIRED = "isWifiRequired";
    public static final String EXTRA_FILE_NAME = "downloadId";

    /**
     * Used with DOWNLOAD_STATUS
     */
    public static final String EXTRA_STATUS_STATE = "ESS";
    public static final String EXTRA_STATUS_TOTAL_SIZE = "ETS";
    public static final String EXTRA_STATUS_CURRENT_FILE_SIZE = "CFS";
    public static final String EXTRA_STATUS_TOTAL_PROGRESS = "TFP";
    public static final String EXTRA_STATUS_CURRENT_PROGRESS = "CFP";

    public static final String ACTION_DOWNLOADS_CHANGED = "downloadsChanged";

    /**
     * Broadcast intent action sent by the download manager when a download
     * completes.
     */
    public final static String ACTION_DOWNLOAD_COMPLETE = "lvldownloader.intent.action.DOWNLOAD_COMPLETE";

    /**
     * Broadcast intent action sent by the download manager when download status
     * changes.
     */
    public final static String ACTION_DOWNLOAD_STATUS = "lvldownloader.intent.action.DOWNLOAD_STATUS";

    /*
     * Lists the states that the download manager can set on a download to
     * notify applications of the download progress. The codes follow the HTTP
     * families:<br> 1xx: informational<br> 2xx: success<br> 3xx: redirects (not
     * used by the download manager)<br> 4xx: client errors<br> 5xx: server
     * errors
     */

    /**
     * Returns whether the status is informational (i.e. 1xx).
     */
    public static boolean isStatusInformational(int status) {
        return (status >= 100 && status < 200);
    }

    /**
     * Returns whether the status is a success (i.e. 2xx).
     */
    public static boolean isStatusSuccess(int status) {
        return (status >= 200 && status < 300);
    }

    /**
     * Returns whether the status is an error (i.e. 4xx or 5xx).
     */
    public static boolean isStatusError(int status) {
        return (status >= 400 && status < 600);
    }

    /**
     * Returns whether the status is a client error (i.e. 4xx).
     */
    public static boolean isStatusClientError(int status) {
        return (status >= 400 && status < 500);
    }

    /**
     * Returns whether the status is a server error (i.e. 5xx).
     */
    public static boolean isStatusServerError(int status) {
        return (status >= 500 && status < 600);
    }

    /**
     * Returns whether the download has completed (either with success or
     * error).
     */
    public static boolean isStatusCompleted(int status) {
        return (status >= 200 && status < 300) || (status >= 400 && status < 600);
    }

    /**
     * This download hasn't started yet
     */
    public static final int STATUS_PENDING = 190;

    /**
     * This download has started
     */
    public static final int STATUS_RUNNING = 192;

    /**
     * This download has been paused by the owning app.
     */
    public static final int STATUS_PAUSED_BY_APP = 193;

    /**
     * This download encountered some network error and is waiting before
     * retrying the request.
     */
    public static final int STATUS_WAITING_TO_RETRY = 194;

    /**
     * This download is waiting for network connectivity to proceed.
     */
    public static final int STATUS_WAITING_FOR_NETWORK = 195;

    /**
     * This download is waiting for a Wi-Fi connection to proceed or for
     * permission to download over cellular.
     */
    public static final int STATUS_QUEUED_FOR_WIFI_OR_CELLULAR_PERMISSION = 196;

    /**
     * This download is waiting for a Wi-Fi connection to proceed.
     */
    public static final int STATUS_QUEUED_FOR_WIFI = 197;

    /**
     * This download has successfully completed. Warning: there might be other
     * status values that indicate success in the future. Use isStatusSuccess() to
     * capture the entire category.
     *
     * @hide
     */
    public static final int STATUS_SUCCESS = 200;

    /**
     * The requested URL is no longer available
     */
    public static final int STATUS_FORBIDDEN = 403;

    /**
     * The file was delivered incorrectly
     */
    public static final int STATUS_FILE_DELIVERED_INCORRECTLY = 487;

    /**
     * The requested destination file already exists.
     */
    public static final int STATUS_FILE_ALREADY_EXISTS_ERROR = 488;

    /**
     * Some possibly transient error occurred, but we can't resume the download.
     */
    public static final int STATUS_CANNOT_RESUME = 489;

    /**
     * This download was canceled
     *
     * @hide
     */
    public static final int STATUS_CANCELED = 490;

    /**
     * This download has completed with an error. Warning: there will be other
     * status values that indicate errors in the future. Use isStatusError() to
     * capture the entire category.
     */
    public static final int STATUS_UNKNOWN_ERROR = 491;

    /**
     * This download couldn't be completed because of a storage issue.
     * Typically, that's because the filesystem is missing or full. Use the more
     * specific {@link #STATUS_INSUFFICIENT_SPACE_ERROR} and
     * {@link #STATUS_DEVICE_NOT_FOUND_ERROR} when appropriate.
     *
     * @hide
     */
    public static final int STATUS_FILE_ERROR = 492;

    /**
     * This download couldn't be completed because of an HTTP redirect response
     * that the download manager couldn't handle.
     *
     * @hide
     */
    public static final int STATUS_UNHANDLED_REDIRECT = 493;

    /**
     * This download couldn't be completed because of an unspecified unhandled
     * HTTP code.
     *
     * @hide
     */
    public static final int STATUS_UNHANDLED_HTTP_CODE = 494;

    /**
     * This download couldn't be completed because of an error receiving or
     * processing data at the HTTP level.
     *
     * @hide
     */
    public static final int STATUS_HTTP_DATA_ERROR = 495;

    /**
     * This download couldn't be completed because of an HttpException while
     * setting up the request.
     *
     * @hide
     */
    public static final int STATUS_HTTP_EXCEPTION = 496;

    /**
     * This download couldn't be completed because there were too many
     * redirects.
     *
     * @hide
     */
    public static final int STATUS_TOO_MANY_REDIRECTS = 497;

    /**
     * This download couldn't be completed due to insufficient storage space.
     * Typically, this is because the SD card is full.
     *
     * @hide
     */
    public static final int STATUS_INSUFFICIENT_SPACE_ERROR = 498;

    /**
     * This download couldn't be completed because no external storage device
     * was found. Typically, this is because the SD card is not mounted.
     *
     * @hide
     */
    public static final int STATUS_DEVICE_NOT_FOUND_ERROR = 499;

    /**
     * This download is allowed to run.
     *
     * @hide
     */
    public static final int CONTROL_RUN = 0;

    /**
     * This download must pause at the first opportunity.
     *
     * @hide
     */
    public static final int CONTROL_PAUSED = 1;

    /**
     * This download is visible but only shows in the notifications while it's
     * in progress.
     *
     * @hide
     */
    public static final int VISIBILITY_VISIBLE = 0;

    /**
     * This download is visible and shows in the notifications while in progress
     * and after completion.
     *
     * @hide
     */
    public static final int VISIBILITY_VISIBLE_NOTIFY_COMPLETED = 1;

    /**
     * This download doesn't show in the UI or in the notifications.
* * @hide */ public static final int VISIBILITY_HIDDEN = 2; /** * Bit flag for setAllowedNetworkTypes corresponding to * {@link ConnectivityManager#TYPE_MOBILE}. */ public static final int NETWORK_MOBILE = 1 << 0; /** * Bit flag for setAllowedNetworkTypes corresponding to * {@link ConnectivityManager#TYPE_WIFI}. */ public static final int NETWORK_WIFI = 1 << 1; private final static String TEMP_EXT = ".tmp"; /** * Service thread status */ private static boolean sIsRunning; @Override public IBinder onBind(Intent paramIntent) { Log.d(Constants.TAG, "Service Bound"); return this.mServiceMessenger.getBinder(); } /** * Network state. */ private boolean mIsConnected; private boolean mIsFailover; private boolean mIsCellularConnection; private boolean mIsRoaming; private boolean mIsAtLeast3G; private boolean mIsAtLeast4G; private boolean mStateChanged; /** * Download state */ private int mControl; private int mStatus; public boolean isWiFi() { return mIsConnected && !mIsCellularConnection; } /** * Bindings to important services */ private ConnectivityManager mConnectivityManager; private WifiManager mWifiManager; /** * Package we are downloading for (defaults to package of application) */ private PackageInfo mPackageInfo; /** * Byte counts */ long mBytesSoFar; long mTotalLength; int mFileCount; /** * Used for calculating time remaining and speed */ long mBytesAtSample; long mMillisecondsAtSample; float mAverageDownloadSpeed; /** * Our binding to the network state broadcasts */ private BroadcastReceiver mConnReceiver; final private IStub mServiceStub = DownloaderServiceMarshaller.CreateStub(this); final private Messenger mServiceMessenger = mServiceStub.getMessenger(); private Messenger mClientMessenger; private DownloadNotification mNotification; private PendingIntent mPendingIntent; private PendingIntent mAlarmIntent; /** * Updates the network type based upon the type and subtype returned from * the connectivity manager. Subtype is only used for cellular signals. 
* * @param type * @param subType */ private void updateNetworkType(int type, int subType) { switch (type) { case ConnectivityManager.TYPE_WIFI: case ConnectivityManager.TYPE_ETHERNET: case ConnectivityManager.TYPE_BLUETOOTH: mIsCellularConnection = false; mIsAtLeast3G = false; mIsAtLeast4G = false; break; case ConnectivityManager.TYPE_WIMAX: mIsCellularConnection = true; mIsAtLeast3G = true; mIsAtLeast4G = true; break; case ConnectivityManager.TYPE_MOBILE: mIsCellularConnection = true; switch (subType) { case TelephonyManager.NETWORK_TYPE_1xRTT: case TelephonyManager.NETWORK_TYPE_CDMA: case TelephonyManager.NETWORK_TYPE_EDGE: case TelephonyManager.NETWORK_TYPE_GPRS: case TelephonyManager.NETWORK_TYPE_IDEN: mIsAtLeast3G = false; mIsAtLeast4G = false; break; case TelephonyManager.NETWORK_TYPE_HSDPA: case TelephonyManager.NETWORK_TYPE_HSUPA: case TelephonyManager.NETWORK_TYPE_HSPA: case TelephonyManager.NETWORK_TYPE_EVDO_0: case TelephonyManager.NETWORK_TYPE_EVDO_A: case TelephonyManager.NETWORK_TYPE_UMTS: mIsAtLeast3G = true; mIsAtLeast4G = false; break; case TelephonyManager.NETWORK_TYPE_LTE: // 4G case TelephonyManager.NETWORK_TYPE_EHRPD: // 3G ++ interop // with 4G case TelephonyManager.NETWORK_TYPE_HSPAP: // 3G ++ but // marketed as // 4G mIsAtLeast3G = true; mIsAtLeast4G = true; break; default: mIsCellularConnection = false; mIsAtLeast3G = false; mIsAtLeast4G = false; } } } private void updateNetworkState(NetworkInfo info) { boolean isConnected = mIsConnected; boolean isFailover = mIsFailover; boolean isCellularConnection = mIsCellularConnection; boolean isRoaming = mIsRoaming; boolean isAtLeast3G = mIsAtLeast3G; if (null != info) { mIsRoaming = info.isRoaming(); mIsFailover = info.isFailover(); mIsConnected = info.isConnected(); updateNetworkType(info.getType(), info.getSubtype()); } else { mIsRoaming = false; mIsFailover = false; mIsConnected = false; updateNetworkType(-1, -1); } mStateChanged = (mStateChanged || isConnected != mIsConnected || isFailover != mIsFailover || isCellularConnection != mIsCellularConnection || isRoaming != mIsRoaming || isAtLeast3G != mIsAtLeast3G); if (Constants.LOGVV) { if (mStateChanged) { Log.v(LOG_TAG, "Network state changed: "); Log.v(LOG_TAG, "Starting State: " + (isConnected ? "Connected " : "Not Connected ") + (isCellularConnection ? "Cellular " : "WiFi ") + (isRoaming ? "Roaming " : "Local ") + (isAtLeast3G ? "3G+ " : "<3G ")); Log.v(LOG_TAG, "Ending State: " + (mIsConnected ? "Connected " : "Not Connected ") + (mIsCellularConnection ? "Cellular " : "WiFi ") + (mIsRoaming ? "Roaming " : "Local ") + (mIsAtLeast3G ? "3G+ " : "<3G ")); if (isServiceRunning()) { if (mIsRoaming) { mStatus = STATUS_WAITING_FOR_NETWORK; mControl = CONTROL_PAUSED; } else if (mIsCellularConnection) { DownloadsDB db = DownloadsDB.getDB(this); int flags = db.getFlags(); if (0 == (flags & FLAGS_DOWNLOAD_OVER_CELLULAR)) { mStatus = STATUS_QUEUED_FOR_WIFI; mControl = CONTROL_PAUSED; } } } } } } /** * Polls the network state, setting the flags appropriately. 
     */
    void pollNetworkState() {
        if (null == mConnectivityManager) {
            mConnectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        }
        if (null == mWifiManager) {
            mWifiManager = (WifiManager) getApplicationContext().getSystemService(Context.WIFI_SERVICE);
        }
        if (mConnectivityManager == null) {
            Log.w(Constants.TAG,
                    "couldn't get connectivity manager to poll network state");
        } else {
            NetworkInfo activeInfo = mConnectivityManager
                    .getActiveNetworkInfo();
            updateNetworkState(activeInfo);
        }
    }

    public static final int NO_DOWNLOAD_REQUIRED = 0;
    public static final int LVL_CHECK_REQUIRED = 1;
    public static final int DOWNLOAD_REQUIRED = 2;

    public static final String EXTRA_PACKAGE_NAME = "EPN";
    public static final String EXTRA_PENDING_INTENT = "EPI";
    public static final String EXTRA_MESSAGE_HANDLER = "EMH";

    /**
     * Returns true if the LVL check is required
     *
     * @param db a downloads DB synchronized with the latest state
     * @param pi the package info for the project
     * @return true if an LVL check needs to be performed, i.e. the version code
     *         stored in the downloads DB differs from the installed APK's
     *         version code
     */
    private static boolean isLVLCheckRequired(DownloadsDB db, PackageInfo pi) {
        // we need to update the LVL check and get a successful status to
        // proceed
        if (db.mVersionCode != pi.versionCode) {
            return true;
        }
        return false;
    }

    /**
     * Careful! Only use this internally.
     *
     * @return whether we think the service is running
     */
    private static synchronized boolean isServiceRunning() {
        return sIsRunning;
    }

    private static synchronized void setServiceRunning(boolean isRunning) {
        sIsRunning = isRunning;
    }

    public static int startDownloadServiceIfRequired(Context context,
            Intent intent, Class<?> serviceClass) throws NameNotFoundException {
        final PendingIntent pendingIntent = (PendingIntent) intent
                .getParcelableExtra(EXTRA_PENDING_INTENT);
        return startDownloadServiceIfRequired(context, pendingIntent,
                serviceClass);
    }

    public static int startDownloadServiceIfRequired(Context context,
            PendingIntent pendingIntent, Class<?> serviceClass)
            throws NameNotFoundException {
        String packageName = context.getPackageName();
        String className = serviceClass.getName();
        return startDownloadServiceIfRequired(context, pendingIntent,
                packageName, className);
    }

    /**
     * Starts the download if necessary. This function starts a flow that does
     * many things: 1) Checks to see if the APK version has been checked and the
     * metadata database updated 2) If the APK version does not match, checks
     * the new LVL status to see if a new download is required 3) If the APK
     * version does match, then checks to see if the download(s) have been
     * completed 4) If the downloads have been completed, returns
     * NO_DOWNLOAD_REQUIRED. The idea is that this can be called during the
     * startup of an application to quickly ascertain if the application needs
     * to wait to hear about any updated APK expansion files. Note that this
     * does mean that the application MUST be run for the first time with a
     * network connection, even if Market delivers all of the files.
     *
     * @param context the context used to start the downloader service
     * @param pendingIntent the pending intent handed to the downloader service
     * @param classPackage the package of the downloader service class
     * @param className the fully qualified name of the downloader service class
     * @return NO_DOWNLOAD_REQUIRED if the app can continue, or
     *         LVL_CHECK_REQUIRED / DOWNLOAD_REQUIRED if the app should wait
     *         for more guidance from the downloader
     * @throws NameNotFoundException
     */
    public static int startDownloadServiceIfRequired(Context context,
            PendingIntent pendingIntent, String classPackage, String className)
            throws NameNotFoundException {
        // first: do we need to do an LVL update?
// we begin by getting our APK version from the package manager final PackageInfo pi = context.getPackageManager().getPackageInfo( context.getPackageName(), 0); int status = NO_DOWNLOAD_REQUIRED; // the database automatically reads the metadata for version code // and download status when the instance is created DownloadsDB db = DownloadsDB.getDB(context); // we need to update the LVL check and get a successful status to // proceed if (isLVLCheckRequired(db, pi)) { status = LVL_CHECK_REQUIRED; } // we don't have to update LVL. do we still have a download to start? if (db.mStatus == 0) { DownloadInfo[] infos = db.getDownloads(); if (null != infos) { for (DownloadInfo info : infos) { if (!Helpers.doesFileExist(context, info.mFileName, info.mTotalBytes, true)) { status = DOWNLOAD_REQUIRED; db.updateStatus(-1); break; } } } } else { status = DOWNLOAD_REQUIRED; } switch (status) { case DOWNLOAD_REQUIRED: case LVL_CHECK_REQUIRED: Intent fileIntent = new Intent(); fileIntent.setClassName(classPackage, className); fileIntent.putExtra(EXTRA_PENDING_INTENT, pendingIntent); context.startService(fileIntent); break; } return status; } @Override public void requestAbortDownload() { mControl = CONTROL_PAUSED; mStatus = STATUS_CANCELED; } @Override public void requestPauseDownload() { mControl = CONTROL_PAUSED; mStatus = STATUS_PAUSED_BY_APP; } @Override public void setDownloadFlags(int flags) { DownloadsDB.getDB(this).updateFlags(flags); } @Override public void requestContinueDownload() { if (mControl == CONTROL_PAUSED) { mControl = CONTROL_RUN; } Intent fileIntent = new Intent(this, this.getClass()); fileIntent.putExtra(EXTRA_PENDING_INTENT, mPendingIntent); this.startService(fileIntent); } public abstract String getPublicKey(); public abstract byte[] getSALT(); public abstract String getAlarmReceiverClassName(); private class LVLRunnable implements Runnable { LVLRunnable(Context context, PendingIntent intent) { mContext = context; mPendingIntent = intent; } final Context mContext; @Override public void run() { setServiceRunning(true); mNotification.onDownloadStateChanged(IDownloaderClient.STATE_FETCHING_URL); String deviceId = Secure.getString(mContext.getContentResolver(), Secure.ANDROID_ID); final APKExpansionPolicy aep = new APKExpansionPolicy(mContext, new AESObfuscator(getSALT(), mContext.getPackageName(), deviceId)); // reset our policy back to the start of the world to force a // re-check aep.resetPolicy(); // let's try and get the OBB file from LVL first // Construct the LicenseChecker with a Policy. final LicenseChecker checker = new LicenseChecker(mContext, aep, getPublicKey() // Your public licensing key. 
); checker.checkAccess(new LicenseCheckerCallback() { @Override public void allow(int reason) { try { int count = aep.getExpansionURLCount(); DownloadsDB db = DownloadsDB.getDB(mContext); int status = 0; if (count != 0) { for (int i = 0; i < count; i++) { String currentFileName = aep .getExpansionFileName(i); if (null != currentFileName) { DownloadInfo di = new DownloadInfo(i, currentFileName, mContext.getPackageName()); long fileSize = aep.getExpansionFileSize(i); if (handleFileUpdated(db, i, currentFileName, fileSize)) { status |= -1; di.resetDownload(); di.mUri = aep.getExpansionURL(i); di.mTotalBytes = fileSize; di.mStatus = status; db.updateDownload(di); } else { // we need to read the download // information // from // the database DownloadInfo dbdi = db .getDownloadInfoByFileName(di.mFileName); if (null == dbdi) { // the file exists already and is // the // correct size // was delivered by Market or // through // another mechanism Log.d(LOG_TAG, "file " + di.mFileName + " found. Not downloading."); di.mStatus = STATUS_SUCCESS; di.mTotalBytes = fileSize; di.mCurrentBytes = fileSize; di.mUri = aep.getExpansionURL(i); db.updateDownload(di); } else if (dbdi.mStatus != STATUS_SUCCESS) { // we just update the URL dbdi.mUri = aep.getExpansionURL(i); db.updateDownload(dbdi); status |= -1; } } } } } // first: do we need to do an LVL update? // we begin by getting our APK version from the package // manager PackageInfo pi; try { pi = mContext.getPackageManager().getPackageInfo( mContext.getPackageName(), 0); db.updateMetadata(pi.versionCode, status); Class<?> serviceClass = DownloaderService.this.getClass(); switch (startDownloadServiceIfRequired(mContext, mPendingIntent, serviceClass)) { case NO_DOWNLOAD_REQUIRED: mNotification .onDownloadStateChanged(IDownloaderClient.STATE_COMPLETED); break; case LVL_CHECK_REQUIRED: // DANGER WILL ROBINSON! Log.e(LOG_TAG, "In LVL checking loop!"); mNotification .onDownloadStateChanged(IDownloaderClient.STATE_FAILED_UNLICENSED); throw new RuntimeException( "Error with LVL checking and database integrity"); case DOWNLOAD_REQUIRED: // do nothing. the download will notify the // application // when things are done break; } } catch (NameNotFoundException e1) { e1.printStackTrace(); throw new RuntimeException( "Error with getting information from package name"); } } finally { setServiceRunning(false); } } @Override public void dontAllow(int reason) { try { switch (reason) { case Policy.NOT_LICENSED: mNotification .onDownloadStateChanged(IDownloaderClient.STATE_FAILED_UNLICENSED); break; case Policy.RETRY: mNotification .onDownloadStateChanged(IDownloaderClient.STATE_FAILED_FETCHING_URL); break; } } finally { setServiceRunning(false); } } @Override public void applicationError(int errorCode) { try { mNotification .onDownloadStateChanged(IDownloaderClient.STATE_FAILED_FETCHING_URL); } finally { setServiceRunning(false); } } }); } }; /** * Updates the LVL information from the server. * * @param context */ public void updateLVL(final Context context) { Context c = context.getApplicationContext(); Handler h = new Handler(c.getMainLooper()); h.post(new LVLRunnable(c, mPendingIntent)); } /** * The APK has been updated and a filename has been sent down from the * Market call. If the file has the same name as the previous file, we do * nothing as the file is guaranteed to be the same. If the file does not * have the same name, we download it if it hasn't already been delivered by * Market. 
* * @param db the downloads database holding the current download state * @param index the index of the file from market (0 = main, 1 = patch) * @param filename the name of the new file * @param fileSize the size of the new file * @return true if the new file must be downloaded; false if the name is unchanged or a file with the expected size already exists */ public boolean handleFileUpdated(DownloadsDB db, int index, String filename, long fileSize) { DownloadInfo di = db.getDownloadInfoByFileName(filename); if (null != di) { String oldFile = di.mFileName; // cleanup if (null != oldFile) { if (filename.equals(oldFile)) { return false; } // remove partially downloaded file if it is there String deleteFile = Helpers.generateSaveFileName(this, oldFile); File f = new File(deleteFile); if (f.exists()) f.delete(); } } return !Helpers.doesFileExist(this, filename, fileSize, true); } private void scheduleAlarm(long wakeUp) { AlarmManager alarms = (AlarmManager) getSystemService(Context.ALARM_SERVICE); if (alarms == null) { Log.e(Constants.TAG, "couldn't get alarm manager"); return; } if (Constants.LOGV) { Log.v(Constants.TAG, "scheduling retry in " + wakeUp + "ms"); } String className = getAlarmReceiverClassName(); Intent intent = new Intent(Constants.ACTION_RETRY); intent.putExtra(EXTRA_PENDING_INTENT, mPendingIntent); intent.setClassName(this.getPackageName(), className); mAlarmIntent = PendingIntent.getBroadcast(this, 0, intent, PendingIntent.FLAG_ONE_SHOT); alarms.set( AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + wakeUp, mAlarmIntent ); } private void cancelAlarms() { if (null != mAlarmIntent) { AlarmManager alarms = (AlarmManager) getSystemService(Context.ALARM_SERVICE); if (alarms == null) { Log.e(Constants.TAG, "couldn't get alarm manager"); return; } alarms.cancel(mAlarmIntent); mAlarmIntent = null; } } /** * We use this to track network state changes (such as WiFi or cellular becoming * enabled or disabled) while downloads are paused or in progress. */ private class InnerBroadcastReceiver extends BroadcastReceiver { final Service mService; InnerBroadcastReceiver(Service service) { mService = service; } @Override public void onReceive(Context context, Intent intent) { pollNetworkState(); if (mStateChanged && !isServiceRunning()) { Log.d(Constants.TAG, "InnerBroadcastReceiver Called"); Intent fileIntent = new Intent(context, mService.getClass()); fileIntent.putExtra(EXTRA_PENDING_INTENT, mPendingIntent); // send a new intent to the service context.startService(fileIntent); } } } /** * This is the main thread for the Downloader. This thread is responsible * for queuing up downloads and other goodness. 
*/ @Override protected void onHandleIntent(Intent intent) { setServiceRunning(true); try { // the database automatically reads the metadata for version code // and download status when the instance is created DownloadsDB db = DownloadsDB.getDB(this); final PendingIntent pendingIntent = (PendingIntent) intent .getParcelableExtra(EXTRA_PENDING_INTENT); if (null != pendingIntent) { mNotification.setClientIntent(pendingIntent); mPendingIntent = pendingIntent; } else if (null != mPendingIntent) { mNotification.setClientIntent(mPendingIntent); } else { Log.e(LOG_TAG, "Downloader started in bad state without notification intent."); return; } // when the LVL check completes, a successful response will update // the service if (isLVLCheckRequired(db, mPackageInfo)) { updateLVL(this); return; } // get each download DownloadInfo[] infos = db.getDownloads(); mBytesSoFar = 0; mTotalLength = 0; mFileCount = infos.length; for (DownloadInfo info : infos) { // We do an (simple) integrity check on each file, just to make // sure if (info.mStatus == STATUS_SUCCESS) { // verify that the file matches the state if (!Helpers.doesFileExist(this, info.mFileName, info.mTotalBytes, true)) { info.mStatus = 0; info.mCurrentBytes = 0; } } // get aggregate data mTotalLength += info.mTotalBytes; mBytesSoFar += info.mCurrentBytes; } // loop through all downloads and fetch them pollNetworkState(); if (null == mConnReceiver) { /** * We use this to track network state, such as when WiFi, * Cellular, etc. is enabled when downloads are paused or in * progress. */ mConnReceiver = new InnerBroadcastReceiver(this); IntentFilter intentFilter = new IntentFilter( ConnectivityManager.CONNECTIVITY_ACTION); intentFilter.addAction(WifiManager.WIFI_STATE_CHANGED_ACTION); registerReceiver(mConnReceiver, intentFilter); } for (DownloadInfo info : infos) { long startingCount = info.mCurrentBytes; if (info.mStatus != STATUS_SUCCESS) { DownloadThread dt = new DownloadThread(info, this, mNotification); cancelAlarms(); scheduleAlarm(Constants.ACTIVE_THREAD_WATCHDOG); dt.run(); cancelAlarms(); } db.updateFromDb(info); boolean setWakeWatchdog = false; int notifyStatus; switch (info.mStatus) { case STATUS_FORBIDDEN: // the URL is out of date updateLVL(this); return; case STATUS_SUCCESS: mBytesSoFar += info.mCurrentBytes - startingCount; db.updateMetadata(mPackageInfo.versionCode, 0); continue; case STATUS_FILE_DELIVERED_INCORRECTLY: // we may be on a network that is returning us a web // page on redirect notifyStatus = IDownloaderClient.STATE_PAUSED_NETWORK_SETUP_FAILURE; info.mCurrentBytes = 0; db.updateDownload(info); setWakeWatchdog = true; break; case STATUS_PAUSED_BY_APP: notifyStatus = IDownloaderClient.STATE_PAUSED_BY_REQUEST; break; case STATUS_WAITING_FOR_NETWORK: case STATUS_WAITING_TO_RETRY: notifyStatus = IDownloaderClient.STATE_PAUSED_NETWORK_UNAVAILABLE; setWakeWatchdog = true; break; case STATUS_QUEUED_FOR_WIFI_OR_CELLULAR_PERMISSION: case STATUS_QUEUED_FOR_WIFI: // look for more detail here if (null != mWifiManager) { if (!mWifiManager.isWifiEnabled()) { notifyStatus = IDownloaderClient.STATE_PAUSED_WIFI_DISABLED_NEED_CELLULAR_PERMISSION; setWakeWatchdog = true; break; } } notifyStatus = IDownloaderClient.STATE_PAUSED_NEED_CELLULAR_PERMISSION; setWakeWatchdog = true; break; case STATUS_CANCELED: notifyStatus = IDownloaderClient.STATE_FAILED_CANCELED; setWakeWatchdog = true; break; case STATUS_INSUFFICIENT_SPACE_ERROR: notifyStatus = IDownloaderClient.STATE_FAILED_SDCARD_FULL; setWakeWatchdog = true; break; case 
STATUS_DEVICE_NOT_FOUND_ERROR: notifyStatus = IDownloaderClient.STATE_PAUSED_SDCARD_UNAVAILABLE; setWakeWatchdog = true; break; default: notifyStatus = IDownloaderClient.STATE_FAILED; break; } if (setWakeWatchdog) { scheduleAlarm(Constants.WATCHDOG_WAKE_TIMER); } else { cancelAlarms(); } // failure or pause state mNotification.onDownloadStateChanged(notifyStatus); return; } // all downloads complete mNotification.onDownloadStateChanged(IDownloaderClient.STATE_COMPLETED); } finally { setServiceRunning(false); } } @Override public void onDestroy() { if (null != mConnReceiver) { unregisterReceiver(mConnReceiver); mConnReceiver = null; } mServiceStub.disconnect(this); super.onDestroy(); } public int getNetworkAvailabilityState(DownloadsDB db) { if (mIsConnected) { if (!mIsCellularConnection) return NETWORK_OK; int flags = db.mFlags; if (mIsRoaming) return NETWORK_CANNOT_USE_ROAMING; if (0 != (flags & FLAGS_DOWNLOAD_OVER_CELLULAR)) { return NETWORK_OK; } else { return NETWORK_TYPE_DISALLOWED_BY_REQUESTOR; } } return NETWORK_NO_CONNECTION; } @Override public void onCreate() { super.onCreate(); try { mPackageInfo = getPackageManager().getPackageInfo( getPackageName(), 0); ApplicationInfo ai = getApplicationInfo(); CharSequence applicationLabel = getPackageManager().getApplicationLabel(ai); mNotification = new DownloadNotification(this, applicationLabel); } catch (NameNotFoundException e) { e.printStackTrace(); } } /** * Exception thrown from methods called by generateSaveFile() for any fatal * error. */ public static class GenerateSaveFileError extends Exception { private static final long serialVersionUID = 3465966015408936540L; int mStatus; String mMessage; public GenerateSaveFileError(int status, String message) { mStatus = status; mMessage = message; } } /** * Returns the filename (where the file should be saved) from info about a * download */ public String generateTempSaveFileName(String fileName) { String path = Helpers.getSaveFilePath(this) + File.separator + fileName + TEMP_EXT; return path; } /** * Creates a filename (where the file should be saved) from info about a * download. */ public String generateSaveFile(String filename, long filesize) throws GenerateSaveFileError { String path = generateTempSaveFileName(filename); File expPath = new File(path); if (!Helpers.isExternalMediaMounted()) { Log.d(Constants.TAG, "External media not mounted: " + path); throw new GenerateSaveFileError(STATUS_DEVICE_NOT_FOUND_ERROR, "external media is not yet mounted"); } if (expPath.exists()) { Log.d(Constants.TAG, "File already exists: " + path); throw new GenerateSaveFileError(STATUS_FILE_ALREADY_EXISTS_ERROR, "requested destination file already exists"); } if (Helpers.getAvailableBytes(Helpers.getFilesystemRoot(path)) < filesize) { throw new GenerateSaveFileError(STATUS_INSUFFICIENT_SPACE_ERROR, "insufficient space on external storage"); } return path; } /** * @return a non-localized string appropriate for logging corresponding to * one of the NETWORK_* constants. 
*/ public String getLogMessageForNetworkError(int networkError) { switch (networkError) { case NETWORK_RECOMMENDED_UNUSABLE_DUE_TO_SIZE: return "download size exceeds recommended limit for mobile network"; case NETWORK_UNUSABLE_DUE_TO_SIZE: return "download size exceeds limit for mobile network"; case NETWORK_NO_CONNECTION: return "no network connection available"; case NETWORK_CANNOT_USE_ROAMING: return "download cannot use the current network connection because it is roaming"; case NETWORK_TYPE_DISALLOWED_BY_REQUESTOR: return "download was requested to not use the current network type"; default: return "unknown error with network connectivity"; } } public int getControl() { return mControl; } public int getStatus() { return mStatus; } /** * Calculating a moving average for the speed so we don't get jumpy * calculations for time etc. */ static private final float SMOOTHING_FACTOR = 0.005f; public void notifyUpdateBytes(long totalBytesSoFar) { long timeRemaining; long currentTime = SystemClock.uptimeMillis(); if (0 != mMillisecondsAtSample) { // we have a sample. long timePassed = currentTime - mMillisecondsAtSample; long bytesInSample = totalBytesSoFar - mBytesAtSample; float currentSpeedSample = (float) bytesInSample / (float) timePassed; if (0 != mAverageDownloadSpeed) { mAverageDownloadSpeed = SMOOTHING_FACTOR * currentSpeedSample + (1 - SMOOTHING_FACTOR) * mAverageDownloadSpeed; } else { mAverageDownloadSpeed = currentSpeedSample; } timeRemaining = (long) ((mTotalLength - totalBytesSoFar) / mAverageDownloadSpeed); } else { timeRemaining = -1; } mMillisecondsAtSample = currentTime; mBytesAtSample = totalBytesSoFar; mNotification.onDownloadProgress( new DownloadProgressInfo(mTotalLength, totalBytesSoFar, timeRemaining, mAverageDownloadSpeed) ); } @Override protected boolean shouldStop() { // the database automatically reads the metadata for version code // and download status when the instance is created DownloadsDB db = DownloadsDB.getDB(this); if (db.mStatus == 0) { return true; } return false; } @Override public void requestDownloadStatus() { mNotification.resendState(); } @Override public void onClientUpdated(Messenger clientMessenger) { this.mClientMessenger = clientMessenger; mNotification.setMessenger(mClientMessenger); } }
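/*
 * Illustrative usage sketch (not part of the original source above). It shows how a client
 * app would typically subclass this service and ask it, at startup, whether a download is
 * needed, based only on the members visible above (getPublicKey, getSALT,
 * getAlarmReceiverClassName, startDownloadServiceIfRequired, NO_DOWNLOAD_REQUIRED).
 * The class names SampleDownloaderService, SampleAlarmReceiver and MainActivity, the key
 * string and the salt bytes are hypothetical placeholders; other overrides or constructors
 * the real service may require are not shown.
 *
 * public class SampleDownloaderService extends DownloaderService {
 *     // placeholder Base64 licensing key; substitute your own
 *     private static final String BASE64_PUBLIC_KEY = "REPLACE_WITH_YOUR_KEY";
 *     // arbitrary salt used to obfuscate the cached LVL policy data
 *     private static final byte[] SALT = new byte[] { 1, 42, -12, -1, 54, 98,
 *             -100, -12, 43, 2, -8, -4, 9, 5, -106, -107, -33, 45, -1, 84 };
 *
 *     @Override public String getPublicKey() { return BASE64_PUBLIC_KEY; }
 *     @Override public byte[] getSALT() { return SALT; }
 *     @Override public String getAlarmReceiverClassName() { return SampleAlarmReceiver.class.getName(); }
 * }
 *
 * // From a client Activity.onCreate(), before relying on the expansion files:
 * Intent notifierIntent = new Intent(this, MainActivity.class);
 * notifierIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP);
 * PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, notifierIntent,
 *         PendingIntent.FLAG_UPDATE_CURRENT);
 * int startResult = DownloaderService.startDownloadServiceIfRequired(this, pendingIntent,
 *         SampleDownloaderService.class);
 * if (startResult != DownloaderService.NO_DOWNLOAD_REQUIRED) {
 *     // show download progress UI and wait for IDownloaderClient.STATE_COMPLETED
 * }
 */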
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.cli; import org.apache.falcon.FalconCLIConstants; import org.apache.falcon.entity.v0.SchemaHelper; import org.apache.falcon.metadata.RelationshipType; import org.apache.falcon.resource.TestContext; import org.apache.falcon.util.FalconTestUtil; import org.apache.falcon.util.OozieTestUtils; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.util.Date; import java.util.Map; /** * Test for Falcon CLI. * * todo: Refactor both the classes to move this methods to helper; */ @Test(groups = {"exhaustive"}) public class FalconCLIIT { private InMemoryWriter stream = new InMemoryWriter(System.out); @BeforeClass public void prepare() throws Exception { TestContext.prepare(); } @AfterClass public void tearDown() throws Exception { TestContext.deleteEntitiesFromStore(); } public void testSubmitEntityValidCommands() throws Exception { FalconCLI.OUT.set(stream); String filePath; TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -submit -type cluster -file " + filePath), 0); context.setCluster(overlay.get("cluster")); Assert.assertEquals(stream.buffer.toString().trim(), "falcon/default/Submit successful (cluster) " + context.getClusterName()); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); Assert.assertEquals( stream.buffer.toString().trim(), "falcon/default/Submit successful (feed) " + overlay.get("inputFeedName")); // Test the lookup command Assert.assertEquals(executeWithURL("entity -lookup -type feed -path " + "/falcon/test/input/2014/11/23/23"), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -file " + filePath), 0); Assert.assertEquals( stream.buffer.toString().trim(), "falcon/default/Submit successful (feed) " + overlay.get("outputFeedName")); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -submit -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -file " + filePath), 0); Assert.assertEquals( stream.buffer.toString().trim(), "falcon/default/Submit successful (process) " + overlay.get("processName")); } public void 
testListWithEmptyConfigStore() throws Exception { Assert.assertEquals(executeWithURL("entity -list -type process "), 0); } public void testSubmitAndScheduleEntityValidCommands() throws Exception { String filePath; TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type cluster -file " + filePath), -1); context.setCluster(overlay.get("cluster")); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type process -file " + filePath), 0); OozieTestUtils.waitForProcessWFtoStart(context); Assert.assertEquals(executeWithURL("entity -update -name " + overlay.get("processName") + " -type process -file " + filePath), 0); Assert.assertEquals(0, executeWithURL("entity -touch -name " + overlay.get("processName") + " -type process")); } public void testValidateValidCommands() throws Exception { String filePath; TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay); Assert.assertEquals(executeWithURL("entity -validate -type cluster -file " + filePath), 0); context.setCluster(overlay.get("cluster")); Assert.assertEquals(executeWithURL("entity -submit -type cluster -file " + filePath), 0); context.setCluster(overlay.get("cluster")); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -validate -type feed -file " + filePath), 0); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); Assert.assertEquals(executeWithURL("entity -validate -type feed -file " + filePath), 0); Assert.assertEquals(executeWithURL("entity -submit -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -validate -type process -file " + filePath), 0); Assert.assertEquals(executeWithURL("entity -submit -type process -file " + filePath), 0); } public void testDefinitionEntityValidCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -definition -type cluster -name " + 
overlay.get("cluster")), 0); Assert.assertEquals(executeWithURL("entity -definition -type feed -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -definition -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -definition -type process -name " + overlay.get("processName")), 0); } public void testScheduleEntityValidCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -schedule -type cluster -name " + overlay.get("cluster")), -1); Assert.assertEquals(executeWithURL("entity -schedule -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName") + " -properties key:value"), 0); } public void testSkipDryRunValidCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals( executeWithURL("entity -schedule -skipDryRun -type cluster -name " + overlay.get("cluster")), -1); Assert.assertEquals( executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals( executeWithURL("entity -schedule -type process -skipDryRun -name " + overlay.get("processName")), 0); Assert.assertEquals(0, executeWithURL("entity -touch -skipDryRun -name " + overlay.get("processName") + " -type process")); String filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals( executeWithURL("entity -submitAndSchedule -skipDryRun -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals( executeWithURL("entity -validate -skipDryRun -type process -file " + filePath), 0); } public void testSuspendResumeStatusEntityValidCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -status -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -status -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -status -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName")), 0); OozieTestUtils.waitForProcessWFtoStart(context); Assert.assertEquals(executeWithURL("entity -suspend -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -suspend -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -suspend -type process -name " + overlay.get("processName")), 0); 
Assert.assertEquals(executeWithURL("entity -status -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -status -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -status -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -resume -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -resume -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -resume -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -status -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -status -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -status -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -summary -type feed -cluster "+ overlay.get("cluster") + " -fields status,tags -start " + START_INSTANCE + " -filterBy TYPE:FEED -orderBy name -sortOrder asc " + " -offset 0 -numResults 1 -numInstances 5"), 0); Assert.assertEquals(executeWithURL("entity -summary -type process -fields status,pipelines" + " -cluster " + overlay.get("cluster") + " -start " + SchemaHelper.getDateFormat().format(new Date(0)) + " -end " + SchemaHelper.getDateFormat().format(new Date()) + " -filterBy TYPE:PROCESS -orderBy name -sortOrder desc " + " -offset 0 -numResults 1 -numInstances 7"), 0); Assert.assertEquals(executeWithURL("entity -summary -type process -fields status,pipelines" + " -cluster " + overlay.get("cluster") + " -start " + SchemaHelper.getDateFormat().format(new Date(0)) + " -end " + SchemaHelper.getDateFormat().format(new Date()) + " -filterBy TYPE:PROCESS -orderBy name -sortOrder invalid " + " -offset 0 -numResults 1 -numInstances 7"), -1); // No start or end date and with doAs option Assert.assertEquals(executeWithURL("entity -summary -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -fields status,pipelines" + " -cluster " + overlay.get("cluster") + " -filterBy TYPE:PROCESS -orderBy name " + " -offset 0 -numResults 1 -numInstances 7"), 0); } public void testSubCommandPresence() throws Exception { Assert.assertEquals(-1, executeWithURL("entity -type cluster ")); } public void testDeleteEntityValidCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -delete -type cluster -name " + overlay.get("cluster")), -1); Assert.assertEquals(executeWithURL("entity -delete -type feed -name " + overlay.get("inputFeedName")), -1); Assert.assertEquals(executeWithURL("entity -delete -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("outputFeedName")), -1); Assert.assertEquals(executeWithURL("entity -delete -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -delete -type feed -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -delete -type feed -name " + overlay.get("outputFeedName")), 0); } public void testInvalidCLIEntitycommands() throws Exception { TestContext context = new TestContext(); 
Map<String, String> overlay = context.getUniqueOverlay(); TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -name " + "name"), -1); Assert.assertEquals(executeWithURL("entity -schedule -type feed -file " + "name"), -1); } public void testInstanceRunningAndStatusCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); OozieTestUtils.waitForProcessWFtoStart(context); //Test the dependency command Assert.assertEquals(executeWithURL("instance -dependency -type feed -name " + overlay.get("inputFeedName") + " -instanceTime 2010-01-01T00:00Z"), 0); //Test the dependency command with doAs Assert.assertEquals(executeWithURL("instance -dependency -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName") + " -instanceTime 2010-01-01T00:00Z"), 0); Assert.assertEquals(executeWithURL("instance -status -type feed -name " + overlay.get("outputFeedName") + " -start " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -running -type process -name " + overlay.get("processName")), 0); // with doAs Assert.assertEquals(executeWithURL("instance -running -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date())), 0); Assert.assertEquals(executeWithURL("instance -listing -type feed -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date())), 0); Assert.assertEquals(executeWithURL("instance -status -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); //TEst instance status with doAs Assert.assertEquals(executeWithURL("instance -status -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -status -type feed -lifecycle eviction,replication -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date())), 0); Assert.assertEquals(executeWithURL("instance -status -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date())), 0); Assert.assertEquals(executeWithURL("instance -params -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); // doAs option Assert.assertEquals(executeWithURL("instance -params -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); // test filterBy, orderBy, offset, numResults String startTimeString = SchemaHelper.getDateFormat().format(new Date()); Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + startTimeString + " -orderBy startTime -sortOrder asc -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + 
SchemaHelper.getDateFormat().format(new Date()) + " -orderBy INVALID -offset 0 -numResults 1"), -1); Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + startTimeString + " -orderBy startTime -sortOrder desc -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + startTimeString + " -orderBy startTime -sortOrder invalid -offset 0 -numResults 1"), -1); Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date()) + " -filterBy INVALID:FILTER -offset 0 -numResults 1"), -1); // testcase : start str is older than entity schedule time. Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date(10000)) + " -orderBy startTime -sortOrder asc -offset 0 -numResults 1"), 0); // testcase : end str is in future long futureTimeinMilliSecs = (new Date()).getTime()+ 86400000; Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date(10000)) + " -end " + SchemaHelper.getDateFormat().format(new Date(futureTimeinMilliSecs)) + " -orderBy startTime -offset 0 -numResults 1"), 0); // Both start and end dates are optional Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -orderBy startTime -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -status -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -filterBy STATUS:SUCCEEDED,STARTEDAFTER:" + START_INSTANCE + " -orderBy startTime -sortOrder desc -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -status -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -filterBy SOURCECLUSTER:" + overlay.get("cluster") + " -orderBy startTime -sortOrder desc -offset 0 -numResults 1"), 0); //Test list with doAs Assert.assertEquals(executeWithURL("instance -list -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date())), 0); Assert.assertEquals(executeWithURL("instance -list -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date()) + " -filterBy STATUS:SUCCEEDED -orderBy startTime -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -list -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date()) + " -filterBy SOURCECLUSTER:" + overlay.get("src.cluster.name") + " -orderBy startTime -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -status -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start "+ SchemaHelper.getDateFormat().format(new Date()) +" -filterBy INVALID:FILTER -orderBy startTime -offset 0 -numResults 1"), -1); Assert.assertEquals(executeWithURL("instance -list -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start "+ SchemaHelper.getDateFormat().format(new Date()) 
+" -filterBy STATUS:SUCCEEDED -orderBy INVALID -offset 0 -numResults 1"), -1); Assert.assertEquals(executeWithURL("instance -status -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date()) + " -filterBy STATUS:SUCCEEDED -orderBy startTime -offset 1 -numResults 1"), 0); // When you get a cluster for which there are no feed entities, Assert.assertEquals(executeWithURL("entity -summary -type feed -cluster " + overlay.get("cluster") + " -fields status,tags" + " -start " + SchemaHelper.getDateFormat().format(new Date()) + " -offset 0 -numResults 1 -numInstances 3"), 0); } public void testInstanceRunningAndSearchSummaryCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); OozieTestUtils.waitForProcessWFtoStart(context); Assert.assertEquals(executeWithURL("instance -status -type feed -name " + overlay.get("outputFeedName") + " -start " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -search"), 0); Assert.assertEquals(executeWithURL("instance -running -type process -name " + overlay.get("processName")), 0); //with doAs Assert.assertEquals(executeWithURL( "instance -search -type process -instanceStatus RUNNING -doAs " + FalconTestUtil.TEST_USER_2), 0); Assert.assertEquals(executeWithURL("instance -running -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("instance -summary -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); //with doAs Assert.assertEquals(executeWithURL("instance -summary -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -summary -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date())), 0); Assert.assertEquals(executeWithURL("instance -params -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); //with doAs Assert.assertEquals(executeWithURL("instance -params -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName") + " -start " + START_INSTANCE), 0); } public void testInstanceSuspendAndResume() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); Assert.assertEquals(executeWithURL("instance -suspend -type feed -name " + overlay.get("inputFeedName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -suspend -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); // No end date, should fail. 
Assert.assertEquals(executeWithURL("instance -suspend -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start "+ SchemaHelper.getDateFormat().format(new Date())), -1); Assert.assertEquals(executeWithURL("instance -resume -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -resume -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -resume -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date()) + " -end " + SchemaHelper.getDateFormat().format(new Date())), 0); } private static final String START_INSTANCE = "2012-04-20T00:00Z"; public void testInstanceKillAndRerun() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); OozieTestUtils.waitForProcessWFtoStart(context); Assert.assertEquals(executeWithURL("instance -kill -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -kill -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); // Fail due to no end date Assert.assertEquals(executeWithURL("instance -kill -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start "+ SchemaHelper.getDateFormat().format(new Date())), -1); Assert.assertEquals(executeWithURL("instance -rerun -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -file " + createTempJobPropertiesFile()), 0); Assert.assertEquals(executeWithURL("instance -rerun -type feed -doAs " + FalconTestUtil.TEST_USER_2 + " -name " + overlay.get("inputFeedName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -file " + createTempJobPropertiesFile()), 0); Assert.assertEquals(executeWithURL("instance -rerun -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date()) + " -end " + SchemaHelper.getDateFormat().format(new Date()) + " -file " + createTempJobPropertiesFile()), 0); } @Test public void testEntityLineage() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); String filePath; filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay); context.setCluster(overlay.get("cluster")); Assert.assertEquals(executeWithURL("entity -submit -type cluster -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); 
Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -submit -type process -file " + filePath), 0); Assert.assertEquals(executeWithURL("metadata -lineage -pipeline testPipeline"), 0); Assert.assertEquals(executeWithURL("metadata -lineage -doAs " + FalconTestUtil.TEST_USER_2 + " -pipeline testPipeline"), 0); } @Test public void testEntityPaginationFilterByCommands() throws Exception { String filePath; TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type cluster -file " + filePath), -1); context.setCluster(overlay.get("cluster")); // this is necessary for lineage Assert.assertEquals(executeWithURL("entity -submit -type cluster -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -validate -type process -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -submitAndSchedule -type process -file " + filePath), 0); OozieTestUtils.waitForProcessWFtoStart(context); // test entity List cli Assert.assertEquals(executeWithURL("entity -list -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("entity -list -type feed,process -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("entity -list -type feed,process -offset 0 -numResults 1 " + "-nameseq abc -tagkeys abc"), 0); Assert.assertEquals(executeWithURL("entity -list -type cluster" + " -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("entity -list -type process -fields status " + " -filterBy STATUS:SUBMITTED,TYPE:process -orderBy name " + " -sortOrder asc -offset 1 -numResults 1"), 0); Assert.assertEquals(executeWithURL("entity -list -type process -fields status,pipelines " + " -filterBy STATUS:SUBMITTED,type:process -orderBy name -offset 1 -numResults 1"), 0); Assert.assertEquals(executeWithURL("entity -list -type process -fields status,pipelines " + " -filterBy STATUS:SUBMITTED,pipelines:testPipeline " + " -orderBy name -sortOrder desc -offset 1 -numResults 1"), 0); Assert.assertEquals(executeWithURL("entity -list -type process -fields status,tags " + " -tags owner=producer@xyz.com,department=forecasting " + " -filterBy STATUS:SUBMITTED,type:process -orderBy name -offset 1 -numResults 1"), 0); Assert.assertEquals(executeWithURL("entity -list -type process -fields status " + " -filterBy 
STATUS:SUCCEEDED,TYPE:process -orderBy INVALID -offset 0 -numResults 1"), -1); Assert.assertEquals(executeWithURL("entity -list -type process -fields INVALID " + " -filterBy STATUS:SUCCEEDED,TYPE:process -orderBy name -offset 1 -numResults 1"), -1); Assert.assertEquals(executeWithURL("entity -list -type process -fields status " + " -filterBy INVALID:FILTER,TYPE:process -orderBy name -offset 1 -numResults 1"), -1); Assert.assertEquals(executeWithURL("entity -definition -type cluster -name " + overlay.get("cluster")), 0); Assert.assertEquals(executeWithURL("entity -list -type process -fields status,tags " + " -tags owner=producer@xyz.com,department=forecasting " + " -filterBy STATUS:SUBMITTED,type:process " + " -orderBy name -sortOrder invalid -offset 1 -numResults 1"), -1); Assert.assertEquals(executeWithURL("instance -status -type feed -name " + overlay.get("outputFeedName") + " -start " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -running -type process -name " + overlay.get("processName")), 0); // with doAs Assert.assertEquals(executeWithURL("entity -list -type process -doAs " + FalconTestUtil.TEST_USER_2 + " -fields status -filterBy STATUS:SUBMITTED,TYPE:process -orderBy name " + " -sortOrder asc -offset 1 -numResults 1"), 0); } @Test public void testMetadataListCommands() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); String processName = overlay.get("processName"); String feedName = overlay.get("outputFeedName"); String clusterName = overlay.get("cluster"); Assert.assertEquals(executeWithURL(FalconCLIConstants.ENTITY_CMD + " -" + FalconCLIConstants.SCHEDULE_OPT + " -" + FalconCLIConstants.TYPE_OPT + " process -" + FalconCLIConstants.ENTITY_NAME_OPT + " " + processName), 0); Assert.assertEquals(executeWithURL(FalconCLIConstants.ENTITY_CMD + " -" + FalconCLIConstants.SCHEDULE_OPT + " -" + FalconCLIConstants.TYPE_OPT + " feed -" + FalconCLIConstants.ENTITY_NAME_OPT + " " + feedName), 0); OozieTestUtils.waitForProcessWFtoStart(context); String metadataListCommand = FalconCLIConstants.METADATA_CMD + " -" + FalconCLIConstants.LIST_OPT + " -" + FalconCLIConstants.TYPE_OPT + " "; String metadataListCommandWithDoAs = FalconCLIConstants.METADATA_CMD + " -doAs " + FalconTestUtil.TEST_USER_2 + " -" + FalconCLIConstants.LIST_OPT + " -" + FalconCLIConstants.TYPE_OPT + " "; String clusterString = " -" + FalconCLIConstants.CLUSTER_OPT + " " + clusterName; Assert.assertEquals(executeWithURL(metadataListCommand + RelationshipType.CLUSTER_ENTITY.name()), 0); Assert.assertEquals(executeWithURL(metadataListCommand + RelationshipType.PROCESS_ENTITY.name()), 0); Assert.assertEquals(executeWithURL(metadataListCommand + RelationshipType.FEED_ENTITY.name()), 0); Assert.assertEquals(executeWithURL(metadataListCommand + RelationshipType.PROCESS_ENTITY.name() + clusterString), 0); Assert.assertEquals(executeWithURL(metadataListCommand + RelationshipType.FEED_ENTITY.name() + clusterString), 0); Assert.assertEquals(executeWithURL(metadataListCommand + RelationshipType.CLUSTER_ENTITY.name() + clusterString), 0); //with doAs Assert.assertEquals(executeWithURL(metadataListCommandWithDoAs + RelationshipType.FEED_ENTITY.name()), 0); Assert.assertEquals(executeWithURL(metadataListCommand + "feed"), -1); Assert.assertEquals(executeWithURL(metadataListCommand + "invalid"), -1); } @Test public void testMetadataRelationsCommands() throws Exception { TestContext context = new TestContext(); 
Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); String processName = overlay.get("processName"); String feedName = overlay.get("outputFeedName"); String clusterName = overlay.get("cluster"); Assert.assertEquals(executeWithURL(FalconCLIConstants.ENTITY_CMD + " -" + FalconCLIConstants.SCHEDULE_OPT + " -" + FalconCLIConstants.TYPE_OPT + " process -" + FalconCLIConstants.ENTITY_NAME_OPT + " " + processName), 0); Assert.assertEquals(executeWithURL(FalconCLIConstants.ENTITY_CMD + " -" + FalconCLIConstants.SCHEDULE_OPT + " -" + FalconCLIConstants.TYPE_OPT + " feed -" + FalconCLIConstants.ENTITY_NAME_OPT + " " + feedName), 0); OozieTestUtils.waitForProcessWFtoStart(context); String metadataRelationsCommand = FalconCLIConstants.METADATA_CMD + " -" + FalconCLIConstants.RELATIONS_OPT + " -" + FalconCLIConstants.TYPE_OPT + " "; String metadataRelationsCommandWithDoAs = FalconCLIConstants.METADATA_CMD + " -doAs " + FalconTestUtil.TEST_USER_2 + " -" + FalconCLIConstants.RELATIONS_OPT + " -" + FalconCLIConstants.TYPE_OPT + " "; Assert.assertEquals(executeWithURL(metadataRelationsCommand + RelationshipType.CLUSTER_ENTITY.name() + " -" + FalconCLIConstants.NAME_OPT + " " + clusterName), 0); Assert.assertEquals(executeWithURL(metadataRelationsCommand + RelationshipType.PROCESS_ENTITY.name() + " -" + FalconCLIConstants.NAME_OPT + " " + processName), 0); // with doAs Assert.assertEquals(executeWithURL(metadataRelationsCommandWithDoAs + RelationshipType.PROCESS_ENTITY.name() + " -" + FalconCLIConstants.NAME_OPT + " " + processName), 0); Assert.assertEquals(executeWithURL(metadataRelationsCommand + "feed -" + FalconCLIConstants.NAME_OPT + " " + clusterName), -1); Assert.assertEquals(executeWithURL(metadataRelationsCommand + "invalid -" + FalconCLIConstants.NAME_OPT + " " + clusterName), -1); Assert.assertEquals(executeWithURL(metadataRelationsCommand + RelationshipType.CLUSTER_ENTITY.name()), -1); } public void testContinue() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); OozieTestUtils.waitForProcessWFtoStart(context); Assert.assertEquals(executeWithURL("instance -kill -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -kill -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start "+ SchemaHelper.getDateFormat().format(new Date()) + " -end " + SchemaHelper.getDateFormat().format(new Date())), 0); Assert.assertEquals(executeWithURL("instance -rerun -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE), -1); Assert.assertEquals(executeWithURL("instance -rerun -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -rerun -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start "+ SchemaHelper.getDateFormat().format(new Date()) + " -end " + SchemaHelper.getDateFormat().format(new Date())), 0); } public void testInvalidCLIInstanceCommands() throws Exception { // no command Assert.assertEquals(executeWithURL(" -kill -type process -name " + "name" + " -start 
2010-01-01T01:00Z -end 2010-01-01T03:00Z"), -1); Assert.assertEquals(executeWithURL("instance -kill " + "name" + " -start 2010-01-01T01:00Z -end 2010-01-01T01:00Z"), -1); Assert.assertEquals(executeWithURL("instance -kill -type process -name " + "name" + " -end 2010-01-01T03:00Z"), -1); Assert.assertEquals(executeWithURL("instance -kill -type process -name " + " -start 2010-01-01T01:00Z -end 2010-01-01T03:00Z"), -1); } public void testFalconURL() throws Exception { Assert.assertEquals(new FalconCLI() .run(("instance -status -type process -name " + "processName" + " -start 2010-01-01T01:00Z -end 2010-01-01T03:00Z") .split("\\s")), -1); Assert.assertEquals(new FalconCLI() .run(("instance -status -type process -name " + "processName -url http://unknownhost:1234/" + " -start 2010-01-01T01:00Z -end 2010-01-01T03:00Z") .split("\\s")), -1); } public void testClientProperties() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(new FalconCLI().run(("entity -schedule -type feed -name " + overlay.get("outputFeedName") + " -url " + TestContext.BASE_URL).split("\\s+")), 0); Assert.assertEquals(new FalconCLI().run(("entity -schedule -type process -name " + overlay.get("processName") + " -url " + TestContext.BASE_URL).split("\\s+")), 0); } public void testGetVersion() throws Exception { Assert.assertEquals(new FalconCLI().run(("admin -version -url " + TestContext.BASE_URL).split("\\s")), 0); Assert.assertEquals(new FalconCLI().run(("admin -doAs " + FalconTestUtil.TEST_USER_2 + " -version -url " + TestContext.BASE_URL).split("\\s")), 0); } public void testGetStatus() throws Exception { Assert.assertEquals(new FalconCLI().run(("admin -status -url " + TestContext.BASE_URL).split("\\s")), 0); Assert.assertEquals(new FalconCLI().run(("admin -doAs " + FalconTestUtil.TEST_USER_2 + " -status -url " + TestContext.BASE_URL).split("\\s")), 0); } public void testGetThreadStackDump() throws Exception { Assert.assertEquals(new FalconCLI().run(("admin -stack -url " + TestContext.BASE_URL).split("\\s")), 0); Assert.assertEquals(new FalconCLI().run(("admin -doAs " + FalconTestUtil.TEST_USER_2 + " -stack -url " + TestContext.BASE_URL).split("\\s")), 0); } public void testInstanceGetLogs() throws Exception { TestContext context = new TestContext(); Map<String, String> overlay = context.getUniqueOverlay(); submitTestFiles(context, overlay); Assert.assertEquals(executeWithURL("entity -schedule -type process -name " + overlay.get("processName")), 0); Assert.assertEquals(executeWithURL("entity -schedule -type feed -name " + overlay.get("outputFeedName")), 0); Thread.sleep(500); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0); Assert.assertEquals(executeWithURL("instance -logs -type feed -lifecycle eviction -name " + overlay.get("outputFeedName") + " -start "+ SchemaHelper.getDateFormat().format(new Date())), 0); // with doAs Assert.assertEquals(executeWithURL("instance -logs -doAs " + FalconTestUtil.TEST_USER_2 + " -type feed " + "-lifecycle eviction -name " + overlay.get("outputFeedName") + " -start " + SchemaHelper.getDateFormat().format(new Date())), 0); // test filterBy, orderBy, offset, numResults Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy STATUS:SUCCEEDED 
-orderBy endtime " + " -sortOrder asc -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy STATUS:SUCCEEDED -orderBy starttime " + " -sortOrder asc -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy STATUS:SUCCEEDED -orderBy cluster " + " -sortOrder asc -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy STATUS:WAITING -orderBy startTime -offset 0 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy STATUS:SUCCEEDED -orderBy endtime " + " -sortOrder invalid -offset 0 -numResults 1"), -1); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy STATUS:SUCCEEDED,STARTEDAFTER:"+START_INSTANCE+" -offset 1 -numResults 1"), 0); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy INVALID:FILTER -orderBy startTime -offset 0 -numResults 1"), -1); Assert.assertEquals(executeWithURL("instance -logs -type process -name " + overlay.get("processName") + " -start " + START_INSTANCE + " -end " + START_INSTANCE + " -filterBy STATUS:SUCCEEDED -orderBy wrongOrder -offset 0 -numResults 1"), -1); } private int executeWithURL(String command) throws Exception { //System.out.println("COMMAND IS "+command + " -url " + TestContext.BASE_URL); return new FalconCLI() .run((command + " -url " + TestContext.BASE_URL).split("\\s+")); } private String createTempJobPropertiesFile() throws IOException { File target = new File("webapp/target"); if (!target.exists()) { target = new File("target"); } File tmpFile = File.createTempFile("job", ".properties", target); OutputStream out = new FileOutputStream(tmpFile); out.write("oozie.wf.rerun.failnodes=true\n".getBytes()); out.close(); return tmpFile.getAbsolutePath(); } private void submitTestFiles(TestContext context, Map<String, String> overlay) throws Exception { String filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay); Assert.assertEquals(executeWithURL("entity -submit -type cluster -file " + filePath), 0); context.setCluster(overlay.get("cluster")); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay); Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0); filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay); Assert.assertEquals(executeWithURL("entity -submit -type process -file " + filePath), 0); } private static class InMemoryWriter extends PrintStream { private StringBuffer buffer = new StringBuffer(); public InMemoryWriter(OutputStream out) { super(out); } @Override public void println(String x) { clear(); 
buffer.append(x); super.println(x); } @SuppressWarnings("UnusedDeclaration") public String getBuffer() { return buffer.toString(); } public void clear() { buffer.delete(0, buffer.length()); } } }
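/*
 * Illustrative sketch, not part of the Falcon test suite above: it only shows how a command
 * string such as "entity -schedule -type process -name myProcess" becomes an argv array once
 * the test's executeWithURL(..) helper appends "-url <base url>" and splits on whitespace.
 * The BASE_URL value below is a placeholder, and only JDK classes are used.
 */
class CommandLineSplitSketch {
    private static final String BASE_URL = "http://localhost:41000/falcon/";

    static String[] toArgv(String command) {
        // Mirror the helper above: append the endpoint and split on runs of whitespace.
        // Note: an argument that itself contains spaces would be broken apart by this.
        return (command + " -url " + BASE_URL).split("\\s+");
    }

    public static void main(String[] args) {
        String[] argv = toArgv("entity -schedule -type process -name myProcess");
        System.out.println(java.util.Arrays.toString(argv));
        // -> [entity, -schedule, -type, process, -name, myProcess, -url, http://localhost:41000/falcon/]
    }
}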
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.workbench.screens.guided.dtable.client.editor; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; import javax.annotation.PostConstruct; import javax.enterprise.context.Dependent; import javax.enterprise.event.Event; import javax.enterprise.event.Observes; import javax.inject.Inject; import com.google.gwt.core.client.Scheduler; import com.google.gwt.user.client.ui.IsWidget; import org.drools.workbench.models.guided.dtable.shared.model.GuidedDecisionTable52; import org.drools.workbench.screens.guided.dtable.client.editor.menu.EditMenuBuilder; import org.drools.workbench.screens.guided.dtable.client.editor.menu.InsertMenuBuilder; import org.drools.workbench.screens.guided.dtable.client.editor.menu.RadarMenuBuilder; import org.drools.workbench.screens.guided.dtable.client.editor.menu.ViewMenuBuilder; import org.drools.workbench.screens.guided.dtable.client.editor.page.ColumnsPage; import org.drools.workbench.screens.guided.dtable.client.type.GuidedDTableGraphResourceType; import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTableModellerView; import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTablePresenter; import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTablePresenter.Access; import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTableView; import org.drools.workbench.screens.guided.dtable.client.widget.table.events.cdi.DecisionTableSelectedEvent; import org.drools.workbench.screens.guided.dtable.client.wizard.NewGuidedDecisionTableWizardHelper; import org.drools.workbench.screens.guided.dtable.model.GuidedDecisionTableEditorContent; import org.drools.workbench.screens.guided.dtable.model.GuidedDecisionTableEditorGraphContent; import org.drools.workbench.screens.guided.dtable.model.GuidedDecisionTableEditorGraphModel; import org.drools.workbench.screens.guided.dtable.service.GuidedDecisionTableEditorService; import org.drools.workbench.screens.guided.dtable.service.GuidedDecisionTableGraphEditorService; import org.drools.workbench.screens.guided.dtable.service.GuidedDecisionTableGraphSaveAndRenameService; import org.guvnor.common.services.shared.metadata.model.Metadata; import org.guvnor.common.services.shared.metadata.model.Overview; import org.guvnor.messageconsole.client.console.widget.button.AlertsButtonMenuItemBuilder; import org.jboss.errai.bus.client.api.messaging.Message; import org.jboss.errai.common.client.api.Caller; import org.jboss.errai.common.client.api.RemoteCallback; import org.jboss.errai.ioc.client.container.SyncBeanManager; import org.kie.soup.project.datamodel.imports.Imports; import org.kie.workbench.common.services.shared.project.KieModuleService; import 
org.kie.workbench.common.widgets.client.callbacks.CommandDrivenErrorCallback; import org.kie.workbench.common.widgets.client.datamodel.AsyncPackageDataModelOracle; import org.kie.workbench.common.widgets.client.popups.validation.ValidationPopup; import org.kie.workbench.common.widgets.metadata.client.validation.AssetUpdateValidator; import org.kie.workbench.common.widgets.metadata.client.widget.OverviewWidgetPresenter; import org.uberfire.backend.vfs.ObservablePath; import org.uberfire.backend.vfs.Path; import org.uberfire.client.annotations.WorkbenchEditor; import org.uberfire.client.annotations.WorkbenchMenu; import org.uberfire.client.annotations.WorkbenchPartTitle; import org.uberfire.client.annotations.WorkbenchPartView; import org.uberfire.client.callbacks.Callback; import org.uberfire.client.mvp.LockManager; import org.uberfire.client.mvp.LockTarget; import org.uberfire.client.mvp.PlaceManager; import org.uberfire.client.mvp.SaveInProgressEvent; import org.uberfire.client.mvp.UpdatedLockStatusEvent; import org.uberfire.client.workbench.events.ChangeTitleWidgetEvent; import org.uberfire.ext.editor.commons.client.menu.MenuItems; import org.uberfire.ext.editor.commons.client.menu.common.SaveAndRenameCommandBuilder; import org.uberfire.ext.editor.commons.client.resources.i18n.CommonConstants; import org.uberfire.ext.editor.commons.version.events.RestoreEvent; import org.uberfire.ext.widgets.common.client.callbacks.DefaultErrorCallback; import org.uberfire.ext.widgets.common.client.callbacks.HasBusyIndicatorDefaultErrorCallback; import org.uberfire.lifecycle.OnClose; import org.uberfire.lifecycle.OnFocus; import org.uberfire.lifecycle.OnMayClose; import org.uberfire.lifecycle.OnStartup; import org.uberfire.mvp.Command; import org.uberfire.mvp.ParameterizedCommand; import org.uberfire.mvp.PlaceRequest; import org.uberfire.mvp.impl.PathPlaceRequest; import org.uberfire.workbench.events.NotificationEvent; import org.uberfire.workbench.model.menu.Menus; import static org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTablePresenter.Access.LockedBy.CURRENT_USER; import static org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTablePresenter.Access.LockedBy.NOBODY; import static org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTablePresenter.Access.LockedBy.OTHER_USER; import static org.uberfire.client.annotations.WorkbenchEditor.LockingStrategy.EDITOR_PROVIDED; import static org.uberfire.ext.widgets.common.client.common.ConcurrentChangePopup.newConcurrentDelete; import static org.uberfire.ext.widgets.common.client.common.ConcurrentChangePopup.newConcurrentRename; import static org.uberfire.ext.widgets.common.client.common.ConcurrentChangePopup.newConcurrentUpdate; /** * Guided Decision Table Graph Editor Presenter */ @Dependent @WorkbenchEditor(identifier = "GuidedDecisionTableGraphEditor", supportedTypes = {GuidedDTableGraphResourceType.class}, lockingStrategy = EDITOR_PROVIDED) public class GuidedDecisionTableGraphEditorPresenter extends BaseGuidedDecisionTableEditorPresenter { private final Caller<GuidedDecisionTableGraphEditorService> graphService; private final Caller<KieModuleService> moduleService; private final Caller<GuidedDecisionTableGraphSaveAndRenameService> graphSaveAndRenameService; private final Event<SaveInProgressEvent> saveInProgressEvent; private final LockManager lockManager; private final SaveAndRenameCommandBuilder<List<GuidedDecisionTableEditorContent>, Metadata> 
saveAndRenameCommandBuilder; protected ObservablePath.OnConcurrentUpdateEvent concurrentUpdateSessionInfo = null; protected Access access = new Access(); protected Integer originalGraphHash; private GuidedDecisionTableEditorGraphContent content; private LoadGraphLatch loadGraphLatch = null; private SaveGraphLatch saveGraphLatch = null; private NewGuidedDecisionTableWizardHelper helper; @Inject public GuidedDecisionTableGraphEditorPresenter(final View view, final Caller<GuidedDecisionTableEditorService> service, final Caller<GuidedDecisionTableGraphEditorService> graphService, final Caller<KieModuleService> moduleService, final Caller<GuidedDecisionTableGraphSaveAndRenameService> graphSaveAndRenameService, final Event<NotificationEvent> notification, final Event<SaveInProgressEvent> saveInProgressEvent, final Event<DecisionTableSelectedEvent> decisionTableSelectedEvent, final ValidationPopup validationPopup, final GuidedDTableGraphResourceType dtGraphResourceType, final EditMenuBuilder editMenuBuilder, final ViewMenuBuilder viewMenuBuilder, final InsertMenuBuilder insertMenuBuilder, final RadarMenuBuilder radarMenuBuilder, final GuidedDecisionTableModellerView.Presenter modeller, final NewGuidedDecisionTableWizardHelper helper, final SyncBeanManager beanManager, final PlaceManager placeManager, final LockManager lockManager, final ColumnsPage columnsPage, final SaveAndRenameCommandBuilder<List<GuidedDecisionTableEditorContent>, Metadata> saveAndRenameCommandBuilder, final AlertsButtonMenuItemBuilder alertsButtonMenuItemBuilder) { super(view, service, notification, decisionTableSelectedEvent, validationPopup, dtGraphResourceType, editMenuBuilder, viewMenuBuilder, insertMenuBuilder, radarMenuBuilder, modeller, beanManager, placeManager, columnsPage, alertsButtonMenuItemBuilder); this.graphService = graphService; this.moduleService = moduleService; this.saveInProgressEvent = saveInProgressEvent; this.helper = helper; this.lockManager = lockManager; this.graphSaveAndRenameService = graphSaveAndRenameService; this.saveAndRenameCommandBuilder = saveAndRenameCommandBuilder; } @PostConstruct public void init() { super.init(); //Selecting a Decision Table in the document selector fires a selection event registeredDocumentsMenuBuilder.setActivateDocumentCommand((document) -> { final GuidedDecisionTablePresenter dtPresenter = ((GuidedDecisionTablePresenter) document); decisionTableSelectedEvent.fire(new DecisionTableSelectedEvent(dtPresenter)); }); //Removing a Decision Table from the document selector is equivalent to closing the editor registeredDocumentsMenuBuilder.setRemoveDocumentCommand((document) -> { final GuidedDecisionTablePresenter dtPresenter = ((GuidedDecisionTablePresenter) document); if (mayClose(dtPresenter)) { removeDocument(dtPresenter); } }); registeredDocumentsMenuBuilder.setNewDocumentCommand(this::onNewDocument); } void onNewDocument() { moduleService.call((org.guvnor.common.services.project.model.Package pkg) -> { helper.createNewGuidedDecisionTable(pkg.getPackageMainResourcesPath(), "", GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY, GuidedDecisionTable52.HitPolicy.NONE, view, (path) -> onOpenDocumentsInEditor(Collections.singletonList(path))); }).resolvePackage(editorPath); } @Override @OnStartup public void onStartup(final ObservablePath path, final PlaceRequest placeRequest) { super.onStartup(path, placeRequest); initialiseEditor(path, placeRequest); } @Override @OnFocus public void onFocus() { super.onFocus(); } @Override public void loadDocument(final ObservablePath 
path, final PlaceRequest placeRequest) { throw new UnsupportedOperationException(); } void initialiseEditor(final ObservablePath path, final PlaceRequest placeRequest) { this.access.setReadOnly(placeRequest.getParameter("readOnly", null) != null); initialiseLockManager(); initialiseVersionManager(); addFileChangeListeners(path); loadDocumentGraph(path); } void initialiseVersionManager() { versionRecordManager.init(null, editorPath, (versionRecord) -> { versionRecordManager.setVersion(versionRecord.id()); access.setReadOnly(!versionRecordManager.isLatest(versionRecord)); registeredDocumentsMenuBuilder.setReadOnly(isReadOnly()); reload(); }); } void loadDocumentGraph(final ObservablePath path) { view.showLoading(); view.refreshTitle(getTitleText()); graphService.call(getLoadGraphContentSuccessCallback(), getNoSuchFileExceptionErrorCallback()).loadContent(path); } private RemoteCallback<GuidedDecisionTableEditorGraphContent> getLoadGraphContentSuccessCallback() { return (content) -> { this.content = content; this.originalGraphHash = content.getModel().hashCode(); this.concurrentUpdateSessionInfo = null; final GuidedDecisionTableEditorGraphModel model = content.getModel(); final Set<GuidedDecisionTableEditorGraphModel.GuidedDecisionTableGraphEntry> modelEntries = model.getEntries(); initialiseEditorTabsWhenNoDocuments(); if (modelEntries == null || modelEntries.isEmpty()) { view.hideBusyIndicator(); return; } loadGraphLatch = new LoadGraphLatch(modelEntries.size(), getSelectDecisionTableCommand(modelEntries.iterator().next().getPathHead()), () -> { originalGraphHash = buildModelFromEditor().hashCode(); modeller.getView().getGridPanel().setFocus(true); }); modelEntries.stream().forEach(loadGraphLatch::loadDocumentGraphEntry); }; } private ParameterizedCommand<GuidedDecisionTableView.Presenter> getSelectDecisionTableCommand(final Path dtToSelectPath) { return (dtPresenter) -> { if (dtPresenter.getCurrentPath().getOriginal().equals(dtToSelectPath)) { decisionTableSelectedEvent.fire(new DecisionTableSelectedEvent(dtPresenter, false)); } }; } PathPlaceRequest getPathPlaceRequest(final Path path) { return new PathPlaceRequest(path); } void initialiseLockManager() { lockManager.init(new LockTarget(editorPath, view.asWidget(), editorPlaceRequest, () -> editorPath.getFileName() + " - " + resourceType.getDescription(), () -> {/*nothing*/})); } @Override @WorkbenchPartTitle public String getTitleText() { return versionRecordManager.getCurrentPath().getFileName() + " - " + resourceType.getDescription(); } @Override @WorkbenchPartView public IsWidget getWidget() { return super.getWidget(); } @Override @WorkbenchMenu public Menus getMenus() { return super.getMenus(); } @Override @OnMayClose public boolean mayClose() { setMayCloseHandler(this::doMayCloseGraph); boolean mayClose = mayClose(originalGraphHash, buildModelFromEditor().hashCode()); setMayCloseHandler(this::doMayCloseDocument); mayClose = mayClose && super.mayClose(); return mayClose; } private boolean doMayCloseGraph(final Integer originalHashCode, final Integer currentHashCode) { if (this.isDirty(originalHashCode, currentHashCode) || overviewWidget.isDirty()) { return this.editorView.confirmClose(); } return true; } private boolean doMayCloseDocument(final Integer originalHashCode, final Integer currentHashCode) { if (this.isDirty(originalHashCode, currentHashCode)) { return this.editorView.confirmClose(); } return true; } GuidedDecisionTableEditorGraphModel buildModelFromEditor() { final GuidedDecisionTableEditorGraphModel model = new 
GuidedDecisionTableEditorGraphModel(); for (GuidedDecisionTableView.Presenter dtPresenter : modeller.getAvailableDecisionTables()) { model.getEntries().add(new GuidedDecisionTableEditorGraphModel.GuidedDecisionTableGraphEntry(dtPresenter.getLatestPath(), dtPresenter.getCurrentPath(), dtPresenter.getView().getX(), dtPresenter.getView().getY())); } return model; } @Override @OnClose public void onClose() { lockManager.releaseLock(); super.onClose(); } @Override protected void onDecisionTableSelected(final @Observes DecisionTableSelectedEvent event) { final Optional<GuidedDecisionTableView.Presenter> dtPresenter = event.getPresenter(); if (!dtPresenter.isPresent()) { initialiseEditorTabsWhenNoDocuments(); } super.onDecisionTableSelected(event); if (event.isLockRequired()) { if (!isReadOnly()) { lockManager.acquireLock(); } } } @Override public void makeMenuBar() { if (canUpdateProject()) { fileMenuBuilder .addSave(getSaveMenuItem()) .addCopy(versionRecordManager::getCurrentPath, assetUpdateValidator) .addRename(getSaveAndRenameCommand()) .addDelete(versionRecordManager::getPathToLatest, assetUpdateValidator); } this.menus = fileMenuBuilder .addValidate(() -> onValidate(getActiveDocument())) .addNewTopLevelMenu(getEditMenuItem()) .addNewTopLevelMenu(getViewMenuItem()) .addNewTopLevelMenu(getInsertMenuItem()) .addNewTopLevelMenu(getRadarMenuItem()) .addNewTopLevelMenu(getRegisteredDocumentsMenuItem()) .addNewTopLevelMenu(getVersionManagerMenuItem()) .addNewTopLevelMenu(alertsButtonMenuItemBuilder.build()) .build(); } protected Command getSaveAndRenameCommand() { return saveAndRenameCommandBuilder .addPathSupplier(getPathSupplier()) .addValidator(getValidator()) .addRenameService(getGraphSaveAndRenameService()) .addMetadataSupplier(getMetadataSupplier()) .addContentSupplier(getContentSupplier()) .addIsDirtySupplier(getIsDirtySupplier()) .addSuccessCallback(onSuccess()) .build(); } Supplier<Path> getPathSupplier() { return () -> versionRecordManager.getPathToLatest(); } AssetUpdateValidator getValidator() { return assetUpdateValidator; } Caller<GuidedDecisionTableGraphSaveAndRenameService> getGraphSaveAndRenameService() { return graphSaveAndRenameService; } Supplier<Metadata> getMetadataSupplier() { return () -> { final Overview overview = getActiveDocument().getOverview(); return overview.getMetadata(); }; } Supplier<List<GuidedDecisionTableEditorContent>> getContentSupplier() { return () -> getAvailableDecisionTables() .stream() .map(c -> new GuidedDecisionTableEditorContent(c.getModel(), c.getOverview(), c.getCurrentPath(), c.getLatestPath())) .collect(Collectors.toList()); } Supplier<Boolean> getIsDirtySupplier() { return () -> isGuidedDecisionTablesDirty() || isGraphDirty() || isOverviewWidgetDirty(); } boolean isGuidedDecisionTablesDirty() { return getAvailableDecisionTables().stream().anyMatch(dtPresenter -> { final Integer originalHashCode = originalHashCode(dtPresenter); final Integer currentHashCode = currentHashCode(dtPresenter); return isDirty(originalHashCode, currentHashCode); }); } boolean isGraphDirty() { return isDirty(originalGraphHash, getCurrentHashCode()); } int getCurrentHashCode() { return buildModelFromEditor().hashCode(); } boolean isOverviewWidgetDirty() { return getOverviewWidget().isDirty(); } OverviewWidgetPresenter getOverviewWidget() { return overviewWidget; } int originalHashCode(final GuidedDecisionTableView.Presenter dtPresenter) { return dtPresenter.getOriginalHashCode(); } int currentHashCode(final GuidedDecisionTableView.Presenter dtPresenter) { return 
dtPresenter.getModel().hashCode(); } Set<GuidedDecisionTableView.Presenter> getAvailableDecisionTables() { return modeller.getAvailableDecisionTables(); } ParameterizedCommand<Path> onSuccess() { return (path) -> { final Set<GuidedDecisionTableView.Presenter> allDecisionTables = new HashSet<>(modeller.getAvailableDecisionTables()); final int size = allDecisionTables.size(); final SaveGraphLatch saveGraphLatch = new SaveGraphLatch(size, "Save and Rename"); saveGraphLatch.saveDocumentGraph(path); }; } @Override protected void enableMenus(final boolean enabled) { super.enableMenus(enabled); getRegisteredDocumentsMenuItem().setEnabled(enabled); } @Override public void getAvailableDocumentPaths(final Callback<List<Path>> callback) { view.showLoading(); graphService.call(new RemoteCallback<List<Path>>() { @Override public void callback(final List<Path> paths) { view.hideBusyIndicator(); callback.callback(paths); } }, new HasBusyIndicatorDefaultErrorCallback(view)).listDecisionTablesInPackage(editorPath); } @Override public void onOpenDocumentsInEditor(final List<Path> selectedDocumentPaths) { if (selectedDocumentPaths == null || selectedDocumentPaths.isEmpty()) { return; } view.showLoading(); loadGraphLatch = new LoadGraphLatch(selectedDocumentPaths.size(), getSelectDecisionTableCommand(selectedDocumentPaths.get(0))); selectedDocumentPaths.stream().forEach((p) -> { final PathPlaceRequest placeRequest = getPathPlaceRequest(p); loadGraphLatch.loadDocument(placeRequest.getPath(), placeRequest); }); } @Override protected void doSave() { if (isReadOnly()) { if (versionRecordManager.isCurrentLatest()) { view.alertReadOnly(); return; } else { versionRecordManager.restoreToCurrentVersion(); return; } } final Set<GuidedDecisionTableView.Presenter> allDecisionTables = new HashSet<>(modeller.getAvailableDecisionTables()); final Set<ObservablePath.OnConcurrentUpdateEvent> concurrentUpdateSessionInfos = new HashSet<>(); allDecisionTables.stream().forEach(dtPresenter -> { final ObservablePath.OnConcurrentUpdateEvent concurrentUpdateSessionInfo = dtPresenter.getConcurrentUpdateSessionInfo(); if (concurrentUpdateSessionInfo != null) { concurrentUpdateSessionInfos.add(concurrentUpdateSessionInfo); } }); if (concurrentUpdateSessionInfo != null) { concurrentUpdateSessionInfos.add(concurrentUpdateSessionInfo); } if (!concurrentUpdateSessionInfos.isEmpty()) { showConcurrentUpdatesPopup(); } else { saveDocumentGraphEntries(); } } void showConcurrentUpdatesPopup() { newConcurrentUpdate(concurrentUpdateSessionInfo.getPath(), concurrentUpdateSessionInfo.getIdentity(), this::saveDocumentGraphEntries, () -> {/*Do nothing*/}, this::reload).show(); } void saveDocumentGraphEntries() { final Set<GuidedDecisionTableView.Presenter> allDecisionTables = new HashSet<>(modeller.getAvailableDecisionTables()); savePopUpPresenter.show(editorPath, (commitMessage) -> { editorView.showSaving(); saveGraphLatch = new SaveGraphLatch(allDecisionTables.size(), commitMessage); if (allDecisionTables.isEmpty()) { saveGraphLatch.saveDocumentGraph(); } else { allDecisionTables.stream().forEach((dtPresenter) -> { saveGraphLatch.saveDocumentGraphEntry(dtPresenter); saveInProgressEvent.fire(new SaveInProgressEvent(dtPresenter.getLatestPath())); }); } }); } @Override protected void initialiseVersionManager(final GuidedDecisionTableView.Presenter dtPresenter) { //Do nothing. We maintain a single VersionRecordManager for the graph itself. 
} @Override protected void initialiseKieEditorTabs(final GuidedDecisionTableView.Presenter document, final Overview overview, final AsyncPackageDataModelOracle dmo, final Imports imports, final boolean isReadOnly) { kieEditorWrapperView.clear(); kieEditorWrapperView.addMainEditorPage(editorView); kieEditorWrapperView.addOverviewPage(overviewWidget, () -> overviewWidget.refresh(versionRecordManager.getVersion())); kieEditorWrapperView.addSourcePage(sourceWidget); kieEditorWrapperView.addImportsTab(importsWidget); overviewWidget.setContent(content.getOverview(), versionRecordManager.getPathToLatest()); importsWidget.setContent(dmo, imports, isReadOnly); } void initialiseEditorTabsWhenNoDocuments() { getEditMenuItem().setEnabled(false); getViewMenuItem().setEnabled(false); getInsertMenuItem().setEnabled(false); getRadarMenuItem().setEnabled(false); enableMenuItem(false, MenuItems.VALIDATE); kieEditorWrapperView.clear(); kieEditorWrapperView.addMainEditorPage(editorView); kieEditorWrapperView.addOverviewPage(overviewWidget, () -> overviewWidget.refresh(versionRecordManager.getVersion())); overviewWidget.setContent(content.getOverview(), versionRecordManager.getPathToLatest()); } void addFileChangeListeners(final ObservablePath path) { path.onRename(this::onRename); path.onDelete(this::onDelete); path.onConcurrentUpdate((info) -> concurrentUpdateSessionInfo = info); path.onConcurrentRename((info) -> newConcurrentRename(info.getSource(), info.getTarget(), info.getIdentity(), () -> enableMenus(false), this::reload).show()); path.onConcurrentDelete((info) -> newConcurrentDelete(info.getPath(), info.getIdentity(), () -> enableMenus(false), () -> placeManager.closePlace(editorPlaceRequest)).show()); } void onDelete() { scheduleClosure(() -> placeManager.forceClosePlace(editorPlaceRequest)); } void scheduleClosure(final Scheduler.ScheduledCommand command) { Scheduler.get().scheduleDeferred(command); } void onRename() { reload(); changeTitleEvent.fire(new ChangeTitleWidgetEvent(editorPlaceRequest, getTitleText(), editorView.getTitleWidget())); } void reload() { final List<GuidedDecisionTableView.Presenter> documents = new ArrayList<>(this.documents); documents.stream().forEach(this::deregisterDocument); modeller.getView().clear(); modeller.releaseDecisionTables(); loadDocumentGraph(versionRecordManager.getCurrentPath()); } void onRestore(final @Observes RestoreEvent restore) { if (versionRecordManager.getCurrentPath() == null || restore == null || restore.getPath() == null) { return; } if (versionRecordManager.getCurrentPath().equals(restore.getPath())) { initialiseEditor(versionRecordManager.getPathToLatest(), editorPlaceRequest); notification.fire(new NotificationEvent(CommonConstants.INSTANCE.ItemRestored())); } } private boolean isReadOnly() { return !this.access.isEditable(); } void onUpdatedLockStatusEvent(final @Observes UpdatedLockStatusEvent event) { super.onUpdatedLockStatusEvent(event); if (editorPath == null) { return; } if (editorPath.equals(event.getFile())) { if (event.isLocked()) { access.setLock(event.isLockedByCurrentUser() ? 
CURRENT_USER : OTHER_USER); } else { access.setLock(NOBODY); } } } private class LoadGraphLatch { private int dtGraphElementCount; private Command onAllDocumentGraphEntriesLoadedCommand; private ParameterizedCommand<GuidedDecisionTableView.Presenter> onDocumentGraphEntryLoadedCommand; private LoadGraphLatch(final int dtGraphElementCount, final ParameterizedCommand<GuidedDecisionTableView.Presenter> onDocumentGraphEntryLoadedCommand) { this(dtGraphElementCount, onDocumentGraphEntryLoadedCommand, () -> {/*Do nothing*/}); } private LoadGraphLatch(final int dtGraphElementCount, final ParameterizedCommand<GuidedDecisionTableView.Presenter> onDocumentGraphEntryLoadedCommand, final Command onAllDocumentGraphEntriesLoadedCommand) { this.dtGraphElementCount = dtGraphElementCount; this.onDocumentGraphEntryLoadedCommand = onDocumentGraphEntryLoadedCommand; this.onAllDocumentGraphEntriesLoadedCommand = onAllDocumentGraphEntriesLoadedCommand; } private void onDocumentGraphEntryLoaded(final GuidedDecisionTableView.Presenter dtPresenter) { if (onDocumentGraphEntryLoadedCommand != null) { onDocumentGraphEntryLoadedCommand.execute(dtPresenter); } } private void hideLoadingIndicator() { dtGraphElementCount--; if (dtGraphElementCount == 0) { if (onAllDocumentGraphEntriesLoadedCommand != null) { onAllDocumentGraphEntriesLoadedCommand.execute(); } view.hideBusyIndicator(); } } private void loadDocumentGraphEntry(final GuidedDecisionTableEditorGraphModel.GuidedDecisionTableGraphEntry entry) { final PathPlaceRequest placeRequest = getPathPlaceRequest(entry.getPathHead()); final ObservablePath pathHead = placeRequest.getPath(); final Path pathVersion = entry.getPathVersion(); final Double x = entry.getX(); final Double y = entry.getY(); if (isReadOnly()) { placeRequest.addParameter("readOnly", ""); } service.call(getLoadDocumentGraphEntryContentSuccessCallback(pathHead, placeRequest, x, y), getLoadErrorCallback()).loadContent(pathVersion); } private RemoteCallback<GuidedDecisionTableEditorContent> getLoadDocumentGraphEntryContentSuccessCallback(final ObservablePath path, final PlaceRequest placeRequest, final Double x, final Double y) { return (content) -> { //Path is set to null when the Editor is closed (which can happen before async calls complete). if (path == null) { return; } //Add Decision Table to modeller final GuidedDecisionTableView.Presenter dtPresenter = modeller.addDecisionTable(path, placeRequest, content, placeRequest.getParameter("readOnly", null) != null, x, y); registerDocument(dtPresenter); onDocumentGraphEntryLoaded(dtPresenter); hideLoadingIndicator(); }; } private void loadDocument(final ObservablePath path, final PlaceRequest placeRequest) { service.call(getLoadContentSuccessCallback(path, placeRequest), getLoadErrorCallback()).loadContent(path); } private RemoteCallback<GuidedDecisionTableEditorContent> getLoadContentSuccessCallback(final ObservablePath path, final PlaceRequest placeRequest) { return (content) -> { //Path is set to null when the Editor is closed (which can happen before async calls complete). 
if (path == null) { return; } //Add Decision Table to modeller final GuidedDecisionTableView.Presenter dtPresenter = modeller.addDecisionTable(path, placeRequest, content, placeRequest.getParameter("readOnly", null) != null, null, null); registerDocument(dtPresenter); onDocumentGraphEntryLoaded(dtPresenter); hideLoadingIndicator(); }; } private DefaultErrorCallback getLoadErrorCallback() { final CommandDrivenErrorCallback wrapped = getNoSuchFileExceptionErrorCallback(); final DefaultErrorCallback callback = new DefaultErrorCallback() { @Override public boolean error(final Message message, final Throwable throwable) { hideLoadingIndicator(); return wrapped.error(message, throwable); } }; return callback; } } private class SaveGraphLatch { private final String commitMessage; private int dtGraphElementCount = 0; private SaveGraphLatch(final int dtGraphElementCount, final String commitMessage) { this.dtGraphElementCount = dtGraphElementCount; this.commitMessage = commitMessage; } private void saveDocumentGraph() { dtGraphElementCount--; if (dtGraphElementCount > 0) { return; } saveDocumentGraph(editorPath); } private void saveDocumentGraph(final Path editorPath) { final GuidedDecisionTableEditorGraphModel model = buildModelFromEditor(); graphService.call(new RemoteCallback<Path>() { @Override public void callback(final Path path) { editorView.hideBusyIndicator(); versionRecordManager.reloadVersions(path); originalGraphHash = model.hashCode(); concurrentUpdateSessionInfo = null; notificationEvent.fire(new NotificationEvent(CommonConstants.INSTANCE.ItemSavedSuccessfully())); } }, new HasBusyIndicatorDefaultErrorCallback(view)).save(editorPath, model, content.getOverview().getMetadata(), commitMessage); } private void saveDocumentGraphEntry(final GuidedDecisionTableView.Presenter dtPresenter) { final ObservablePath path = dtPresenter.getCurrentPath(); final GuidedDecisionTable52 model = dtPresenter.getModel(); final Metadata metadata = dtPresenter.getOverview().getMetadata(); service.call(getSaveSuccessCallback(dtPresenter, model.hashCode()), getSaveErrorCallback()).save(path, model, metadata, commitMessage); } private RemoteCallback<Path> getSaveSuccessCallback(final GuidedDecisionTableView.Presenter document, final int currentHashCode) { return (path) -> { document.setConcurrentUpdateSessionInfo(null); document.setOriginalHashCode(currentHashCode); saveDocumentGraph(); }; } private DefaultErrorCallback getSaveErrorCallback() { return new HasBusyIndicatorDefaultErrorCallback(view) { @Override public boolean error(final Message message, final Throwable throwable) { saveDocumentGraph(); return super.error(message, throwable); } }; } } }
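/*
 * Illustrative sketch, separate from the editor above: the LoadGraphLatch and SaveGraphLatch
 * inner classes both follow the same idea - count down once per completed asynchronous
 * callback and run a final command when the count reaches zero. This standalone class shows
 * that pattern with plain JDK types; the names used here are invented for the example.
 */
class CompletionLatchSketch {
    private int remaining;
    private final Runnable onAllDone;

    CompletionLatchSketch(int expectedCallbacks, Runnable onAllDone) {
        this.remaining = expectedCallbacks;
        this.onAllDone = onAllDone;
    }

    /** Invoked from each asynchronous callback, whether it succeeded or failed. */
    void onOneDone() {
        remaining--;
        if (remaining == 0) {
            onAllDone.run();   // e.g. hide the busy indicator, recompute the "original" hash
        }
    }

    public static void main(String[] args) {
        CompletionLatchSketch latch = new CompletionLatchSketch(3, () -> System.out.println("all documents loaded"));
        latch.onOneDone();
        latch.onOneDone();
        latch.onOneDone();   // prints "all documents loaded"
    }
}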
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner; import com.facebook.presto.block.BlockSerdeUtil; import com.facebook.presto.operator.scalar.VarbinaryFunctions; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.BlockEncodingSerde; import com.facebook.presto.spi.function.Signature; import com.facebook.presto.spi.relation.FullyQualifiedName; import com.facebook.presto.spi.relation.RowExpression; import com.facebook.presto.spi.type.ArrayType; import com.facebook.presto.spi.type.CharType; import com.facebook.presto.spi.type.DecimalType; import com.facebook.presto.spi.type.Decimals; import com.facebook.presto.spi.type.FunctionType; import com.facebook.presto.spi.type.MapType; import com.facebook.presto.spi.type.RowType; import com.facebook.presto.spi.type.SqlDate; import com.facebook.presto.spi.type.StandardTypes; import com.facebook.presto.spi.type.Type; import com.facebook.presto.spi.type.TypeSignature; import com.facebook.presto.spi.type.VarcharType; import com.facebook.presto.sql.tree.ArithmeticUnaryExpression; import com.facebook.presto.sql.tree.BooleanLiteral; import com.facebook.presto.sql.tree.Cast; import com.facebook.presto.sql.tree.DecimalLiteral; import com.facebook.presto.sql.tree.DoubleLiteral; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.sql.tree.GenericLiteral; import com.facebook.presto.sql.tree.LongLiteral; import com.facebook.presto.sql.tree.NullLiteral; import com.facebook.presto.sql.tree.QualifiedName; import com.facebook.presto.sql.tree.StringLiteral; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.primitives.Primitives; import io.airlift.slice.DynamicSliceOutput; import io.airlift.slice.Slice; import io.airlift.slice.SliceOutput; import io.airlift.slice.SliceUtf8; import java.util.List; import java.util.Set; import static com.facebook.presto.metadata.BuiltInFunctionNamespaceManager.DEFAULT_NAMESPACE; import static com.facebook.presto.spi.function.FunctionKind.SCALAR; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.DateType.DATE; import static com.facebook.presto.spi.type.Decimals.isShortDecimal; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.IntegerType.INTEGER; import static com.facebook.presto.spi.type.RealType.REAL; import static com.facebook.presto.spi.type.SmallintType.SMALLINT; import static com.facebook.presto.spi.type.TinyintType.TINYINT; import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY; import static com.facebook.presto.sql.relational.Expressions.constant; import static com.facebook.presto.sql.relational.Expressions.constantNull; import static com.facebook.presto.type.UnknownType.UNKNOWN; import static com.google.common.base.Preconditions.checkArgument; import 
static java.lang.Float.intBitsToFloat; import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; public final class LiteralEncoder { // hack: java classes for types that can be used with magic literals public static final String MAGIC_LITERAL_FUNCTION_PREFIX = "$literal$"; private static final Set<Class<?>> SUPPORTED_LITERAL_TYPES = ImmutableSet.of(long.class, double.class, Slice.class, boolean.class); private final BlockEncodingSerde blockEncodingSerde; public LiteralEncoder(BlockEncodingSerde blockEncodingSerde) { this.blockEncodingSerde = requireNonNull(blockEncodingSerde, "blockEncodingSerde is null"); } public List<Expression> toExpressions(List<?> objects, List<? extends Type> types) { requireNonNull(objects, "objects is null"); requireNonNull(types, "types is null"); checkArgument(objects.size() == types.size(), "objects and types do not have the same size"); ImmutableList.Builder<Expression> expressions = ImmutableList.builder(); for (int i = 0; i < objects.size(); i++) { Object object = objects.get(i); Type type = types.get(i); expressions.add(toExpression(object, type)); } return expressions.build(); } // Unlike toExpression, toRowExpression should be very straightforward given object is serializable public static RowExpression toRowExpression(Object object, Type type) { requireNonNull(type, "type is null"); if (object instanceof RowExpression) { return (RowExpression) object; } if (object == null) { return constantNull(type); } return constant(object, type); } @Deprecated public Expression toExpression(Object object, Type type) { requireNonNull(type, "type is null"); if (object instanceof Expression) { return (Expression) object; } if (object == null) { if (type.equals(UNKNOWN)) { return new NullLiteral(); } return new Cast(new NullLiteral(), type.getTypeSignature().toString(), false, true); } if (type.equals(TINYINT)) { return new GenericLiteral("TINYINT", object.toString()); } if (type.equals(SMALLINT)) { return new GenericLiteral("SMALLINT", object.toString()); } if (type.equals(INTEGER)) { return new LongLiteral(object.toString()); } if (type.equals(BIGINT)) { LongLiteral expression = new LongLiteral(object.toString()); if (expression.getValue() >= Integer.MIN_VALUE && expression.getValue() <= Integer.MAX_VALUE) { return new GenericLiteral("BIGINT", object.toString()); } return new LongLiteral(object.toString()); } checkArgument(Primitives.wrap(type.getJavaType()).isInstance(object), "object.getClass (%s) and type.getJavaType (%s) do not agree", object.getClass(), type.getJavaType()); if (type.equals(DOUBLE)) { Double value = (Double) object; // WARNING: the ORC predicate code depends on NaN and infinity not appearing in a tuple domain, so // if you remove this, you will need to update the TupleDomainOrcPredicate // When changing this, don't forget about similar code for REAL below if (value.isNaN()) { return new FunctionCall(QualifiedName.of("nan"), ImmutableList.of()); } if (value.equals(Double.NEGATIVE_INFINITY)) { return ArithmeticUnaryExpression.negative(new FunctionCall(QualifiedName.of("infinity"), ImmutableList.of())); } if (value.equals(Double.POSITIVE_INFINITY)) { return new FunctionCall(QualifiedName.of("infinity"), ImmutableList.of()); } return new DoubleLiteral(object.toString()); } if (type.equals(REAL)) { Float value = intBitsToFloat(((Long) object).intValue()); // WARNING for ORC predicate code as above (for double) if (value.isNaN()) { return new Cast(new FunctionCall(QualifiedName.of("nan"), ImmutableList.of()), 
StandardTypes.REAL); } if (value.equals(Float.NEGATIVE_INFINITY)) { return ArithmeticUnaryExpression.negative(new Cast(new FunctionCall(QualifiedName.of("infinity"), ImmutableList.of()), StandardTypes.REAL)); } if (value.equals(Float.POSITIVE_INFINITY)) { return new Cast(new FunctionCall(QualifiedName.of("infinity"), ImmutableList.of()), StandardTypes.REAL); } return new GenericLiteral("REAL", value.toString()); } if (type instanceof DecimalType) { String string; if (isShortDecimal(type)) { string = Decimals.toString((long) object, ((DecimalType) type).getScale()); } else { string = Decimals.toString((Slice) object, ((DecimalType) type).getScale()); } return new Cast(new DecimalLiteral(string), type.getDisplayName()); } if (type instanceof VarcharType) { VarcharType varcharType = (VarcharType) type; Slice value = (Slice) object; StringLiteral stringLiteral = new StringLiteral(value.toStringUtf8()); if (!varcharType.isUnbounded() && varcharType.getLengthSafe() == SliceUtf8.countCodePoints(value)) { return stringLiteral; } return new Cast(stringLiteral, type.getDisplayName(), false, true); } if (type instanceof CharType) { StringLiteral stringLiteral = new StringLiteral(((Slice) object).toStringUtf8()); return new Cast(stringLiteral, type.getDisplayName(), false, true); } if (type.equals(BOOLEAN)) { return new BooleanLiteral(object.toString()); } if (type.equals(DATE)) { return new GenericLiteral("DATE", new SqlDate(toIntExact((Long) object)).toString()); } if (object instanceof Block) { SliceOutput output = new DynamicSliceOutput(toIntExact(((Block) object).getSizeInBytes())); BlockSerdeUtil.writeBlock(blockEncodingSerde, output, (Block) object); object = output.slice(); // This if condition will evaluate to true: object instanceof Slice && !type.equals(VARCHAR) } Signature signature = getMagicLiteralFunctionSignature(type); if (object instanceof Slice) { // HACK: we need to serialize VARBINARY in a format that can be embedded in an expression to be // able to encode it in the plan that gets sent to workers. // We do this by transforming the in-memory varbinary into a call to from_base64(<base64-encoded value>) FunctionCall fromBase64 = new FunctionCall(QualifiedName.of("from_base64"), ImmutableList.of(new StringLiteral(VarbinaryFunctions.toBase64((Slice) object).toStringUtf8()))); return new FunctionCall(QualifiedName.of(signature.getNameSuffix()), ImmutableList.of(fromBase64)); } Expression rawLiteral = toExpression(object, typeForMagicLiteral(type)); return new FunctionCall(QualifiedName.of(signature.getNameSuffix()), ImmutableList.of(rawLiteral)); } public static boolean isSupportedLiteralType(Type type) { if (type instanceof FunctionType) { // FunctionType contains compiled lambda thus not serializable. 
return false; } if (type instanceof ArrayType) { return isSupportedLiteralType(((ArrayType) type).getElementType()); } else if (type instanceof RowType) { RowType rowType = (RowType) type; return rowType.getTypeParameters().stream() .allMatch(LiteralEncoder::isSupportedLiteralType); } else if (type instanceof MapType) { MapType mapType = (MapType) type; return isSupportedLiteralType(mapType.getKeyType()) && isSupportedLiteralType(mapType.getValueType()); } return SUPPORTED_LITERAL_TYPES.contains(type.getJavaType()); } public static long estimatedSizeInBytes(Object object) { if (object == null) { return 1; } Class<?> javaType = object.getClass(); if (javaType == Long.class) { return Long.BYTES; } else if (javaType == Double.class) { return Double.BYTES; } else if (javaType == Boolean.class) { return 1; } else if (object instanceof Block) { return ((Block) object).getSizeInBytes(); } else if (object instanceof Slice) { return ((Slice) object).length(); } // unknown for rest of types return Integer.MAX_VALUE; } public static Signature getMagicLiteralFunctionSignature(Type type) { TypeSignature argumentType = typeForMagicLiteral(type).getTypeSignature(); return new Signature(FullyQualifiedName.of(DEFAULT_NAMESPACE, MAGIC_LITERAL_FUNCTION_PREFIX + type.getTypeSignature()), SCALAR, type.getTypeSignature(), argumentType); } private static Type typeForMagicLiteral(Type type) { Class<?> clazz = type.getJavaType(); clazz = Primitives.unwrap(clazz); if (clazz == long.class) { return BIGINT; } if (clazz == double.class) { return DOUBLE; } if (!clazz.isPrimitive()) { if (type instanceof VarcharType) { return type; } else { return VARBINARY; } } if (clazz == boolean.class) { return BOOLEAN; } throw new IllegalArgumentException("Unhandled Java type: " + clazz.getName()); } }
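/*
 * Illustrative sketch, independent of the Presto sources above: isSupportedLiteralType(..)
 * recurses through container types (array, row, map) and accepts a leaf type only when its
 * Java representation is in a small allowed set. The toy type model below is invented for
 * this example and uses only JDK classes; it is not Presto's Type hierarchy.
 */
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class RecursiveTypeCheckSketch {
    // A leaf "type" is represented by its Java class; a container just wraps child types.
    static final class Leaf { final Class<?> javaType; Leaf(Class<?> javaType) { this.javaType = javaType; } }
    static final class Container { final List<Object> children; Container(Object... children) { this.children = Arrays.asList(children); } }

    private static final Set<Class<?>> SUPPORTED = new HashSet<>(Arrays.asList(long.class, double.class, boolean.class, String.class));

    static boolean isSupported(Object type) {
        if (type instanceof Container) {
            // A container is encodable only if every element type is encodable.
            return ((Container) type).children.stream().allMatch(RecursiveTypeCheckSketch::isSupported);
        }
        return SUPPORTED.contains(((Leaf) type).javaType);
    }

    public static void main(String[] args) {
        System.out.println(isSupported(new Container(new Leaf(long.class))));                         // true
        System.out.println(isSupported(new Container(new Leaf(long.class), new Leaf(Object.class)))); // false
    }
}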
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * Activity.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.admanager.axis.v202111; /** * <p>An activity is a specific user action that an advertiser wants * to track, such as the * completion of a purchase or a visit to a webpage. You * create and manage activities in Ad Manager. * When a user performs the action after seeing an advertiser's * ad, that's a conversion.</p> * * <p>For example, you set up an activity in Ad Manager to * track how many users visit an * advertiser's promotional website after viewing or clicking * on an ad. When a user views an ad, * then visits the page, that's one conversion.</p> */ public class Activity implements java.io.Serializable { /* The unique ID of the {@code Activity}. This value is readonly * and is assigned * by Google. */ private java.lang.Long id; /* The ID of the {@link ActivityGroup} that this {@link Activity} * belongs to. */ private java.lang.Long activityGroupId; /* The name of the {@code Activity}. This attribute is required * and has a maximum length of 255 * characters. */ private java.lang.String name; /* The URL of the webpage where the tags from this activity will * be placed. This attribute is * optional. */ private java.lang.String expectedURL; /* The status of this activity. This attribute is readonly. */ private com.google.api.ads.admanager.axis.v202111.ActivityStatus status; /* The activity type. This attribute is optional and defaults * to {@link Activity.Type#PAGE_VIEWS} */ private com.google.api.ads.admanager.axis.v202111.ActivityType type; public Activity() { } public Activity( java.lang.Long id, java.lang.Long activityGroupId, java.lang.String name, java.lang.String expectedURL, com.google.api.ads.admanager.axis.v202111.ActivityStatus status, com.google.api.ads.admanager.axis.v202111.ActivityType type) { this.id = id; this.activityGroupId = activityGroupId; this.name = name; this.expectedURL = expectedURL; this.status = status; this.type = type; } @Override public String toString() { return com.google.common.base.MoreObjects.toStringHelper(this.getClass()) .omitNullValues() .add("activityGroupId", getActivityGroupId()) .add("expectedURL", getExpectedURL()) .add("id", getId()) .add("name", getName()) .add("status", getStatus()) .add("type", getType()) .toString(); } /** * Gets the id value for this Activity. * * @return id * The unique ID of the {@code Activity}. This value is readonly * and is assigned * by Google. */ public java.lang.Long getId() { return id; } /** * Sets the id value for this Activity. * * @param id * The unique ID of the {@code Activity}. This value is readonly * and is assigned * by Google. */ public void setId(java.lang.Long id) { this.id = id; } /** * Gets the activityGroupId value for this Activity. * * @return activityGroupId * The ID of the {@link ActivityGroup} that this {@link Activity} * belongs to. 
*/ public java.lang.Long getActivityGroupId() { return activityGroupId; } /** * Sets the activityGroupId value for this Activity. * * @param activityGroupId * The ID of the {@link ActivityGroup} that this {@link Activity} * belongs to. */ public void setActivityGroupId(java.lang.Long activityGroupId) { this.activityGroupId = activityGroupId; } /** * Gets the name value for this Activity. * * @return name * The name of the {@code Activity}. This attribute is required * and has a maximum length of 255 * characters. */ public java.lang.String getName() { return name; } /** * Sets the name value for this Activity. * * @param name * The name of the {@code Activity}. This attribute is required * and has a maximum length of 255 * characters. */ public void setName(java.lang.String name) { this.name = name; } /** * Gets the expectedURL value for this Activity. * * @return expectedURL * The URL of the webpage where the tags from this activity will * be placed. This attribute is * optional. */ public java.lang.String getExpectedURL() { return expectedURL; } /** * Sets the expectedURL value for this Activity. * * @param expectedURL * The URL of the webpage where the tags from this activity will * be placed. This attribute is * optional. */ public void setExpectedURL(java.lang.String expectedURL) { this.expectedURL = expectedURL; } /** * Gets the status value for this Activity. * * @return status * The status of this activity. This attribute is readonly. */ public com.google.api.ads.admanager.axis.v202111.ActivityStatus getStatus() { return status; } /** * Sets the status value for this Activity. * * @param status * The status of this activity. This attribute is readonly. */ public void setStatus(com.google.api.ads.admanager.axis.v202111.ActivityStatus status) { this.status = status; } /** * Gets the type value for this Activity. * * @return type * The activity type. This attribute is optional and defaults * to {@link Activity.Type#PAGE_VIEWS} */ public com.google.api.ads.admanager.axis.v202111.ActivityType getType() { return type; } /** * Sets the type value for this Activity. * * @param type * The activity type. 
This attribute is optional and defaults * to {@link Activity.Type#PAGE_VIEWS} */ public void setType(com.google.api.ads.admanager.axis.v202111.ActivityType type) { this.type = type; } private java.lang.Object __equalsCalc = null; public synchronized boolean equals(java.lang.Object obj) { if (!(obj instanceof Activity)) return false; Activity other = (Activity) obj; if (obj == null) return false; if (this == obj) return true; if (__equalsCalc != null) { return (__equalsCalc == obj); } __equalsCalc = obj; boolean _equals; _equals = true && ((this.id==null && other.getId()==null) || (this.id!=null && this.id.equals(other.getId()))) && ((this.activityGroupId==null && other.getActivityGroupId()==null) || (this.activityGroupId!=null && this.activityGroupId.equals(other.getActivityGroupId()))) && ((this.name==null && other.getName()==null) || (this.name!=null && this.name.equals(other.getName()))) && ((this.expectedURL==null && other.getExpectedURL()==null) || (this.expectedURL!=null && this.expectedURL.equals(other.getExpectedURL()))) && ((this.status==null && other.getStatus()==null) || (this.status!=null && this.status.equals(other.getStatus()))) && ((this.type==null && other.getType()==null) || (this.type!=null && this.type.equals(other.getType()))); __equalsCalc = null; return _equals; } private boolean __hashCodeCalc = false; public synchronized int hashCode() { if (__hashCodeCalc) { return 0; } __hashCodeCalc = true; int _hashCode = 1; if (getId() != null) { _hashCode += getId().hashCode(); } if (getActivityGroupId() != null) { _hashCode += getActivityGroupId().hashCode(); } if (getName() != null) { _hashCode += getName().hashCode(); } if (getExpectedURL() != null) { _hashCode += getExpectedURL().hashCode(); } if (getStatus() != null) { _hashCode += getStatus().hashCode(); } if (getType() != null) { _hashCode += getType().hashCode(); } __hashCodeCalc = false; return _hashCode; } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(Activity.class, true); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Activity")); org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("id"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "id")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("activityGroupId"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "activityGroupId")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("name"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "name")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("expectedURL"); elemField.setXmlName(new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "expectedURL")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("status"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "status")); elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Activity.Status")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("type"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "type")); elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "Activity.Type")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } /** * Get Custom Serializer */ public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanSerializer( _javaType, _xmlType, typeDesc); } /** * Get Custom Deserializer */ public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanDeserializer( _javaType, _xmlType, typeDesc); } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package consulo.util.lang; import consulo.annotation.ReviewAfterMigrationToJRE; import org.jetbrains.annotations.Contract; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.lang.ref.Reference; import java.lang.reflect.Proxy; import java.util.Comparator; import java.util.List; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; /** * @author peter */ public class ObjectUtil { private ObjectUtil() { } /** * @see NotNullizer */ public static final Object NULL = sentinel("ObjectUtils.NULL"); /** * Creates a new object which could be used as sentinel value (special value to distinguish from any other object). It does not equal * to any other object. Usually should be assigned to the static final field. * * @param name an object name, returned from {@link #toString()} to simplify the debugging or heap dump analysis * (guaranteed to be stored as sentinel object field). If sentinel is assigned to the static final field, * it's recommended to supply that field name (possibly qualified with the class name). * @return a new sentinel object */ @Nonnull public static Object sentinel(@Nonnull String name) { return new Sentinel(name); } /** * They promise in http://mail.openjdk.java.net/pipermail/core-libs-dev/2018-February/051312.html that * the object reference won't be removed by JIT and GC-ed until this call. */ @ReviewAfterMigrationToJRE(9) public static void reachabilityFence(Object o) { Reference.reachabilityFence(o); } private static class Sentinel { private final String myName; Sentinel(@Nonnull String name) { myName = name; } @Override public String toString() { return myName; } } /** * Creates an instance of class {@code ofInterface} with its {@link Object#toString()} method returning {@code name}. * No other guarantees about return value behaviour. * {@code ofInterface} must represent an interface class. * Useful for stubs in generic code, e.g. for storing in {@code List<T>} to represent empty special value. */ @Nonnull public static <T> T sentinel(@Nonnull final String name, @Nonnull Class<T> ofInterface) { if (!ofInterface.isInterface()) { throw new IllegalArgumentException("Expected interface but got: " + ofInterface); } // java.lang.reflect.Proxy.ProxyClassFactory fails if the class is not available via the classloader. // We must use interface own classloader because classes from plugins are not available via ObjectUtils' classloader. //noinspection unchecked return (T)Proxy.newProxyInstance(ofInterface.getClassLoader(), new Class[]{ofInterface}, (__, method, args) -> { if ("toString".equals(method.getName()) && args.length == 0) { return name; } throw new AbstractMethodError(); }); } @Nonnull public static <T> T assertNotNull(@Nullable T t) { return notNull(t); } public static <T> void assertAllElementsNotNull(@Nonnull T[] array) { for (int i = 0; i < array.length; i++) { T t = array[i]; if (t == null) { throw new NullPointerException("Element [" + i + "] is null"); } } } @Contract(value = "!null, _ -> !null; _, !null -> !null; null, null -> null", pure = true) public static <T> T chooseNotNull(@Nullable T t1, @Nullable T t2) { return t1 == null ? 
t2 : t1; } @Contract(value = "!null, _ -> !null; _, !null -> !null; null, null -> null", pure = true) public static <T> T coalesce(@Nullable T t1, @Nullable T t2) { return chooseNotNull(t1, t2); } @Contract(value = "!null, _, _ -> !null; _, !null, _ -> !null; _, _, !null -> !null; null,null,null -> null", pure = true) public static <T> T coalesce(@Nullable T t1, @Nullable T t2, @Nullable T t3) { return t1 != null ? t1 : t2 != null ? t2 : t3; } @Nullable public static <T> T coalesce(@Nullable Iterable<? extends T> o) { if (o == null) return null; for (T t : o) { if (t != null) return t; } return null; } @Nonnull public static <T> T notNull(@Nullable T value) { //noinspection ConstantConditions return notNull(value, value); } @Nonnull @Contract(pure = true) public static <T> T notNull(@Nullable T value, @Nonnull T defaultValue) { return value == null ? defaultValue : value; } @Nonnull public static <T> T notNull(@Nullable T value, @Nonnull Supplier<? extends T> defaultValue) { return value == null ? defaultValue.get() : value; } @Contract(value = "null, _ -> null", pure = true) @Nullable public static <T> T tryCast(@Nullable Object obj, @Nonnull Class<T> clazz) { if (clazz.isInstance(obj)) { return clazz.cast(obj); } return null; } @Nullable public static <T, S> S doIfCast(@Nullable Object obj, @Nonnull Class<T> clazz, final Function<? super T, ? extends S> convertor) { if (clazz.isInstance(obj)) { //noinspection unchecked return convertor.apply((T)obj); } return null; } @Contract("null, _ -> null") @Nullable public static <T, S> S doIfNotNull(@Nullable T obj, @Nonnull Function<? super T, ? extends S> function) { return obj == null ? null : function.apply(obj); } public static <T> void consumeIfNotNull(@Nullable T obj, @Nonnull Consumer<? super T> consumer) { if (obj != null) { consumer.accept(obj); } } public static <T> void consumeIfCast(@Nullable Object obj, @Nonnull Class<T> clazz, final Consumer<? super T> consumer) { if (clazz.isInstance(obj)) { //noinspection unchecked consumer.accept((T)obj); } } @Nullable @Contract("null, _ -> null") public static <T> T nullizeByCondition(@Nullable final T obj, @Nonnull final Predicate<? super T> condition) { if (condition.test(obj)) { return null; } return obj; } @Nullable @Contract("null, _ -> null") public static <T> T nullizeIfDefaultValue(@Nullable T obj, @Nonnull T defaultValue) { if (obj == defaultValue) { return null; } return obj; } /** * Performs binary search on the range [fromIndex, toIndex) * * @param indexComparator a comparator which receives a middle index and returns the result of comparision of the value at this index and the goal value * (e.g 0 if found, -1 if the value[middleIndex] < goal, or 1 if value[middleIndex] > goal) * @return index for which {@code indexComparator} returned 0 or {@code -insertionIndex-1} if wasn't found * @see java.util.Arrays#binarySearch(Object[], Object, Comparator) * @see java.util.Collections#binarySearch(List, Object, Comparator) */ //public static int binarySearch(int fromIndex, int toIndex, @NotNull IntIntFunction indexComparator) { // int low = fromIndex; // int high = toIndex - 1; // while (low <= high) { // int mid = (low + high) >>> 1; // int cmp = indexComparator.fun(mid); // if (cmp < 0) low = mid + 1; // else if (cmp > 0) high = mid - 1; // else return mid; // } // return -(low + 1); //} }
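/*
 * Illustrative sketch, not part of ObjectUtil itself: typical call sites for the null-handling
 * helpers defined above. The demo class and the sample values are assumptions; only methods
 * declared in ObjectUtil are used.
 */
import consulo.util.lang.ObjectUtil;

public class ObjectUtilUsageSketch {
    public static void main(String[] args) {
        String fromConfig = null;
        // coalesce returns the first non-null argument, or null if both are null.
        String effective = ObjectUtil.coalesce(fromConfig, "default-value");
        // notNull with a Supplier defers computing the fallback until it is actually needed.
        String lazy = ObjectUtil.notNull(fromConfig, () -> "computed-fallback");
        // tryCast replaces an instanceof check followed by an explicit cast.
        Object value = "some text";
        Integer asInt = ObjectUtil.tryCast(value, Integer.class); // null: value is a String
        // doIfNotNull applies the function only when the argument is non-null.
        Integer length = ObjectUtil.doIfNotNull(effective, String::length);
        System.out.println(effective + " " + lazy + " " + asInt + " " + length);
    }
}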
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandra.tools; import java.io.File; import java.util.*; import java.util.concurrent.TimeUnit; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import org.apache.commons.cli.*; import org.apache.cassandra.schema.Schema; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.Directories; import org.apache.cassandra.db.Keyspace; import org.apache.cassandra.db.compaction.*; import org.apache.cassandra.db.lifecycle.LifecycleTransaction; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.io.sstable.*; import org.apache.cassandra.utils.JVMStabilityInspector; import org.apache.cassandra.utils.OutputHandler; import static org.apache.cassandra.tools.BulkLoader.CmdLineOptions; public class StandaloneScrubber { public static final String REINSERT_OVERFLOWED_TTL_OPTION_DESCRIPTION = "Rewrites rows with overflowed expiration date affected by CASSANDRA-14092 with " + "the maximum supported expiration date of 2038-01-19T03:14:06+00:00. " + "The rows are rewritten with the original timestamp incremented by one millisecond " + "to override/supersede any potential tombstone that may have been generated " + "during compaction of the affected rows."; private static final String TOOL_NAME = "sstablescrub"; private static final String VERBOSE_OPTION = "verbose"; private static final String DEBUG_OPTION = "debug"; private static final String HELP_OPTION = "help"; private static final String MANIFEST_CHECK_OPTION = "manifest-check"; private static final String SKIP_CORRUPTED_OPTION = "skip-corrupted"; private static final String NO_VALIDATE_OPTION = "no-validate"; private static final String REINSERT_OVERFLOWED_TTL_OPTION = "reinsert-overflowed-ttl"; public static void main(String args[]) { Options options = Options.parseArgs(args); Util.initDatabaseDescriptor(); try { // load keyspace descriptions. 
Schema.instance.loadFromDisk(false); if (Schema.instance.getKeyspaceMetadata(options.keyspaceName) == null) throw new IllegalArgumentException(String.format("Unknown keyspace %s", options.keyspaceName)); // Do not load sstables since they might be broken Keyspace keyspace = Keyspace.openWithoutSSTables(options.keyspaceName); ColumnFamilyStore cfs = null; for (ColumnFamilyStore c : keyspace.getValidColumnFamilies(true, false, options.cfName)) { if (c.name.equals(options.cfName)) { cfs = c; break; } } if (cfs == null) throw new IllegalArgumentException(String.format("Unknown table %s.%s", options.keyspaceName, options.cfName)); String snapshotName = "pre-scrub-" + System.currentTimeMillis(); OutputHandler handler = new OutputHandler.SystemOutput(options.verbose, options.debug); Directories.SSTableLister lister = cfs.getDirectories().sstableLister(Directories.OnTxnErr.THROW).skipTemporary(true); List<SSTableReader> sstables = new ArrayList<>(); // Scrub sstables for (Map.Entry<Descriptor, Set<Component>> entry : lister.list().entrySet()) { Set<Component> components = entry.getValue(); if (!components.contains(Component.DATA)) continue; try { SSTableReader sstable = SSTableReader.openNoValidation(entry.getKey(), components, cfs); sstables.add(sstable); File snapshotDirectory = Directories.getSnapshotDirectory(sstable.descriptor, snapshotName); sstable.createLinks(snapshotDirectory.getPath()); } catch (Exception e) { JVMStabilityInspector.inspectThrowable(e); System.err.println(String.format("Error Loading %s: %s", entry.getKey(), e.getMessage())); if (options.debug) e.printStackTrace(System.err); } } System.out.println(String.format("Pre-scrub sstables snapshotted into snapshot %s", snapshotName)); if (!options.manifestCheckOnly) { for (SSTableReader sstable : sstables) { try (LifecycleTransaction txn = LifecycleTransaction.offline(OperationType.SCRUB, sstable)) { txn.obsoleteOriginals(); // make sure originals are deleted and avoid NPE if index is missing, CASSANDRA-9591 try (Scrubber scrubber = new Scrubber(cfs, txn, options.skipCorrupted, handler, !options.noValidate, options.reinserOverflowedTTL)) { scrubber.scrub(); } catch (Throwable t) { if (!cfs.rebuildOnFailedScrub(t)) { System.out.println(t.getMessage()); throw t; } } } catch (Exception e) { System.err.println(String.format("Error scrubbing %s: %s", sstable, e.getMessage())); e.printStackTrace(System.err); } } } // Check (and repair) manifests checkManifest(cfs.getCompactionStrategyManager(), cfs, sstables); CompactionManager.instance.finishCompactionsAndShutdown(5, TimeUnit.MINUTES); LifecycleTransaction.waitForDeletions(); System.exit(0); // We need that to stop non daemonized threads } catch (Exception e) { System.err.println(e.getMessage()); if (options.debug) e.printStackTrace(System.err); System.exit(1); } } private static void checkManifest(CompactionStrategyManager strategyManager, ColumnFamilyStore cfs, Collection<SSTableReader> sstables) { if (strategyManager.getCompactionParams().klass().equals(LeveledCompactionStrategy.class)) { int maxSizeInMB = (int)((cfs.getCompactionStrategyManager().getMaxSSTableBytes()) / (1024L * 1024L)); System.out.println("Checking leveled manifest"); Predicate<SSTableReader> repairedPredicate = new Predicate<SSTableReader>() { @Override public boolean apply(SSTableReader sstable) { return sstable.isRepaired(); } }; List<SSTableReader> repaired = Lists.newArrayList(Iterables.filter(sstables, repairedPredicate)); List<SSTableReader> unRepaired = Lists.newArrayList(Iterables.filter(sstables, 
Predicates.not(repairedPredicate))); LeveledManifest repairedManifest = LeveledManifest.create(cfs, maxSizeInMB, cfs.getLevelFanoutSize(), repaired); for (int i = 1; i < repairedManifest.getLevelCount(); i++) { repairedManifest.repairOverlappingSSTables(i); } LeveledManifest unRepairedManifest = LeveledManifest.create(cfs, maxSizeInMB, cfs.getLevelFanoutSize(), unRepaired); for (int i = 1; i < unRepairedManifest.getLevelCount(); i++) { unRepairedManifest.repairOverlappingSSTables(i); } } } private static class Options { public final String keyspaceName; public final String cfName; public boolean debug; public boolean verbose; public boolean manifestCheckOnly; public boolean skipCorrupted; public boolean noValidate; public boolean reinserOverflowedTTL; private Options(String keyspaceName, String cfName) { this.keyspaceName = keyspaceName; this.cfName = cfName; } public static Options parseArgs(String cmdArgs[]) { CommandLineParser parser = new GnuParser(); CmdLineOptions options = getCmdLineOptions(); try { CommandLine cmd = parser.parse(options, cmdArgs, false); if (cmd.hasOption(HELP_OPTION)) { printUsage(options); System.exit(0); } String[] args = cmd.getArgs(); if (args.length != 2) { String msg = args.length < 2 ? "Missing arguments" : "Too many arguments"; System.err.println(msg); printUsage(options); System.exit(1); } String keyspaceName = args[0]; String cfName = args[1]; Options opts = new Options(keyspaceName, cfName); opts.debug = cmd.hasOption(DEBUG_OPTION); opts.verbose = cmd.hasOption(VERBOSE_OPTION); opts.manifestCheckOnly = cmd.hasOption(MANIFEST_CHECK_OPTION); opts.skipCorrupted = cmd.hasOption(SKIP_CORRUPTED_OPTION); opts.noValidate = cmd.hasOption(NO_VALIDATE_OPTION); opts.reinserOverflowedTTL = cmd.hasOption(REINSERT_OVERFLOWED_TTL_OPTION); return opts; } catch (ParseException e) { errorMsg(e.getMessage(), options); return null; } } private static void errorMsg(String msg, CmdLineOptions options) { System.err.println(msg); printUsage(options); System.exit(1); } private static CmdLineOptions getCmdLineOptions() { CmdLineOptions options = new CmdLineOptions(); options.addOption(null, DEBUG_OPTION, "display stack traces"); options.addOption("v", VERBOSE_OPTION, "verbose output"); options.addOption("h", HELP_OPTION, "display this help message"); options.addOption("m", MANIFEST_CHECK_OPTION, "only check and repair the leveled manifest, without actually scrubbing the sstables"); options.addOption("s", SKIP_CORRUPTED_OPTION, "skip corrupt rows in counter tables"); options.addOption("n", NO_VALIDATE_OPTION, "do not validate columns using column validator"); options.addOption("r", REINSERT_OVERFLOWED_TTL_OPTION, REINSERT_OVERFLOWED_TTL_OPTION_DESCRIPTION); return options; } public static void printUsage(CmdLineOptions options) { String usage = String.format("%s [options] <keyspace> <column_family>", TOOL_NAME); StringBuilder header = new StringBuilder(); header.append("--\n"); header.append("Scrub the sstable for the provided table." ); header.append("\n--\n"); header.append("Options are:"); new HelpFormatter().printHelp(usage, header.toString(), options, ""); } } }
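/*
 * Illustrative sketch, not part of StandaloneScrubber: the commons-cli parsing pattern that
 * Options.parseArgs() above follows, reduced to a minimal standalone example. The option names
 * and the demo class are assumptions for illustration; GnuParser is deprecated in newer
 * commons-cli releases but matches the code above.
 */
import java.util.Arrays;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class MiniCliSketch {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("v", "verbose", false, "verbose output");
        options.addOption("m", "manifest-check", false, "only check the leveled manifest");

        CommandLineParser parser = new GnuParser();
        CommandLine cmd = parser.parse(options, args, false);

        // Positional arguments (keyspace and table in the scrubber) are whatever is left over.
        String[] positional = cmd.getArgs();
        System.out.println("verbose=" + cmd.hasOption("v")
                + " manifestCheck=" + cmd.hasOption("m")
                + " positional=" + Arrays.toString(positional));
    }
}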
/* * Copyright 2012-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.configurationprocessor; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import org.assertj.core.api.Condition; import org.hamcrest.collection.IsMapContaining; import org.springframework.boot.configurationprocessor.metadata.ConfigurationMetadata; import org.springframework.boot.configurationprocessor.metadata.ItemDeprecation; import org.springframework.boot.configurationprocessor.metadata.ItemHint; import org.springframework.boot.configurationprocessor.metadata.ItemMetadata; import org.springframework.boot.configurationprocessor.metadata.ItemMetadata.ItemType; import org.springframework.util.ObjectUtils; /** * AssertJ {@link Condition} to help test {@link ConfigurationMetadata}. * * @author Phillip Webb * @author Stephane Nicoll */ public final class Metadata { private Metadata() { } public static MetadataItemCondition withGroup(String name) { return new MetadataItemCondition(ItemType.GROUP, name); } public static MetadataItemCondition withGroup(String name, Class<?> type) { return new MetadataItemCondition(ItemType.GROUP, name).ofType(type); } public static MetadataItemCondition withGroup(String name, String type) { return new MetadataItemCondition(ItemType.GROUP, name).ofType(type); } public static MetadataItemCondition withProperty(String name) { return new MetadataItemCondition(ItemType.PROPERTY, name); } public static MetadataItemCondition withProperty(String name, Class<?> type) { return new MetadataItemCondition(ItemType.PROPERTY, name).ofType(type); } public static MetadataItemCondition withProperty(String name, String type) { return new MetadataItemCondition(ItemType.PROPERTY, name).ofType(type); } public static MetadataHintCondition withHint(String name) { return new MetadataHintCondition(name); } public static class MetadataItemCondition extends Condition<ConfigurationMetadata> { private final ItemType itemType; private final String name; private final String type; private final Class<?> sourceType; private final String description; private final Object defaultValue; private final ItemDeprecation deprecation; public MetadataItemCondition(ItemType itemType, String name) { this(itemType, name, null, null, null, null, null); } public MetadataItemCondition(ItemType itemType, String name, String type, Class<?> sourceType, String description, Object defaultValue, ItemDeprecation deprecation) { this.itemType = itemType; this.name = name; this.type = type; this.sourceType = sourceType; this.description = description; this.defaultValue = defaultValue; this.deprecation = deprecation; describedAs(createDescription()); } private String createDescription() { StringBuilder description = new StringBuilder(); description.append("an item named '" + this.name + "'"); if (this.type != null) { description.append(" with dataType:").append(this.type); } if (this.sourceType != null) { description.append(" with 
sourceType:").append(this.sourceType); } if (this.defaultValue != null) { description.append(" with defaultValue:").append(this.defaultValue); } if (this.description != null) { description.append(" with description:").append(this.description); } if (this.deprecation != null) { description.append(" with deprecation:").append(this.deprecation); } return description.toString(); } @Override public boolean matches(ConfigurationMetadata value) { ItemMetadata itemMetadata = getFirstItemWithName(value, this.name); if (itemMetadata == null) { return false; } if (this.type != null && !this.type.equals(itemMetadata.getType())) { return false; } if (this.sourceType != null && !this.sourceType.getName().equals(itemMetadata.getSourceType())) { return false; } if (this.defaultValue != null && !ObjectUtils .nullSafeEquals(this.defaultValue, itemMetadata.getDefaultValue())) { return false; } if (this.description != null && !this.description.equals(itemMetadata.getDescription())) { return false; } if (this.deprecation == null && itemMetadata.getDeprecation() != null) { return false; } if (this.deprecation != null && !this.deprecation.equals(itemMetadata.getDeprecation())) { return false; } return true; } public MetadataItemCondition ofType(Class<?> dataType) { return new MetadataItemCondition(this.itemType, this.name, dataType.getName(), this.sourceType, this.description, this.defaultValue, this.deprecation); } public MetadataItemCondition ofType(String dataType) { return new MetadataItemCondition(this.itemType, this.name, dataType, this.sourceType, this.description, this.defaultValue, this.deprecation); } public MetadataItemCondition fromSource(Class<?> sourceType) { return new MetadataItemCondition(this.itemType, this.name, this.type, sourceType, this.description, this.defaultValue, this.deprecation); } public MetadataItemCondition withDescription(String description) { return new MetadataItemCondition(this.itemType, this.name, this.type, this.sourceType, description, this.defaultValue, this.deprecation); } public MetadataItemCondition withDefaultValue(Object defaultValue) { return new MetadataItemCondition(this.itemType, this.name, this.type, this.sourceType, this.description, defaultValue, this.deprecation); } public MetadataItemCondition withDeprecation(String reason, String replacement) { return new MetadataItemCondition(this.itemType, this.name, this.type, this.sourceType, this.description, this.defaultValue, new ItemDeprecation(reason, replacement)); } public MetadataItemCondition withNoDeprecation() { return new MetadataItemCondition(this.itemType, this.name, this.type, this.sourceType, this.description, this.defaultValue, null); } private ItemMetadata getFirstItemWithName(ConfigurationMetadata metadata, String name) { for (ItemMetadata item : metadata.getItems()) { if (item.isOfItemType(this.itemType) && name.equals(item.getName())) { return item; } } return null; } } public static class MetadataHintCondition extends Condition<ConfigurationMetadata> { private final String name; private final List<ItemHintValueCondition> valueConditions; private final List<ItemHintProviderCondition> providerConditions; public MetadataHintCondition(String name) { this.name = name; this.valueConditions = Collections.emptyList(); this.providerConditions = Collections.emptyList(); } public MetadataHintCondition(String name, List<ItemHintValueCondition> valueConditions, List<ItemHintProviderCondition> providerConditions) { this.name = name; this.valueConditions = valueConditions; this.providerConditions = 
providerConditions; describedAs(createDescription()); } private String createDescription() { StringBuilder description = new StringBuilder(); description.append("a hints name '" + this.name + "'"); if (!this.valueConditions.isEmpty()) { description.append(" with values:").append(this.valueConditions); } if (!this.providerConditions.isEmpty()) { description.append(" with providers:").append(this.providerConditions); } return description.toString(); } @Override public boolean matches(ConfigurationMetadata metadata) { ItemHint itemHint = getFirstHintWithName(metadata, this.name); if (itemHint == null) { return false; } return matches(itemHint, this.valueConditions) && matches(itemHint, this.providerConditions); } private boolean matches(ItemHint itemHint, List<? extends Condition<ItemHint>> conditions) { for (Condition<ItemHint> condition : conditions) { if (!condition.matches(itemHint)) { return false; } } return true; } private ItemHint getFirstHintWithName(ConfigurationMetadata metadata, String name) { for (ItemHint hint : metadata.getHints()) { if (name.equals(hint.getName())) { return hint; } } return null; } public MetadataHintCondition withValue(int index, Object value, String description) { return new MetadataHintCondition(this.name, add(this.valueConditions, new ItemHintValueCondition(index, value, description)), this.providerConditions); } public MetadataHintCondition withProvider(String provider) { return withProvider(this.providerConditions.size(), provider, null); } public MetadataHintCondition withProvider(String provider, String key, Object value) { return withProvider(this.providerConditions.size(), provider, Collections.singletonMap(key, value)); } public MetadataHintCondition withProvider(int index, String provider, Map<String, Object> parameters) { return new MetadataHintCondition(this.name, this.valueConditions, add(this.providerConditions, new ItemHintProviderCondition(index, provider, parameters))); } private <T> List<T> add(List<T> items, T item) { List<T> result = new ArrayList<T>(items); result.add(item); return result; } } private static class ItemHintValueCondition extends Condition<ItemHint> { private final int index; private final Object value; private final String description; ItemHintValueCondition(int index, Object value, String description) { this.index = index; this.value = value; this.description = description; describedAs(createDescription()); } private String createDescription() { StringBuilder description = new StringBuilder(); description.append("value hint at index '" + this.index + "'"); if (this.value != null) { description.append(" with value:").append(this.value); } if (this.description != null) { description.append(" with description:").append(this.description); } return description.toString(); } @Override public boolean matches(ItemHint value) { if (this.index + 1 > value.getValues().size()) { return false; } ItemHint.ValueHint valueHint = value.getValues().get(this.index); if (this.value != null && !this.value.equals(valueHint.getValue())) { return false; } if (this.description != null && !this.description.equals(valueHint.getDescription())) { return false; } return true; } } private static class ItemHintProviderCondition extends Condition<ItemHint> { private final int index; private final String name; private final Map<String, Object> parameters; ItemHintProviderCondition(int index, String name, Map<String, Object> parameters) { this.index = index; this.name = name; this.parameters = parameters; describedAs(createDescription()); } public String 
createDescription() { StringBuilder description = new StringBuilder(); description.append("value provider"); if (this.name != null) { description.append(" with name:").append(this.name); } if (this.parameters != null) { description.append(" with parameters:").append(this.parameters); } return description.toString(); } @Override public boolean matches(ItemHint hint) { if (this.index + 1 > hint.getProviders().size()) { return false; } ItemHint.ValueProvider valueProvider = hint.getProviders().get(this.index); if (this.name != null && !this.name.equals(valueProvider.getName())) { return false; } if (this.parameters != null) { for (Map.Entry<String, Object> entry : this.parameters.entrySet()) { if (!IsMapContaining.hasEntry(entry.getKey(), entry.getValue()) .matches(valueProvider.getParameters())) { return false; } } } return true; } } }
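/*
 * Illustrative sketch, not part of the Metadata helper above: how these AssertJ conditions are
 * typically consumed in an annotation-processor test. The metadata variable and the
 * group/property/hint names are assumptions for illustration; the sketch assumes the generated
 * ConfigurationMetadata and the Metadata helper are both on the classpath.
 */
import static org.assertj.core.api.Assertions.assertThat;

import org.springframework.boot.configurationprocessor.Metadata;
import org.springframework.boot.configurationprocessor.metadata.ConfigurationMetadata;

public class MetadataConditionUsageSketch {
    void verify(ConfigurationMetadata metadata) {
        assertThat(metadata).has(Metadata.withGroup("server", "com.example.ServerProperties"));
        assertThat(metadata).has(Metadata.withProperty("server.port", Integer.class)
                .withDescription("Server HTTP port.")
                .withDefaultValue(8080));
        assertThat(metadata).has(Metadata.withHint("server.mode")
                .withValue(0, "dev", "Development mode."));
    }
}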
package us.kbase.workspace.test.workspace; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static us.kbase.common.test.TestCommon.set; import java.util.Arrays; import java.util.List; import org.junit.Test; import nl.jqno.equalsverifier.EqualsVerifier; import us.kbase.common.test.TestCommon; import us.kbase.workspace.database.AllUsers; import us.kbase.workspace.database.Permission; import us.kbase.workspace.database.PermissionSet; import us.kbase.workspace.database.PermissionSet.Builder; import us.kbase.workspace.database.ResolvedWorkspaceID; import us.kbase.workspace.database.WorkspaceUser; public class PermissionSetTest { private static final ResolvedWorkspaceID RWSID1 = new ResolvedWorkspaceID(1, "someworkspace1", false, false); private static final ResolvedWorkspaceID RWSID2 = new ResolvedWorkspaceID(2, "someworkspace2", false, false); private static final ResolvedWorkspaceID RWSID3 = new ResolvedWorkspaceID(3, "someworkspace3", false, false); private static final ResolvedWorkspaceID RWSID4 = new ResolvedWorkspaceID(4, "someworkspace4", false, false); private static final ResolvedWorkspaceID RWSID5 = new ResolvedWorkspaceID(5, "someworkspace5", false, false); private static final ResolvedWorkspaceID RWSID6 = new ResolvedWorkspaceID(6, "someworkspace6", false, false); @Test public void equals() throws Exception { EqualsVerifier.forClass(PermissionSet.class).usingGetClass().verify(); // test equality on internal Perms class final WorkspaceUser u = new WorkspaceUser("u"); final AllUsers a = new AllUsers('*'); final ResolvedWorkspaceID w = new ResolvedWorkspaceID(1, "a", false, false); final PermissionSet p1 = PermissionSet.getBuilder(u, a) .withWorkspace(w, Permission.WRITE, Permission.READ).build(); PermissionSet p2 = PermissionSet.getBuilder(u, a) .withWorkspace(w, Permission.WRITE, Permission.READ).build(); assertThat("incorrect set", p1, is(p2)); p2 = PermissionSet.getBuilder(u, a) .withWorkspace(w, Permission.ADMIN, Permission.READ).build(); assertThat("incorrect equality", p1.equals(p2), is(false)); p2 = PermissionSet.getBuilder(u, a) .withWorkspace(w, Permission.WRITE, Permission.NONE).build(); assertThat("incorrect equality", p1.equals(p2), is(false)); // no perms uses the same instance of Perms class final PermissionSet p3 = PermissionSet.getBuilder(u, a).withUnreadableWorkspace(w).build(); final PermissionSet p4 = PermissionSet.getBuilder(u, a).withUnreadableWorkspace(w).build(); assertThat("incorrect set", p3, is(p4)); } @Test public void buildEmpty() { final PermissionSet p = PermissionSet.getBuilder( new WorkspaceUser("foo"), new AllUsers('*')).build(); assertThat("incorrect user", p.getUser(), is(new WorkspaceUser("foo"))); assertThat("incorrect global user", p.getGlobalUser(), is(new AllUsers('*'))); assertThat("incorrect workspaces", p.getWorkspaces(), is(set())); assertThat("incorrect is empty", p.isEmpty(), is(true)); assertThat("incorrect has ws", p.hasWorkspace(RWSID1), is(false)); assertThat("incorrect get perm", p.getPermission(RWSID1), is(Permission.NONE)); assertThat("incorrect get user perm", p.getUserPermission(RWSID1), is(Permission.NONE)); assertThat("incorrect world read", p.isWorldReadable(RWSID1), is(false)); assertThat("incorrect has perm", p.hasPermission(RWSID1, Permission.READ), is(false)); assertThat("incorrect has user perm", p.hasUserPermission(RWSID1, Permission.READ), is(false)); } @Test public void buildWithNullUser() 
{ final PermissionSet p = PermissionSet.getBuilder( null, new AllUsers('*')) .withUnreadableWorkspace(RWSID1) .withWorkspace(RWSID2, Permission.NONE, Permission.READ) .build(); assertThat("incorrect user", p.getUser(), is(nullValue())); assertThat("incorrect global user", p.getGlobalUser(), is(new AllUsers('*'))); assertThat("incorrect workspaces", p.getWorkspaces(), is(set(RWSID1, RWSID2))); assertThat("incorrect is empty", p.isEmpty(), is(false)); assertThat("incorrect has ws", p.hasWorkspace(RWSID1), is(true)); assertThat("incorrect has ws", p.hasWorkspace(RWSID2), is(true)); assertThat("incorrect has ws", p.hasWorkspace(RWSID3), is(false)); assertThat("incorrect get perm", p.getPermission(RWSID1), is(Permission.NONE)); assertThat("incorrect get user perm", p.getUserPermission(RWSID1), is(Permission.NONE)); assertThat("incorrect world read", p.isWorldReadable(RWSID1), is(false)); assertThat("incorrect has perm", p.hasPermission(RWSID1, Permission.READ), is(false)); assertThat("incorrect has user perm", p.hasUserPermission(RWSID1, Permission.READ), is(false)); assertThat("incorrect get perm", p.getPermission(RWSID2), is(Permission.READ)); assertThat("incorrect get user perm", p.getUserPermission(RWSID2), is(Permission.NONE)); assertThat("incorrect world read", p.isWorldReadable(RWSID2), is(true)); assertThat("incorrect has perm", p.hasPermission(RWSID2, Permission.READ), is(true)); assertThat("incorrect has perm", p.hasPermission(RWSID2, Permission.WRITE), is(false)); assertThat("incorrect has user perm", p.hasUserPermission(RWSID2, Permission.READ), is(false)); } @Test public void buildWithVariousPermissions() { final PermissionSet p = PermissionSet.getBuilder( new WorkspaceUser("foo"), new AllUsers('*')) .withWorkspace(RWSID1, Permission.NONE, Permission.READ) .withWorkspace(RWSID2, Permission.READ, Permission.NONE) .withWorkspace(RWSID3, Permission.WRITE, Permission.READ) .withWorkspace(RWSID4, Permission.ADMIN, Permission.NONE) .withWorkspace(RWSID5, Permission.OWNER, Permission.NONE) .build(); assertThat("incorrect user", p.getUser(), is(new WorkspaceUser("foo"))); assertThat("incorrect global user", p.getGlobalUser(), is(new AllUsers('*'))); assertThat("incorrect workspaces", p.getWorkspaces(), is(set(RWSID1, RWSID2, RWSID3, RWSID4, RWSID5))); assertThat("incorrect is empty", p.isEmpty(), is(false)); assertThat("incorrect has ws", p.hasWorkspace(RWSID6), is(false)); checkPerms(p, RWSID1, Permission.READ, Permission.NONE, true, Arrays.asList(true, true, false, false, false), Arrays.asList(true, false, false, false, false)); checkPerms(p, RWSID2, Permission.READ, Permission.READ, false, Arrays.asList(true, true, false, false, false), Arrays.asList(true, true, false, false, false)); checkPerms(p, RWSID3, Permission.WRITE, Permission.WRITE, true, Arrays.asList(true, true, true, false, false), Arrays.asList(true, true, true, false, false)); checkPerms(p, RWSID4, Permission.ADMIN, Permission.ADMIN, false, Arrays.asList(true, true, true, true, false), Arrays.asList(true, true, true, true, false)); checkPerms(p, RWSID5, Permission.OWNER, Permission.OWNER, false, Arrays.asList(true, true, true, true, true), Arrays.asList(true, true, true, true, true)); } @Test public void buildWithNullPermissions() { // only tests specific effects of null permissions final PermissionSet p = PermissionSet.getBuilder( new WorkspaceUser("foo"), new AllUsers('*')) .withWorkspace(RWSID1, null, Permission.READ) .withWorkspace(RWSID2, Permission.READ, null) .build(); checkPerms(p, RWSID1, Permission.READ, 
Permission.NONE, true, Arrays.asList(true, true, false, false, false), Arrays.asList(true, false, false, false, false)); checkPerms(p, RWSID2, Permission.READ, Permission.READ, false, Arrays.asList(true, true, false, false, false), Arrays.asList(true, true, false, false, false)); } private void checkPerms( final PermissionSet p, final ResolvedWorkspaceID ws, final Permission perm, final Permission userperm, final boolean worldreadable, final List<Boolean> perms, final List<Boolean> userperms) { assertThat("incorrect get perm", p.getPermission(ws), is(perm)); assertThat("incorrect get user perm", p.getUserPermission(ws), is(userperm)); assertThat("incorrect world read", p.isWorldReadable(ws), is(worldreadable)); for (int i = 0; i < Permission.values().length; i++) { assertThat("incorrect has perm", p.hasPermission(ws, Permission.values()[i]), is(perms.get(i))); assertThat("incorrect has user perm", p.hasUserPermission(ws, Permission.values()[i]), is(userperms.get(i))); } } @Test public void string() { final PermissionSet p = PermissionSet.getBuilder( new WorkspaceUser("foo"), new AllUsers('*')) // only test one item since would need to sort the internal map for consistent // results .withWorkspace(RWSID1, Permission.WRITE, Permission.READ) .build(); assertThat("incorrect toString", p.toString(), is("PermissionSet [user=User [user=foo], globalUser=AllUsers [user=*], " + "perms={ResolvedWorkspaceID [id=1, wsname=someworkspace1, locked=false, " + "deleted=false]=Perms [perm=WRITE, worldRead=true]}]")); } @Test public void builderHasWorkspace() { final Builder p = PermissionSet.getBuilder( new WorkspaceUser("foo"), new AllUsers('*')) .withWorkspace(RWSID1, Permission.NONE, Permission.READ) .withWorkspace(RWSID5, Permission.OWNER, Permission.NONE); assertThat("incorrect has ws", p.hasWorkspace(RWSID1), is(true)); assertThat("incorrect has ws", p.hasWorkspace(RWSID2), is(false)); assertThat("incorrect has ws", p.hasWorkspace(RWSID5), is(true)); } @Test public void buildFailStart() { try { PermissionSet.getBuilder(null, null); fail("expected exception"); } catch (Exception got) { TestCommon.assertExceptionCorrect(got, new IllegalArgumentException( "Global user cannot be null")); } } @Test public void buildFailAddWorkspace() { final Builder p = PermissionSet.getBuilder(new WorkspaceUser("foo"), new AllUsers('*')) .withWorkspace(RWSID2, Permission.READ, Permission.NONE); failBuildWithWorkspace(p, null, Permission.READ, Permission.NONE, new IllegalArgumentException("Workspace ID cannot be null")); failBuildWithWorkspace(p, RWSID2, Permission.READ, Permission.NONE, new IllegalArgumentException("Permissions for workspace 2 have already been set")); failBuildWithWorkspace(p, RWSID1, Permission.NONE, Permission.NONE, new IllegalArgumentException("Cannot add unreadable workspace")); failBuildWithWorkspace(p, RWSID1, null, null, new IllegalArgumentException("Cannot add unreadable workspace")); failBuildWithWorkspace(p, RWSID1, Permission.READ, Permission.WRITE, new IllegalArgumentException("Illegal global permission: WRITE")); failBuildWithWorkspace(p, RWSID1, Permission.READ, Permission.ADMIN, new IllegalArgumentException("Illegal global permission: ADMIN")); failBuildWithWorkspace(p, RWSID1, Permission.READ, Permission.OWNER, new IllegalArgumentException("Illegal global permission: OWNER")); } @Test public void buildFailAddWorkspaceAnonUser() { final Builder p = PermissionSet.getBuilder(null, new AllUsers('*')); failBuildWithWorkspace(p, RWSID1, Permission.READ, Permission.READ, new 
IllegalArgumentException( "anonymous users can't have user specific permissions")); failBuildWithWorkspace(p, RWSID1, Permission.WRITE, Permission.READ, new IllegalArgumentException( "anonymous users can't have user specific permissions")); failBuildWithWorkspace(p, RWSID1, Permission.ADMIN, Permission.READ, new IllegalArgumentException( "anonymous users can't have user specific permissions")); failBuildWithWorkspace(p, RWSID1, Permission.OWNER, Permission.READ, new IllegalArgumentException( "anonymous users can't have user specific permissions")); } private void failBuildWithWorkspace( final Builder p, final ResolvedWorkspaceID ws, final Permission userPerm, final Permission globalPerm, final Exception e) { try { p.withWorkspace(ws, userPerm, globalPerm); fail("expected exception"); } catch (Exception got) { TestCommon.assertExceptionCorrect(got, e); } } @Test public void buildFailAddUnreadableWorkspace() { final Builder p = PermissionSet.getBuilder(new WorkspaceUser("foo"), new AllUsers('*')) .withWorkspace(RWSID2, Permission.READ, Permission.NONE); failBuildWithUnreadableWorkspace(p, null, new IllegalArgumentException("Workspace ID cannot be null")); failBuildWithUnreadableWorkspace(p, RWSID2, new IllegalArgumentException("Permissions for workspace 2 have already been set")); } private void failBuildWithUnreadableWorkspace( final Builder p, final ResolvedWorkspaceID ws, final Exception e) { try { p.withUnreadableWorkspace(ws); fail("expected exception"); } catch (Exception got) { TestCommon.assertExceptionCorrect(got, e); } } }
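/*
 * Illustrative sketch, not part of the test class above: straight-line usage of the
 * PermissionSet builder API that the tests exercise. The workspace id, the user names and the
 * demo class are assumptions for illustration.
 */
import us.kbase.workspace.database.AllUsers;
import us.kbase.workspace.database.Permission;
import us.kbase.workspace.database.PermissionSet;
import us.kbase.workspace.database.ResolvedWorkspaceID;
import us.kbase.workspace.database.WorkspaceUser;

public class PermissionSetUsageSketch {
    public static void main(String[] args) {
        ResolvedWorkspaceID ws = new ResolvedWorkspaceID(42, "demo-workspace", false, false);
        PermissionSet perms = PermissionSet.getBuilder(new WorkspaceUser("alice"), new AllUsers('*'))
                // user permission WRITE, global (world) permission READ
                .withWorkspace(ws, Permission.WRITE, Permission.READ)
                .build();

        System.out.println(perms.getPermission(ws));                  // WRITE
        System.out.println(perms.getUserPermission(ws));              // WRITE
        System.out.println(perms.isWorldReadable(ws));                // true
        System.out.println(perms.hasPermission(ws, Permission.ADMIN)); // false
    }
}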
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide; import com.intellij.ide.ui.laf.darcula.ui.DarculaButtonUI; import com.intellij.openapi.ui.popup.ComponentPopupBuilder; import com.intellij.openapi.ui.popup.JBPopup; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.Gray; import com.intellij.ui.JBColor; import com.intellij.ui.awt.RelativePoint; import com.intellij.ui.components.labels.LinkLabel; import com.intellij.ui.components.panels.VerticalLayout; import com.intellij.util.Alarm; import com.intellij.util.ui.JBUI; import sun.swing.SwingUtilities2; import javax.swing.*; import java.awt.*; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.font.FontRenderContext; import java.awt.font.LineBreakMeasurer; import java.awt.font.TextAttribute; import java.awt.font.TextLayout; import java.beans.PropertyChangeListener; import java.text.AttributedCharacterIterator; import java.text.AttributedString; import java.util.Map; public class HelpTooltip { private static Color BACKGROUND_COLOR = new JBColor(Gray.xF7, new Color(0x46484a)); private static Color FONT_COLOR = new JBColor(Gray.x33, Gray.xBF); private static Color SHORTCUT_COLOR = new JBColor(Gray.x78, Gray.x87); private static Color BORDER_COLOR = new JBColor(Gray.xA1, new Color(0x5b5c5e)); private static final int SPACE = JBUI.scale(10); private static final int MAX_WIDTH = JBUI.scale(250); private static final String DOTS = "..."; private String title; private String shortcut; private String description; private LinkLabel link; private boolean neverHide; private JComponent owner; private ComponentPopupBuilder myPopupBuilder; private JBPopup myPopup; private Alarm popupAlarm = new Alarm(); private boolean isOverPopup; private boolean isMultiline; private int myDismissDelay; private MouseAdapter myMouseListener; private PropertyChangeListener myPropertyChangeListener; @SuppressWarnings("unused") public HelpTooltip setTitle(String title) { this.title = title; return this; } @SuppressWarnings("unused") public HelpTooltip setShortcut(String shortcut) { this.shortcut = shortcut; return this; } @SuppressWarnings("unused") public HelpTooltip setDescription(String description) { this.description = description; return this; } @SuppressWarnings("unused") public HelpTooltip setLink(String linkText, Runnable linkAction) { this.link = LinkLabel.create(linkText, () -> { hidePopup(true); linkAction.run(); }); return this; } @SuppressWarnings("unused") public HelpTooltip setNeverHideOnTimeout(boolean neverHide) { this.neverHide = neverHide; return this; } public void installOn(JComponent component) { JPanel tipPanel = new JPanel(); tipPanel.addMouseListener(new MouseAdapter() { @Override public void mouseEntered(MouseEvent e) { isOverPopup = true; } @Override public void mouseExited(MouseEvent e) { if (link == null || 
!link.getBounds().contains(e.getPoint())) { isOverPopup = false; hidePopup(false); } } }); tipPanel.setLayout(new VerticalLayout(SPACE)); tipPanel.setBackground(BACKGROUND_COLOR); if (StringUtil.isNotEmpty(title)) { tipPanel.add(new Header(), VerticalLayout.TOP); } if (StringUtil.isNotEmpty(description)) { String[] pa = description.split("\n"); for (String p : pa) { JLabel label = new JLabel(); label.setForeground(FONT_COLOR); int width = SwingUtilities2.stringWidth(label, label.getFontMetrics(label.getFont()), p); isMultiline = isMultiline || width > MAX_WIDTH; width = Math.min(MAX_WIDTH, width); label.setText(String.format("<html><div width=%d>%s</div></html>", width, p)); tipPanel.add(label, VerticalLayout.TOP); } } if (link != null) { tipPanel.add(link, VerticalLayout.TOP); } isMultiline = isMultiline || StringUtil.isNotEmpty(description) && (StringUtil.isNotEmpty(title) || link != null); tipPanel.setBorder(isMultiline ? JBUI.Borders.empty(10, 10, 10, 16) : JBUI.Borders.empty(5, 8, 4, 8)); myDismissDelay = Registry.intValue(isMultiline ? "ide.helptooltip.full.dismissDelay" : "ide.helptooltip.regular.dismissDelay"); neverHide = neverHide || DarculaButtonUI.isHelpButton(component); owner = component; myPopupBuilder = JBPopupFactory.getInstance(). createComponentPopupBuilder(tipPanel, null). setBorderColor(BORDER_COLOR); myMouseListener = new MouseAdapter() { @Override public void mouseEntered(MouseEvent e) { if (myPopup != null && !myPopup.isDisposed()){ myPopup.cancel(); } scheduleShow(Registry.intValue("ide.tooltip.initialReshowDelay")); } @Override public void mouseExited(MouseEvent e) { scheduleHide(link == null, Registry.intValue("ide.tooltip.initialDelay.highlighter")); } @Override public void mouseMoved(MouseEvent e) { if (myPopup == null || myPopup.isDisposed()) { scheduleShow(Registry.intValue("ide.tooltip.reshowDelay")); } } }; myPropertyChangeListener = evt -> { if (evt.getNewValue() == null) { // owner is removed from the component tree hidePopup(true); if (owner != null) { owner.removeMouseListener(myMouseListener); owner.removeMouseMotionListener(myMouseListener); owner.removePropertyChangeListener(myPropertyChangeListener); owner = null; } } }; owner.addMouseListener(myMouseListener); owner.addMouseMotionListener(myMouseListener); owner.addPropertyChangeListener("ancestor", myPropertyChangeListener); } private void scheduleShow(int delay) { popupAlarm.cancelAllRequests(); popupAlarm.addRequest(() -> { Dimension size = owner.getSize(); myPopup = myPopupBuilder.createPopup(); myPopup.show(new RelativePoint(owner, new Point(size.width / 2, size.height + JBUI.scale(4)))); if (!neverHide) { scheduleHide(true, myDismissDelay); } }, delay); } private void scheduleHide(boolean force, int delay) { popupAlarm.cancelAllRequests(); popupAlarm.addRequest(() -> hidePopup(force), delay); } private void hidePopup(boolean force) { popupAlarm.cancelAllRequests(); if (myPopup != null && myPopup.isVisible() && (!isOverPopup || force)) { myPopup.cancel(); myPopup = null; } } private class Header extends JPanel { private final AttributedString titleString; private final AttributedString dotString; private final AttributedString shortcutString; private LineBreakMeasurer lineMeasurer; private TextLayout dotLayout; private TextLayout shortcutLayout; private final int paragraphStart; private final int paragraphEnd; private Header() { setOpaque(false); Font font = getFont(); Font titleFont = StringUtil.isNotEmpty(description) ? 
font.deriveFont(Font.BOLD) : font; Map<TextAttribute,?> tfa = titleFont.getAttributes(); titleString = new AttributedString(title, tfa); dotString = new AttributedString(DOTS, tfa); shortcutString = StringUtil.isNotEmpty(shortcut) ? new AttributedString(shortcut, font.getAttributes()) : null; AttributedCharacterIterator paragraph = titleString.getIterator(); paragraphStart = paragraph.getBeginIndex(); paragraphEnd = paragraph.getEndIndex(); // Compute preferred size FontMetrics tfm = getFontMetrics(titleFont); int titleWidth = SwingUtilities2.stringWidth(this, tfm, title); FontMetrics fm = getFontMetrics(font); titleWidth += StringUtil.isNotEmpty(shortcut) ? SPACE + SwingUtilities2.stringWidth(this, fm, shortcut) : 0; isMultiline = titleWidth > MAX_WIDTH; setPreferredSize(isMultiline ? new Dimension(MAX_WIDTH, tfm.getHeight() * 2) : new Dimension(titleWidth, fm.getHeight())); } @Override public void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2 = (Graphics2D)g.create(); try { g2.setColor(FONT_COLOR); if (lineMeasurer == null) { FontRenderContext frc = g2.getFontRenderContext(); lineMeasurer = new LineBreakMeasurer(titleString.getIterator(), frc); LineBreakMeasurer dotMeasurer = new LineBreakMeasurer(dotString.getIterator(), frc); dotLayout = dotMeasurer.nextLayout(Float.POSITIVE_INFINITY); if (shortcutString != null) { LineBreakMeasurer shortcutMeasurer = new LineBreakMeasurer(shortcutString.getIterator(), frc); shortcutLayout = shortcutMeasurer.nextLayout(Float.POSITIVE_INFINITY); } } lineMeasurer.setPosition(paragraphStart); float breakWidth = getWidth(); float drawPosY = 0; int line = 0; TextLayout layout = null; while (lineMeasurer.getPosition() < paragraphEnd && line < 1) { layout = lineMeasurer.nextLayout(breakWidth); drawPosY += layout.getAscent(); layout.draw(g2, 0, drawPosY); drawPosY += layout.getDescent() + layout.getLeading(); line++; } if (lineMeasurer.getPosition() < paragraphEnd) { if (shortcutString != null) { breakWidth -= dotLayout.getAdvance() + SPACE + shortcutLayout.getAdvance(); } layout = lineMeasurer.nextLayout(breakWidth); drawPosY += layout.getAscent(); layout.draw(g2, 0, drawPosY); if (shortcutString != null) { dotLayout.draw(g2, layout.getAdvance(), drawPosY); g2.setColor(SHORTCUT_COLOR); shortcutLayout.draw(g2, layout.getAdvance() + dotLayout.getAdvance() + SPACE, drawPosY); } } else if (layout != null && shortcutString != null) { g2.setColor(SHORTCUT_COLOR); if (Float.compare(getWidth() - layout.getAdvance(), shortcutLayout.getAdvance() + SPACE) >= 0) { drawPosY = shortcutLayout.getAscent(); shortcutLayout.draw(g2, layout.getAdvance() + SPACE, drawPosY); } else { drawPosY += shortcutLayout.getAscent(); shortcutLayout.draw(g2, 0, drawPosY); } } } finally { g2.dispose(); } } } }
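/*
 * Illustrative sketch, not part of HelpTooltip itself: wiring the fluent setters above onto a
 * Swing component. The button text, the tooltip copy and the demo class are assumptions for
 * illustration; only methods declared by HelpTooltip are used.
 */
import javax.swing.JButton;

import com.intellij.ide.HelpTooltip;

public class HelpTooltipUsageSketch {
    static JButton createHelpButton() {
        JButton button = new JButton("Help");
        new HelpTooltip()
                .setTitle("Reformat Code")
                .setShortcut("Ctrl+Alt+L")
                .setDescription("Reformats the selection or the whole file\naccording to the code style settings.")
                .setLink("Configure code style...", () -> System.out.println("open settings"))
                .installOn(button);
        return button;
    }
}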
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.forecast.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * The weighted loss value for a quantile. This object is part of the <a>Metrics</a> object. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/forecast-2018-06-26/WeightedQuantileLoss" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class WeightedQuantileLoss implements Serializable, Cloneable, StructuredPojo { /** * <p> * The quantile. Quantiles divide a probability distribution into regions of equal probability. For example, if the * distribution was divided into 5 regions of equal probability, the quantiles would be 0.2, 0.4, 0.6, and 0.8. * </p> */ private Double quantile; /** * <p> * The difference between the predicted value and the actual value over the quantile, weighted (normalized) by * dividing by the sum over all quantiles. * </p> */ private Double lossValue; /** * <p> * The quantile. Quantiles divide a probability distribution into regions of equal probability. For example, if the * distribution was divided into 5 regions of equal probability, the quantiles would be 0.2, 0.4, 0.6, and 0.8. * </p> * * @param quantile * The quantile. Quantiles divide a probability distribution into regions of equal probability. For example, * if the distribution was divided into 5 regions of equal probability, the quantiles would be 0.2, 0.4, 0.6, * and 0.8. */ public void setQuantile(Double quantile) { this.quantile = quantile; } /** * <p> * The quantile. Quantiles divide a probability distribution into regions of equal probability. For example, if the * distribution was divided into 5 regions of equal probability, the quantiles would be 0.2, 0.4, 0.6, and 0.8. * </p> * * @return The quantile. Quantiles divide a probability distribution into regions of equal probability. For example, * if the distribution was divided into 5 regions of equal probability, the quantiles would be 0.2, 0.4, * 0.6, and 0.8. */ public Double getQuantile() { return this.quantile; } /** * <p> * The quantile. Quantiles divide a probability distribution into regions of equal probability. For example, if the * distribution was divided into 5 regions of equal probability, the quantiles would be 0.2, 0.4, 0.6, and 0.8. * </p> * * @param quantile * The quantile. Quantiles divide a probability distribution into regions of equal probability. For example, * if the distribution was divided into 5 regions of equal probability, the quantiles would be 0.2, 0.4, 0.6, * and 0.8. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public WeightedQuantileLoss withQuantile(Double quantile) { setQuantile(quantile); return this; } /** * <p> * The difference between the predicted value and the actual value over the quantile, weighted (normalized) by * dividing by the sum over all quantiles. * </p> * * @param lossValue * The difference between the predicted value and the actual value over the quantile, weighted (normalized) * by dividing by the sum over all quantiles. */ public void setLossValue(Double lossValue) { this.lossValue = lossValue; } /** * <p> * The difference between the predicted value and the actual value over the quantile, weighted (normalized) by * dividing by the sum over all quantiles. * </p> * * @return The difference between the predicted value and the actual value over the quantile, weighted (normalized) * by dividing by the sum over all quantiles. */ public Double getLossValue() { return this.lossValue; } /** * <p> * The difference between the predicted value and the actual value over the quantile, weighted (normalized) by * dividing by the sum over all quantiles. * </p> * * @param lossValue * The difference between the predicted value and the actual value over the quantile, weighted (normalized) * by dividing by the sum over all quantiles. * @return Returns a reference to this object so that method calls can be chained together. */ public WeightedQuantileLoss withLossValue(Double lossValue) { setLossValue(lossValue); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getQuantile() != null) sb.append("Quantile: ").append(getQuantile()).append(","); if (getLossValue() != null) sb.append("LossValue: ").append(getLossValue()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof WeightedQuantileLoss == false) return false; WeightedQuantileLoss other = (WeightedQuantileLoss) obj; if (other.getQuantile() == null ^ this.getQuantile() == null) return false; if (other.getQuantile() != null && other.getQuantile().equals(this.getQuantile()) == false) return false; if (other.getLossValue() == null ^ this.getLossValue() == null) return false; if (other.getLossValue() != null && other.getLossValue().equals(this.getLossValue()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getQuantile() == null) ? 0 : getQuantile().hashCode()); hashCode = prime * hashCode + ((getLossValue() == null) ? 0 : getLossValue().hashCode()); return hashCode; } @Override public WeightedQuantileLoss clone() { try { return (WeightedQuantileLoss) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.forecast.model.transform.WeightedQuantileLossMarshaller.getInstance().marshall(this, protocolMarshaller); } }
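/*
 * Illustrative sketch, not part of the SDK model above: constructing the model object with its
 * fluent "with" methods. The sample numbers and the demo class are assumptions for illustration.
 */
import com.amazonaws.services.forecast.model.WeightedQuantileLoss;

public class WeightedQuantileLossSketch {
    public static void main(String[] args) {
        WeightedQuantileLoss loss = new WeightedQuantileLoss()
                .withQuantile(0.9)       // the P90 quantile
                .withLossValue(0.1234);  // weighted loss at that quantile
        // toString() produces "{Quantile: 0.9,LossValue: 0.1234}" style output for debugging.
        System.out.println(loss);
    }
}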
/* * Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.ac.ebi.spot.goci.service.junidecode; /** * Character map for Unicode characters with codepoint U+AFxx. * @author Giuseppe Cardone * @version 0.1 */ class Xaf { public static final String[] map = new String[]{ "ggyeols", // 0x00 "ggyeolt", // 0x01 "ggyeolp", // 0x02 "ggyeolh", // 0x03 "ggyeom", // 0x04 "ggyeob", // 0x05 "ggyeobs", // 0x06 "ggyeos", // 0x07 "ggyeoss", // 0x08 "ggyeong", // 0x09 "ggyeoj", // 0x0a "ggyeoc", // 0x0b "ggyeok", // 0x0c "ggyeot", // 0x0d "ggyeop", // 0x0e "ggyeoh", // 0x0f "ggye", // 0x10 "ggyeg", // 0x11 "ggyegg", // 0x12 "ggyegs", // 0x13 "ggyen", // 0x14 "ggyenj", // 0x15 "ggyenh", // 0x16 "ggyed", // 0x17 "ggyel", // 0x18 "ggyelg", // 0x19 "ggyelm", // 0x1a "ggyelb", // 0x1b "ggyels", // 0x1c "ggyelt", // 0x1d "ggyelp", // 0x1e "ggyelh", // 0x1f "ggyem", // 0x20 "ggyeb", // 0x21 "ggyebs", // 0x22 "ggyes", // 0x23 "ggyess", // 0x24 "ggyeng", // 0x25 "ggyej", // 0x26 "ggyec", // 0x27 "ggyek", // 0x28 "ggyet", // 0x29 "ggyep", // 0x2a "ggyeh", // 0x2b "ggo", // 0x2c "ggog", // 0x2d "ggogg", // 0x2e "ggogs", // 0x2f "ggon", // 0x30 "ggonj", // 0x31 "ggonh", // 0x32 "ggod", // 0x33 "ggol", // 0x34 "ggolg", // 0x35 "ggolm", // 0x36 "ggolb", // 0x37 "ggols", // 0x38 "ggolt", // 0x39 "ggolp", // 0x3a "ggolh", // 0x3b "ggom", // 0x3c "ggob", // 0x3d "ggobs", // 0x3e "ggos", // 0x3f "ggoss", // 0x40 "ggong", // 0x41 "ggoj", // 0x42 "ggoc", // 0x43 "ggok", // 0x44 "ggot", // 0x45 "ggop", // 0x46 "ggoh", // 0x47 "ggwa", // 0x48 "ggwag", // 0x49 "ggwagg", // 0x4a "ggwags", // 0x4b "ggwan", // 0x4c "ggwanj", // 0x4d "ggwanh", // 0x4e "ggwad", // 0x4f "ggwal", // 0x50 "ggwalg", // 0x51 "ggwalm", // 0x52 "ggwalb", // 0x53 "ggwals", // 0x54 "ggwalt", // 0x55 "ggwalp", // 0x56 "ggwalh", // 0x57 "ggwam", // 0x58 "ggwab", // 0x59 "ggwabs", // 0x5a "ggwas", // 0x5b "ggwass", // 0x5c "ggwang", // 0x5d "ggwaj", // 0x5e "ggwac", // 0x5f "ggwak", // 0x60 "ggwat", // 0x61 "ggwap", // 0x62 "ggwah", // 0x63 "ggwae", // 0x64 "ggwaeg", // 0x65 "ggwaegg", // 0x66 "ggwaegs", // 0x67 "ggwaen", // 0x68 "ggwaenj", // 0x69 "ggwaenh", // 0x6a "ggwaed", // 0x6b "ggwael", // 0x6c "ggwaelg", // 0x6d "ggwaelm", // 0x6e "ggwaelb", // 0x6f "ggwaels", // 0x70 "ggwaelt", // 0x71 "ggwaelp", // 0x72 "ggwaelh", // 0x73 "ggwaem", // 0x74 "ggwaeb", // 0x75 "ggwaebs", // 0x76 "ggwaes", // 0x77 "ggwaess", // 0x78 "ggwaeng", // 0x79 "ggwaej", // 0x7a "ggwaec", // 0x7b "ggwaek", // 0x7c "ggwaet", // 0x7d "ggwaep", // 0x7e "ggwaeh", // 0x7f "ggoe", // 0x80 "ggoeg", // 0x81 "ggoegg", // 0x82 "ggoegs", // 0x83 "ggoen", // 0x84 "ggoenj", // 0x85 "ggoenh", // 0x86 "ggoed", // 0x87 "ggoel", // 0x88 "ggoelg", // 0x89 "ggoelm", // 0x8a "ggoelb", // 0x8b "ggoels", // 0x8c "ggoelt", // 0x8d "ggoelp", // 0x8e "ggoelh", // 0x8f "ggoem", // 0x90 "ggoeb", // 0x91 "ggoebs", // 0x92 "ggoes", // 0x93 "ggoess", // 0x94 "ggoeng", // 0x95 "ggoej", // 0x96 "ggoec", // 0x97 "ggoek", // 0x98 "ggoet", // 0x99 "ggoep", // 0x9a 
"ggoeh", // 0x9b "ggyo", // 0x9c "ggyog", // 0x9d "ggyogg", // 0x9e "ggyogs", // 0x9f "ggyon", // 0xa0 "ggyonj", // 0xa1 "ggyonh", // 0xa2 "ggyod", // 0xa3 "ggyol", // 0xa4 "ggyolg", // 0xa5 "ggyolm", // 0xa6 "ggyolb", // 0xa7 "ggyols", // 0xa8 "ggyolt", // 0xa9 "ggyolp", // 0xaa "ggyolh", // 0xab "ggyom", // 0xac "ggyob", // 0xad "ggyobs", // 0xae "ggyos", // 0xaf "ggyoss", // 0xb0 "ggyong", // 0xb1 "ggyoj", // 0xb2 "ggyoc", // 0xb3 "ggyok", // 0xb4 "ggyot", // 0xb5 "ggyop", // 0xb6 "ggyoh", // 0xb7 "ggu", // 0xb8 "ggug", // 0xb9 "ggugg", // 0xba "ggugs", // 0xbb "ggun", // 0xbc "ggunj", // 0xbd "ggunh", // 0xbe "ggud", // 0xbf "ggul", // 0xc0 "ggulg", // 0xc1 "ggulm", // 0xc2 "ggulb", // 0xc3 "gguls", // 0xc4 "ggult", // 0xc5 "ggulp", // 0xc6 "ggulh", // 0xc7 "ggum", // 0xc8 "ggub", // 0xc9 "ggubs", // 0xca "ggus", // 0xcb "gguss", // 0xcc "ggung", // 0xcd "gguj", // 0xce "gguc", // 0xcf "gguk", // 0xd0 "ggut", // 0xd1 "ggup", // 0xd2 "gguh", // 0xd3 "ggweo", // 0xd4 "ggweog", // 0xd5 "ggweogg", // 0xd6 "ggweogs", // 0xd7 "ggweon", // 0xd8 "ggweonj", // 0xd9 "ggweonh", // 0xda "ggweod", // 0xdb "ggweol", // 0xdc "ggweolg", // 0xdd "ggweolm", // 0xde "ggweolb", // 0xdf "ggweols", // 0xe0 "ggweolt", // 0xe1 "ggweolp", // 0xe2 "ggweolh", // 0xe3 "ggweom", // 0xe4 "ggweob", // 0xe5 "ggweobs", // 0xe6 "ggweos", // 0xe7 "ggweoss", // 0xe8 "ggweong", // 0xe9 "ggweoj", // 0xea "ggweoc", // 0xeb "ggweok", // 0xec "ggweot", // 0xed "ggweop", // 0xee "ggweoh", // 0xef "ggwe", // 0xf0 "ggweg", // 0xf1 "ggwegg", // 0xf2 "ggwegs", // 0xf3 "ggwen", // 0xf4 "ggwenj", // 0xf5 "ggwenh", // 0xf6 "ggwed", // 0xf7 "ggwel", // 0xf8 "ggwelg", // 0xf9 "ggwelm", // 0xfa "ggwelb", // 0xfb "ggwels", // 0xfc "ggwelt", // 0xfd "ggwelp", // 0xfe "ggwelh" // 0xff }; }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.internal;

import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import java.lang.Math;
import java.lang.ref.*;

/**
 * An <code>ObjIdMap</code> maps GemFire object ids to an
 * <code>Object</code>.  This is an optimization because using a
 * {@link java.util.HashMap} for this purpose proved to be too slow
 * because of all of the {@link Integer}s that had to be created.
 */
public class ObjIdMap {

  /** The contents of the map */
  protected Entry[] table;

  /** The total number of mappings in the map */
  private int count;

  /** Once the number of mappings in the map exceeds the threshold,
   * the map is rehashed.  The threshold is the
   * (capacity * loadFactor). capacity is table.length */
  private int threshold;

  /** The load factor of the map */
  private float loadFactor;

  public final Object rehashLock = new Object();

  //////////////////// Constructors ////////////////////

  /**
   * Creates a new, empty map with the given initial capacity (number
   * of buckets) and load factor.
   */
  public ObjIdMap(int initialCapacity, float loadFactor) {
    if (initialCapacity < 0) {
      throw new IllegalArgumentException(LocalizedStrings.ObjIdMap_ILLEGAL_INITIAL_CAPACITY_0.toLocalizedString(Integer.valueOf(initialCapacity)));
    }
    if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
      throw new IllegalArgumentException(LocalizedStrings.ObjIdMap_ILLEGAL_LOAD_FACTOR_0.toLocalizedString(new Float(loadFactor)));
    }
    if (initialCapacity==0) {
      initialCapacity = 1;
    }
    this.loadFactor = loadFactor;
    table = new Entry[initialCapacity];
    threshold = (int)(initialCapacity * loadFactor);
  }

  /**
   * Creates a new, empty map with the default initial capacity (11
   * buckets) and load factor (0.75).
   */
  public ObjIdMap() {
    this(11, 0.75f);
  }

  /**
   * Create a new map which will contain all the contents of the oldMap
   * and then add the specified key and value.
   */
  public ObjIdMap(ObjIdMap oldMap, int addKey, Object addValue) {
    this.loadFactor = oldMap.loadFactor;
    // we do a +2 to make enough room for one more entry
    // since we think the loadFactor is 0.5
    rehash(oldMap.table, oldMap.count, oldMap.count + 2);
    put(addKey, addValue);
  }

  /**
   * Create a new map which will contain all the contents of the oldMap.
   */
  public ObjIdMap(ObjIdMap oldMap) {
    this.table = new Entry[oldMap.table.length];
    System.arraycopy(oldMap.table, 0, this.table, 0, this.table.length);
    this.count = oldMap.count;
    this.threshold = oldMap.threshold;
    this.loadFactor = oldMap.loadFactor;
  }

  //////////////////// Instance Methods ////////////////////

  /**
   * Returns the number of mappings in this map
   */
  public int size() {
    return this.count;
  }

  /**
   * Returns <code>true</code> if this map contains a mapping for the
   * given key.
* * @throws IllegalArgumentException * <code>key</code> is less than zero */ public boolean containsKey(int key) { Entry[] table = this.table; int bucket = Math.abs(key) % table.length; for (Entry e = table[bucket]; e != null; e = e.next) { if (e.key == key) { return true; } } return false; } /** * Returns the object to which the given key is mapped. If no * object is mapped to the given key, <code>null</code> is returned. * * @throws IllegalArgumentException * <code>key</code> is less than zero */ public Object get(int key) { Entry[] table = this.table; int bucket = Math.abs(key) % table.length; for (Entry e = table[bucket]; e != null; e = e.next) { if (e.key == key) { return e.value; } } return null; } /** * Rehashes this map into a new map with a large number of buckets. * It is called when the number of entries in the map exceeds the * capacity and load factor. */ private void rehash() { rehash(this.table, this.count, this.count * 2 + 1); } private void rehash(Entry[] oldMap, int newCount, int newCapacity) { int oldCapacity = oldMap.length; Entry newMap[] = new Entry[newCapacity]; synchronized (rehashLock) { for (int i = oldCapacity ; i-- > 0 ;) { for (Entry old = oldMap[i] ; old != null ; ) { Entry e = old; old = old.next; if (e.value != null && e.value instanceof WeakReference) { WeakReference r = (WeakReference)e.value; if (r.get() == null) { // don't copy this one into the new table since its value was gc'd newCount--; continue; } } int index = Math.abs(e.key) % newCapacity; e.next = newMap[index]; newMap[index] = e; } } threshold = (int)(newCapacity * loadFactor); count = newCount; table = newMap; } } /** * Creates a mapping between the given key (object id) and an * object. Returns the previous value, or <code>null</code> if * there was none. * * @throws IllegalArgumentException * <code>key</code> is less than zero */ public Object put(int key, Object value) { // Is the key already in the table? int bucket = Math.abs(key) % table.length; for (Entry e = table[bucket]; e != null; e = e.next) { if (e.key == key) { Object old = e.value; e.value = value; return old; } } // Adjust the table, if necessary if (this.count >= this.threshold) { rehash(); // table = this.table; assignment has no effect bucket = Math.abs(key) % table.length; } Entry e = new Entry(); e.key = key; e.value = value; e.next = table[bucket]; table[bucket] = e; count++; return null; } /** * Removes the mapping for the given key. Returns the object to * which the key was mapped, or <code>null</code> otherwise. */ public Object remove(int key) { Entry[] table = this.table; int bucket = Math.abs(key) % table.length; for (Entry e = table[bucket], prev = null; e != null; prev = e, e = e.next) { if (key == e.key) { if (prev != null) prev.next = e.next; else table[bucket] = e.next; count--; Object oldValue = e.value; e.value = null; return oldValue; } } return null; } /** * Returns all of the objects in the map */ public Object[] values() { Object[] values = new Object[this.size()]; Entry[] table = this.table; int i = 0; for (int bucket = 0; bucket < table.length; bucket++) { for (Entry e = table[bucket]; e != null; e = e.next) { values[i++] = e.value; } } return values; } /** * Returns an iterator over the {@link Entry}s of this map. Note * that this iterator is <b>not</b> fail-fast. That is, it is the * user's responsibility to ensure that the map does not change * while he is iterating over it. 
*/ public EntryIterator iterator() { return new EntryIterator(); } /////////////////////// Inner Classes /////////////////////// /** * Inner class that represents an entry in the map */ public static class Entry { /** The key of the entry */ int key; /** The value of the entry */ Object value; /** The next entry in the collision chain */ Entry next; public int getKey() { return this.key; } public Object getValue() { return this.value; } } /** * A class for iterating over the contents of an * <code>ObjIdMap</code> */ public class EntryIterator { /** The current collision chain we're traversing */ private int index = 0; /** The next Entry we'll iterate over */ private Entry next = null; //////////////////// Instance Methods //////////////////// /** * Returns the next Entry to visit. Will return <code>null</code> * after we have iterated through all of the entries. */ public Entry next() { while (this.next == null && this.index < table.length) { if (table[index] != null) { this.next = table[index]; } this.index++; } Entry oldNext = this.next; if (oldNext != null) { this.next = oldNext.next; } return oldNext; } } }
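/*
 * Illustrative sketch (not part of GemFire): basic use of ObjIdMap as an int-keyed map,
 * including iteration via the non-fail-fast EntryIterator. The example class name is
 * hypothetical; only methods declared in ObjIdMap above are used.
 */
package com.gemstone.gemfire.internal;

public class ObjIdMapExample {

  public static void main(String[] args) {
    ObjIdMap map = new ObjIdMap(); // 11 buckets, load factor 0.75

    // put() returns the previous value for the key, or null if there was none.
    map.put(42, "first");
    Object previous = map.put(42, "second");
    System.out.println(previous);            // first
    System.out.println(map.get(42));         // second
    System.out.println(map.containsKey(7));  // false
    System.out.println(map.size());          // 1

    // The iterator is not fail-fast, so the map must not change while iterating.
    ObjIdMap.EntryIterator it = map.iterator();
    for (ObjIdMap.Entry e = it.next(); e != null; e = it.next()) {
      System.out.println(e.getKey() + " -> " + e.getValue());
    }

    // remove() returns the value that was mapped to the key.
    System.out.println(map.remove(42));      // second
  }
}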
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.network.fluent; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.management.polling.PollResult; import com.azure.core.util.Context; import com.azure.core.util.polling.PollerFlux; import com.azure.core.util.polling.SyncPoller; import com.azure.resourcemanager.network.fluent.models.RoutingIntentInner; import java.nio.ByteBuffer; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; /** An instance of this class provides access to all the operations defined in RoutingIntentsClient. */ public interface RoutingIntentsClient { /** * Creates a RoutingIntent resource if it doesn't exist else updates the existing RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the per VirtualHub singleton Routing Intent resource. * @param routingIntentParameters Parameters supplied to create or update RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub along with {@link Response} on successful completion * of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> createOrUpdateWithResponseAsync( String resourceGroupName, String virtualHubName, String routingIntentName, RoutingIntentInner routingIntentParameters); /** * Creates a RoutingIntent resource if it doesn't exist else updates the existing RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the per VirtualHub singleton Routing Intent resource. * @param routingIntentParameters Parameters supplied to create or update RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link PollerFlux} for polling of the routing intent child resource of a Virtual hub. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) PollerFlux<PollResult<RoutingIntentInner>, RoutingIntentInner> beginCreateOrUpdateAsync( String resourceGroupName, String virtualHubName, String routingIntentName, RoutingIntentInner routingIntentParameters); /** * Creates a RoutingIntent resource if it doesn't exist else updates the existing RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the per VirtualHub singleton Routing Intent resource. * @param routingIntentParameters Parameters supplied to create or update RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of the routing intent child resource of a Virtual hub. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<RoutingIntentInner>, RoutingIntentInner> beginCreateOrUpdate( String resourceGroupName, String virtualHubName, String routingIntentName, RoutingIntentInner routingIntentParameters); /** * Creates a RoutingIntent resource if it doesn't exist else updates the existing RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the per VirtualHub singleton Routing Intent resource. * @param routingIntentParameters Parameters supplied to create or update RoutingIntent. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of the routing intent child resource of a Virtual hub. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<RoutingIntentInner>, RoutingIntentInner> beginCreateOrUpdate( String resourceGroupName, String virtualHubName, String routingIntentName, RoutingIntentInner routingIntentParameters, Context context); /** * Creates a RoutingIntent resource if it doesn't exist else updates the existing RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the per VirtualHub singleton Routing Intent resource. * @param routingIntentParameters Parameters supplied to create or update RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<RoutingIntentInner> createOrUpdateAsync( String resourceGroupName, String virtualHubName, String routingIntentName, RoutingIntentInner routingIntentParameters); /** * Creates a RoutingIntent resource if it doesn't exist else updates the existing RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the per VirtualHub singleton Routing Intent resource. * @param routingIntentParameters Parameters supplied to create or update RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) RoutingIntentInner createOrUpdate( String resourceGroupName, String virtualHubName, String routingIntentName, RoutingIntentInner routingIntentParameters); /** * Creates a RoutingIntent resource if it doesn't exist else updates the existing RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the per VirtualHub singleton Routing Intent resource. * @param routingIntentParameters Parameters supplied to create or update RoutingIntent. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub. */ @ServiceMethod(returns = ReturnType.SINGLE) RoutingIntentInner createOrUpdate( String resourceGroupName, String virtualHubName, String routingIntentName, RoutingIntentInner routingIntentParameters, Context context); /** * Retrieves the details of a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub along with {@link Response} on successful completion * of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<RoutingIntentInner>> getWithResponseAsync( String resourceGroupName, String virtualHubName, String routingIntentName); /** * Retrieves the details of a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<RoutingIntentInner> getAsync(String resourceGroupName, String virtualHubName, String routingIntentName); /** * Retrieves the details of a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) RoutingIntentInner get(String resourceGroupName, String virtualHubName, String routingIntentName); /** * Retrieves the details of a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the routing intent child resource of a Virtual hub along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<RoutingIntentInner> getWithResponse( String resourceGroupName, String virtualHubName, String routingIntentName, Context context); /** * Deletes a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync( String resourceGroupName, String virtualHubName, String routingIntentName); /** * Deletes a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link PollerFlux} for polling of long-running operation. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) PollerFlux<PollResult<Void>, Void> beginDeleteAsync( String resourceGroupName, String virtualHubName, String routingIntentName); /** * Deletes a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of long-running operation. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<Void>, Void> beginDelete( String resourceGroupName, String virtualHubName, String routingIntentName); /** * Deletes a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. 
* @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link SyncPoller} for polling of long-running operation. */ @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION) SyncPoller<PollResult<Void>, Void> beginDelete( String resourceGroupName, String virtualHubName, String routingIntentName, Context context); /** * Deletes a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> deleteAsync(String resourceGroupName, String virtualHubName, String routingIntentName); /** * Deletes a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) void delete(String resourceGroupName, String virtualHubName, String routingIntentName); /** * Deletes a RoutingIntent. * * @param resourceGroupName The resource group name of the RoutingIntent. * @param virtualHubName The name of the VirtualHub. * @param routingIntentName The name of the RoutingIntent. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) void delete(String resourceGroupName, String virtualHubName, String routingIntentName, Context context); /** * Retrieves the details of all RoutingIntent child resources of the VirtualHub. * * @param resourceGroupName The resource group name of the VirtualHub. * @param virtualHubName The name of the VirtualHub. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of the routing intent result and a URL nextLink to get the next set of results as paginated response * with {@link PagedFlux}. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<RoutingIntentInner> listAsync(String resourceGroupName, String virtualHubName); /** * Retrieves the details of all RoutingIntent child resources of the VirtualHub. * * @param resourceGroupName The resource group name of the VirtualHub. * @param virtualHubName The name of the VirtualHub. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of the routing intent result and a URL nextLink to get the next set of results as paginated response * with {@link PagedIterable}. */ @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<RoutingIntentInner> list(String resourceGroupName, String virtualHubName); /** * Retrieves the details of all RoutingIntent child resources of the VirtualHub. * * @param resourceGroupName The resource group name of the VirtualHub. * @param virtualHubName The name of the VirtualHub. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list of the routing intent result and a URL nextLink to get the next set of results as paginated response * with {@link PagedIterable}. */ @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<RoutingIntentInner> list(String resourceGroupName, String virtualHubName, Context context); }
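/*
 * Illustrative sketch (not part of the generated client): typical synchronous use of
 * RoutingIntentsClient. How the client instance is obtained, and the resource group,
 * hub and routing-intent names, are assumptions for illustration; only methods declared
 * in the interface above are invoked.
 */
package com.azure.resourcemanager.network.fluent;

import com.azure.core.http.rest.PagedIterable;
import com.azure.resourcemanager.network.fluent.models.RoutingIntentInner;

public final class RoutingIntentsClientExample {

    private RoutingIntentsClientExample() {
    }

    /** Lists, fetches and deletes routing intents for a hypothetical virtual hub. */
    public static void listGetAndDelete(RoutingIntentsClient client) {
        // Enumerate all RoutingIntent child resources of the hub; paging is handled by PagedIterable.
        PagedIterable<RoutingIntentInner> intents = client.list("my-rg", "my-hub");
        for (RoutingIntentInner intent : intents) {
            System.out.println(intent);
        }

        // Fetch a single RoutingIntent by name; a ManagementException is thrown if the server rejects the call.
        RoutingIntentInner single = client.get("my-rg", "my-hub", "my-routing-intent");
        System.out.println(single);

        // Long-running delete, blocking until the operation completes.
        client.delete("my-rg", "my-hub", "my-routing-intent");
    }
}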
/* * Copyright (C) 2021-2022 Objectos Software LTDA. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package br.com.objectos.core.map; import br.com.objectos.core.array.ObjectArrays; import br.com.objectos.core.object.Checks; import java.util.Arrays; import java.util.Map; /** * A hash-based implementation of the {@link Map} interface. * * @param <K> type of the keys in this map * @param <V> type of the values in this map */ public class MutableMap<K, V> extends AbstractArrayBasedMap<K, V> { private static final float DEFAULT_LOAD_FACTOR = 0.75F; private static final int FIRST_RESIZE = 8; private static final int MAX_ARRAY_LENGTH = MAX_POSITIVE_POWER_OF_TWO; private final float loadFactor = DEFAULT_LOAD_FACTOR; private int rehashSize; /** * Creates a new {@code MutableMap} instance. */ public MutableMap() {} /** * Creates and returns a new {@code MutableMap} instance. * * <p> * This method is mainly provided as a convenience for Java Multi-Release * codebases. In particular codebases that must support versions prior to Java * 7 and, therefore, cannot use the diamond operator. * * @param <K> type of the keys in this map * @param <V> type of the values in this map * * @return a new {@code MutableMap} instance */ public static <K, V> MutableMap<K, V> create() { return new MutableMap<K, V>(); } /** * Removes all of the mappings in this map. */ @Override public void clear() { Arrays.fill(array, null); size = 0; } /** * Associates the specified value with the specified key in this map. If the * map previously contained a mapping for the key, the old value is replaced * by the specified value. * * <p> * A map {@code m} is said to contain a mapping for a key {@code k} if and * only if {@link #containsKey(Object) m.containsKey(k)} would return * {@code true}.) * * @param key * key with which the specified value is to be associated * @param value * value to be associated with the specified key * * @return the previous value associated with {@code key}, or * {@code null} if there was no mapping for {@code key}. * * @throws NullPointerException if the specified key or value is null */ @Override public final V put(K key, V value) { Checks.checkNotNull(key, "key == null"); Checks.checkNotNull(value, "value == null"); return putUnchecked(key, value); } /** * Not implemented in this release. It might be implemented in a future * release. * * @param m * ignored (this operation in not yet implemented) * * @throws UnsupportedOperationException * this method may be implemented in a future release */ @Override public final void putAll(Map<? extends K, ? extends V> m) { throw new UnsupportedOperationException("Not yet implemented"); } /** * Returns an {@link ImmutableMap} copy of this map. * * <p> * The returned {@code ImmutableMap} will contain all of the entries from this * map. * * <p> * The returned map will be a copy in the sense that, after this method * returns, modifying this map will have no effect on the returned (copied) * one. 
* * <p> * Note, however, that the behaviour of this method is undefined if this map * is modified while the copy is being made. * * @return an {@link ImmutableMap} copy of this set */ public ImmutableMap<K, V> toImmutableMap() { switch (size) { case 0: return ImmutableMap.empty(); default: Object[] copy; copy = Arrays.copyOf(array, array.length); return new ImmutableMap<K, V>(copy, size); } } void insert(int index, Object key, Object value) { set(index, key, value); size++; rehashIfNecessary(); } final V putUnchecked(Object key, Object value) { firstResizeIfNecessary(); int index, marker; index = marker = hashIndex(key); Object existing; existing = array[index]; if (existing == null) { insert(index, key, value); return null; } else if (existing.equals(key)) { return replace(index, key, value); } else { index = index + 2; } while (index < array.length) { existing = array[index]; if (existing == null) { insert(index, key, value); return null; } else if (existing.equals(key)) { return replace(index, key, value); } else { index = index + 2; } } index = 0; while (index < marker) { existing = array[index]; if (existing == null) { insert(index, key, value); return null; } else if (existing.equals(key)) { return replace(index, key, value); } else { index = index + 2; } } throw new UnsupportedOperationException("Implement me"); } @SuppressWarnings("unchecked") V replace(int keyIndex, Object key, Object value) { int valueIndex; valueIndex = keyIndex + 1; Object existingValue; existingValue = array[valueIndex]; array[valueIndex] = value; return (V) existingValue; } private void firstResizeIfNecessary() { if (array == ObjectArrays.empty()) { resizeTo(FIRST_RESIZE); } } private void rehashIfNecessary() { if (size < rehashSize) { return; } if (array.length == MAX_ARRAY_LENGTH) { throw new OutOfMemoryError("backing array already at max allowed length"); } Object[] previous; previous = array; int newLength; newLength = array.length << 1; if (newLength < 0) { newLength = MAX_ARRAY_LENGTH; } resizeTo(newLength); for (int i = 0, length = previous.length; i < length; i = i + 2) { Object key; key = previous[i]; if (key == null) { continue; } Object value; value = previous[i + 1]; rehashPut(key, value); } } private void rehashPut(Object key, Object value) { int index, marker; index = marker = hashIndex(key); Object existing; existing = array[index]; if (existing == null) { set(index, key, value); return; } else { index = index + 2; } while (index < array.length) { existing = array[index]; if (existing == null) { set(index, key, value); return; } else { index = index + 2; } } index = 0; while (index < marker) { existing = array[index]; if (existing == null) { set(index, key, value); return; } else { index = index + 2; } } throw new UnsupportedOperationException("Implement me"); } private void resizeTo(int size) { array = new Object[size]; int hashLength; hashLength = size >> 1; hashMask = hashLength - 1; rehashSize = (int) (hashLength * loadFactor); } private void set(int index, Object key, Object value) { array[index] = key; array[index + 1] = value; } }
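/*
 * Illustrative sketch (not part of the library): putting entries into a MutableMap and
 * taking an immutable snapshot. The example class name is hypothetical; get() and size()
 * are assumed to be inherited from the Map contract via AbstractArrayBasedMap/ImmutableMap.
 */
package br.com.objectos.core.map;

public class MutableMapExample {

  public static void main(String[] args) {
    MutableMap<String, Integer> ports = MutableMap.create();

    // put() rejects null keys and values and returns the previous mapping, if any.
    ports.put("http", 80);
    Integer previous = ports.put("http", 8080);
    System.out.println(previous);          // 80
    System.out.println(ports.get("http")); // 8080

    // toImmutableMap() copies the backing array, so later changes to 'ports'
    // do not affect the snapshot.
    ImmutableMap<String, Integer> snapshot = ports.toImmutableMap();
    ports.clear();
    System.out.println(snapshot.size());   // 1
  }
}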
/*
 * Copyright 2014-2022 Web Firm Framework
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * @author WFF
 */
package com.webfirmframework.wffweb.css.css3;

import java.util.Arrays;
import java.util.List;

import com.webfirmframework.wffweb.InvalidValueException;
import com.webfirmframework.wffweb.NullValueException;
import com.webfirmframework.wffweb.css.CssNameConstants;
import com.webfirmframework.wffweb.css.core.AbstractCssProperty;
import com.webfirmframework.wffweb.util.StringUtil;
import com.webfirmframework.wffweb.util.TagStringUtil;

/**
 * <pre>
 * -moz-column-count: <i>number</i>|auto|initial|inherit;
 *
 * The -moz-column-count property specifies the number of columns an element should be divided into.
 * Default value:  auto
 * Inherited:      no
 * Animatable:     yes
 * Version:        CSS3
 * JavaScript syntax:      object.style.mozColumnCount=3
 * </pre>
 *
 * @author WFF
 * @since 1.0.0
 */
public class MozColumnCount extends AbstractCssProperty<MozColumnCount> {

    private static final long serialVersionUID = 1_0_0L;

    public static final String INITIAL = "initial";
    public static final String INHERIT = "inherit";
    public static final String AUTO = "auto";

    private static final List<String> PREDEFINED_CONSTANTS = Arrays.asList(INITIAL, INHERIT, AUTO);

    private String cssValue;
    private Integer value;

    /**
     * The default value <code>auto</code> will be set as the cssValue.
     *
     * @author WFF
     * @since 1.0.0
     */
    public MozColumnCount() {
        cssValue = AUTO;
    }

    /**
     * @param cssValue the css value to set.
     */
    public MozColumnCount(final String cssValue) {
        setCssValue(cssValue);
    }

    /**
     * @param mozColumnCount the {@code MozColumnCount} object from which the cssValue to
     *                       set. And, {@code null} will throw
     *                       {@code NullValueException}
     */
    public MozColumnCount(final MozColumnCount mozColumnCount) {
        if (mozColumnCount == null) {
            throw new NullValueException("mozColumnCount can not be null");
        }
        setCssValue(mozColumnCount.getCssValue());
    }

    /**
     * @param value the column count value to set
     */
    public MozColumnCount(final int value) {
        this.value = value;
        cssValue = String.valueOf(value);
    }

    /*
     * (non-Javadoc)
     *
     * @see com.webfirmframework.wffweb.css.CssProperty#getCssName()
     *
     * @since 1.0.0
     *
     * @author WFF
     */
    @Override
    public String getCssName() {
        return CssNameConstants.MOZ_COLUMN_COUNT;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.webfirmframework.wffweb.css.CssProperty#getCssValue()
     *
     * @since 1.0.0
     *
     * @author WFF
     */
    @Override
    public String getCssValue() {
        return cssValue;
    }

    @Override
    public String toString() {
        return getCssName() + ": " + getCssValue();
    }

    /**
     * gets the mozColumnCount in {@code Integer} value.
     *
     * @return the value in int or null if the cssValue is <code>initial</code> or
     *         <code>inherit</code>.
* @since 1.0.0 * @author WFF */ public Integer getValue() { return value; } /** * @param value the value to set * @author WFF * @since 1.0.0 */ public void setValue(final int value) { this.value = value; cssValue = String.valueOf(value); if (getStateChangeInformer() != null) { getStateChangeInformer().stateChanged(this); } } /** * @param cssValue the value should be in the format of <code>0.5</code>, * <code>initial/inherit</code>. {@code null} is considered as * an invalid value and it will throw * {@code NullValueException}. * @since 1.0.0 * @author WFF */ @Override public MozColumnCount setCssValue(final String cssValue) { if (cssValue == null) { throw new NullValueException( "null is an invalid value. The value format should be as for example 0.5, initial/inherit."); } else { final String trimmedCssValue = TagStringUtil.toLowerCase(StringUtil.strip(cssValue)); if (PREDEFINED_CONSTANTS.contains(trimmedCssValue)) { this.cssValue = trimmedCssValue; value = null; } else { try { value = Integer.valueOf(trimmedCssValue); this.cssValue = value.toString(); } catch (final NumberFormatException e) { throw new InvalidValueException(cssValue + " is an invalid value. The value format should be as for example 0.5, initial, inherit etc.."); } } } if (getStateChangeInformer() != null) { getStateChangeInformer().stateChanged(this); } return this; } /** * sets as {@code initial} * * @since 1.0.0 * @author WFF */ public void setAsInitial() { setCssValue(INITIAL); } /** * sets as {@code inherit} * * @since 1.0.0 * @author WFF */ public void setAsInherit() { setCssValue(INHERIT); } /** * sets as {@code auto} * * @since 1.0.0 * @author WFF */ public void setAsAuto() { setCssValue(AUTO); } /** * validates if the given cssValue is valid for this class. * * @param cssValue the value to check. * @return true if valid and false if invalid. * @author WFF * @since 1.0.0 */ public static boolean isValid(final String cssValue) { final String trimmedCssValue = TagStringUtil.toLowerCase(StringUtil.strip(cssValue)); if (StringUtil.containsSpace(trimmedCssValue)) { return false; } try { final int parsedValue = Integer.parseInt(trimmedCssValue); return !(parsedValue == 0 && (StringUtil.containsMinus(trimmedCssValue) || StringUtil.containsPlus(trimmedCssValue))); } catch (final NumberFormatException e) { // NOP } return PREDEFINED_CONSTANTS.contains(trimmedCssValue); } }
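/*
 * Illustrative sketch (not part of wffweb): constructing and mutating a MozColumnCount
 * css property. The example class name is hypothetical; only methods defined in the
 * class above are used, and the printed css name follows the class's own documentation.
 */
package com.webfirmframework.wffweb.css.css3;

public class MozColumnCountExample {

    public static void main(String[] args) {
        MozColumnCount count = new MozColumnCount(); // defaults to "auto"
        System.out.println(count);                   // -moz-column-count: auto

        count.setValue(3);
        System.out.println(count.getCssValue());     // 3
        System.out.println(count.getValue());        // 3

        count.setAsInherit();
        System.out.println(count.getValue());        // null, a predefined constant is in use

        // isValid() accepts integer values and the predefined constants only.
        System.out.println(MozColumnCount.isValid("4"));    // true
        System.out.println(MozColumnCount.isValid("auto")); // true
        System.out.println(MozColumnCount.isValid("1.5"));  // false
    }
}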
package com.lothrazar.cyclic.block.melter;

import java.util.List;
import java.util.function.Predicate;
import com.lothrazar.cyclic.block.TileBlockEntityCyclic;
import com.lothrazar.cyclic.capabilities.CustomEnergyStorage;
import com.lothrazar.cyclic.capabilities.FluidTankBase;
import com.lothrazar.cyclic.data.Const;
import com.lothrazar.cyclic.recipe.CyclicRecipeType;
import com.lothrazar.cyclic.registry.TileRegistry;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Direction;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.network.chat.Component;
import net.minecraft.network.chat.TextComponent;
import net.minecraft.world.MenuProvider;
import net.minecraft.world.entity.player.Inventory;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.inventory.AbstractContainerMenu;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.util.LazyOptional;
import net.minecraftforge.energy.CapabilityEnergy;
import net.minecraftforge.energy.IEnergyStorage;
import net.minecraftforge.fluids.FluidAttributes;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.capability.CapabilityFluidHandler;
import net.minecraftforge.fluids.capability.IFluidHandler.FluidAction;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.items.ItemStackHandler;

@SuppressWarnings("rawtypes")
public class TileMelter extends TileBlockEntityCyclic implements MenuProvider {

  static enum Fields {
    REDSTONE, TIMER, RENDER;
  }

  static final int MAX = 64000;
  public static final int CAPACITY = 64 * FluidAttributes.BUCKET_VOLUME;
  public static final int TRANSFER_FLUID_PER_TICK = FluidAttributes.BUCKET_VOLUME / 20;
  public static final int TIMER_FULL = Const.TICKS_PER_SEC * 3;
  public FluidTankBase tank = new FluidTankBase(this, CAPACITY, isFluidValid());
  LazyOptional<FluidTankBase> fluidCap = LazyOptional.of(() -> tank);
  CustomEnergyStorage energy = new CustomEnergyStorage(MAX, MAX);
  ItemStackHandler inventory = new ItemStackHandler(2);
  private LazyOptional<IEnergyStorage> energyCap = LazyOptional.of(() -> energy);
  private LazyOptional<IItemHandler> inventoryCap = LazyOptional.of(() -> inventory);
  private RecipeMelter currentRecipe;

  public TileMelter(BlockPos pos, BlockState state) {
    super(TileRegistry.MELTER.get(), pos, state);
  }

  public static void serverTick(Level level, BlockPos blockPos, BlockState blockState, TileMelter e) {
    e.tick();
  }

  public static <E extends BlockEntity> void clientTick(Level level, BlockPos blockPos, BlockState blockState, TileMelter e) {
    e.tick();
  }

  public void tick() {
    this.syncEnergy();
    this.findMatchingRecipe();
    if (currentRecipe == null) {
      return;
    }
    this.timer--;
    if (timer < 0) {
      timer = 0;
    }
    final int cost = this.currentRecipe.getEnergyCost();
    if (energy.getEnergyStored() < cost && cost > 0) {
      this.timer = 0;
      return;
    }
    if (currentRecipe == null || !currentRecipe.matches(this, level)) {
      this.findMatchingRecipe();
      if (currentRecipe == null) {
        this.timer = 0;
        return;
      }
    }
    if (--this.timer < 0) {
      timer = 0;
    }
    if (timer == 0 && this.tryProcessRecipe()) {
      this.timer = TIMER_FULL;
      energy.extractEnergy(cost, false);
    }
  }

  @Override
  public void
setField(int field, int value) { switch (Fields.values()[field]) { case TIMER: this.timer = value; break; case REDSTONE: this.needsRedstone = value % 2; break; case RENDER: this.render = value % 2; break; } } @Override public int getField(int field) { switch (Fields.values()[field]) { case TIMER: return timer; case REDSTONE: return this.needsRedstone; case RENDER: return this.render; } return 0; } public Predicate<FluidStack> isFluidValid() { return p -> true; } @Override public Component getDisplayName() { return new TextComponent(getType().getRegistryName().getPath()); } @Override public AbstractContainerMenu createMenu(int i, Inventory playerInventory, Player playerEntity) { return new ContainerMelter(i, level, worldPosition, playerInventory, playerEntity); } @Override public void load(CompoundTag tag) { tank.readFromNBT(tag.getCompound(NBTFLUID)); energy.deserializeNBT(tag.getCompound(NBTENERGY)); inventory.deserializeNBT(tag.getCompound(NBTINV)); super.load(tag); } @Override public void saveAdditional(CompoundTag tag) { CompoundTag fluid = new CompoundTag(); tank.writeToNBT(fluid); tag.put(NBTFLUID, fluid); tag.put(NBTENERGY, energy.serializeNBT()); tag.put(NBTINV, inventory.serializeNBT()); super.saveAdditional(tag); } @Override public void invalidateCaps() { energyCap.invalidate(); inventoryCap.invalidate(); fluidCap.invalidate(); super.invalidateCaps(); } @Override public <T> LazyOptional<T> getCapability(Capability<T> cap, Direction side) { if (cap == CapabilityFluidHandler.FLUID_HANDLER_CAPABILITY) { return fluidCap.cast(); } if (cap == CapabilityEnergy.ENERGY) { return energyCap.cast(); } if (cap == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY) { return inventoryCap.cast(); } return super.getCapability(cap, side); } public float getCapacity() { return CAPACITY; } @Override public FluidStack getFluid() { return tank == null ? FluidStack.EMPTY : tank.getFluid(); } @Override public void setFluid(FluidStack fluid) { tank.setFluid(fluid); } public ItemStack getStackInputSlot(int slot) { IItemHandler inv = inventoryCap.orElse(null); return (inv == null) ? ItemStack.EMPTY : inv.getStackInSlot(slot); } private void findMatchingRecipe() { if (currentRecipe != null && currentRecipe.matches(this, level)) { return; } currentRecipe = null; List<RecipeMelter<TileBlockEntityCyclic>> recipes = level.getRecipeManager().getAllRecipesFor(CyclicRecipeType.MELTER); for (RecipeMelter rec : recipes) { if (rec.matches(this, level)) { if (this.tank.getFluid() != null && !this.tank.getFluid().isEmpty()) { if (rec.getRecipeFluid().getFluid() != this.tank.getFluid().getFluid()) { continue; //fluid wont fit } } currentRecipe = rec; this.timer = TIMER_FULL; return; } } } private boolean tryProcessRecipe() { int test = tank.fill(this.currentRecipe.getRecipeFluid(), FluidAction.SIMULATE); if (test == this.currentRecipe.getRecipeFluid().getAmount() && currentRecipe.matches(this, level)) { //ok it has room for all the fluid none will be wasted inventory.getStackInSlot(0).shrink(1); inventory.getStackInSlot(1).shrink(1); tank.fill(this.currentRecipe.getRecipeFluid(), FluidAction.EXECUTE); return true; } return false; } }
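/*
 * Illustrative sketch (not part of the mod): reading the melter's stored energy and fluid
 * through the Forge capability system, for example from neighbouring block logic. The
 * helper class and method names are hypothetical; only capabilities and accessors exposed
 * by TileMelter above are used.
 */
package com.lothrazar.cyclic.block.melter;

import net.minecraftforge.energy.CapabilityEnergy;
import net.minecraftforge.fluids.capability.CapabilityFluidHandler;

public class TileMelterInspectExample {

  /** Prints the melter's stored energy (FE) and fluid amount (mB). */
  public static void inspect(TileMelter melter) {
    melter.getCapability(CapabilityEnergy.ENERGY, null)
        .ifPresent(energy -> System.out.println("FE stored: " + energy.getEnergyStored()));
    melter.getCapability(CapabilityFluidHandler.FLUID_HANDLER_CAPABILITY, null)
        .ifPresent(handler -> System.out.println("Fluid stored: " + melter.getFluid().getAmount() + " mB"));
  }
}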
/* * (C) Copyright IBM Corp. 2018, 2021. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package com.ibm.watson.text_to_speech.v1.model; import com.ibm.cloud.sdk.core.service.model.GenericModel; import java.util.List; /** The synthesize options. */ public class SynthesizeOptions extends GenericModel { /** * The voice to use for synthesis. For more information about specifying a voice, see **Important * voice updates for IBM Cloud** in the method description. * * <p>**IBM Cloud:** The Arabic, Chinese, Dutch, Australian English, and Korean languages and * voices are supported only for IBM Cloud. * * <p>**See also:** See also [Using languages and * voices](https://cloud.ibm.com/docs/text-to-speech?topic=text-to-speech-voices). */ public interface Voice { /** ar-AR_OmarVoice. */ String AR_AR_OMARVOICE = "ar-AR_OmarVoice"; /** ar-MS_OmarVoice. */ String AR_MS_OMARVOICE = "ar-MS_OmarVoice"; /** de-DE_BirgitVoice. */ String DE_DE_BIRGITVOICE = "de-DE_BirgitVoice"; /** de-DE_BirgitV3Voice. */ String DE_DE_BIRGITV3VOICE = "de-DE_BirgitV3Voice"; /** de-DE_DieterVoice. */ String DE_DE_DIETERVOICE = "de-DE_DieterVoice"; /** de-DE_DieterV3Voice. */ String DE_DE_DIETERV3VOICE = "de-DE_DieterV3Voice"; /** de-DE_ErikaV3Voice. */ String DE_DE_ERIKAV3VOICE = "de-DE_ErikaV3Voice"; /** en-AU-CraigVoice. */ String EN_AU_CRAIGVOICE = "en-AU-CraigVoice"; /** en-AU-MadisonVoice. */ String EN_AU_MADISONVOICE = "en-AU-MadisonVoice"; /** en-GB_CharlotteV3Voice. */ String EN_GB_CHARLOTTEV3VOICE = "en-GB_CharlotteV3Voice"; /** en-GB_JamesV3Voice. */ String EN_GB_JAMESV3VOICE = "en-GB_JamesV3Voice"; /** en-GB_KateVoice. */ String EN_GB_KATEVOICE = "en-GB_KateVoice"; /** en-GB_KateV3Voice. */ String EN_GB_KATEV3VOICE = "en-GB_KateV3Voice"; /** en-US_AllisonVoice. */ String EN_US_ALLISONVOICE = "en-US_AllisonVoice"; /** en-US_AllisonV3Voice. */ String EN_US_ALLISONV3VOICE = "en-US_AllisonV3Voice"; /** en-US_EmilyV3Voice. */ String EN_US_EMILYV3VOICE = "en-US_EmilyV3Voice"; /** en-US_HenryV3Voice. */ String EN_US_HENRYV3VOICE = "en-US_HenryV3Voice"; /** en-US_KevinV3Voice. */ String EN_US_KEVINV3VOICE = "en-US_KevinV3Voice"; /** en-US_LisaVoice. */ String EN_US_LISAVOICE = "en-US_LisaVoice"; /** en-US_LisaV3Voice. */ String EN_US_LISAV3VOICE = "en-US_LisaV3Voice"; /** en-US_MichaelVoice. */ String EN_US_MICHAELVOICE = "en-US_MichaelVoice"; /** en-US_MichaelV3Voice. */ String EN_US_MICHAELV3VOICE = "en-US_MichaelV3Voice"; /** en-US_OliviaV3Voice. */ String EN_US_OLIVIAV3VOICE = "en-US_OliviaV3Voice"; /** es-ES_EnriqueVoice. */ String ES_ES_ENRIQUEVOICE = "es-ES_EnriqueVoice"; /** es-ES_EnriqueV3Voice. */ String ES_ES_ENRIQUEV3VOICE = "es-ES_EnriqueV3Voice"; /** es-ES_LauraVoice. */ String ES_ES_LAURAVOICE = "es-ES_LauraVoice"; /** es-ES_LauraV3Voice. */ String ES_ES_LAURAV3VOICE = "es-ES_LauraV3Voice"; /** es-LA_SofiaVoice. */ String ES_LA_SOFIAVOICE = "es-LA_SofiaVoice"; /** es-LA_SofiaV3Voice. */ String ES_LA_SOFIAV3VOICE = "es-LA_SofiaV3Voice"; /** es-US_SofiaVoice. */ String ES_US_SOFIAVOICE = "es-US_SofiaVoice"; /** es-US_SofiaV3Voice. 
*/ String ES_US_SOFIAV3VOICE = "es-US_SofiaV3Voice"; /** fr-CA_LouiseV3Voice. */ String FR_CA_LOUISEV3VOICE = "fr-CA_LouiseV3Voice"; /** fr-FR_NicolasV3Voice. */ String FR_FR_NICOLASV3VOICE = "fr-FR_NicolasV3Voice"; /** fr-FR_ReneeVoice. */ String FR_FR_RENEEVOICE = "fr-FR_ReneeVoice"; /** fr-FR_ReneeV3Voice. */ String FR_FR_RENEEV3VOICE = "fr-FR_ReneeV3Voice"; /** it-IT_FrancescaVoice. */ String IT_IT_FRANCESCAVOICE = "it-IT_FrancescaVoice"; /** it-IT_FrancescaV3Voice. */ String IT_IT_FRANCESCAV3VOICE = "it-IT_FrancescaV3Voice"; /** ja-JP_EmiVoice. */ String JA_JP_EMIVOICE = "ja-JP_EmiVoice"; /** ja-JP_EmiV3Voice. */ String JA_JP_EMIV3VOICE = "ja-JP_EmiV3Voice"; /** ko-KR_HyunjunVoice. */ String KO_KR_HYUNJUNVOICE = "ko-KR_HyunjunVoice"; /** ko-KR_SiWooVoice. */ String KO_KR_SIWOOVOICE = "ko-KR_SiWooVoice"; /** ko-KR_YoungmiVoice. */ String KO_KR_YOUNGMIVOICE = "ko-KR_YoungmiVoice"; /** ko-KR_YunaVoice. */ String KO_KR_YUNAVOICE = "ko-KR_YunaVoice"; /** nl-BE_AdeleVoice. */ String NL_BE_ADELEVOICE = "nl-BE_AdeleVoice"; /** nl-NL_EmmaVoice. */ String NL_NL_EMMAVOICE = "nl-NL_EmmaVoice"; /** nl-NL_LiamVoice. */ String NL_NL_LIAMVOICE = "nl-NL_LiamVoice"; /** pt-BR_IsabelaVoice. */ String PT_BR_ISABELAVOICE = "pt-BR_IsabelaVoice"; /** pt-BR_IsabelaV3Voice. */ String PT_BR_ISABELAV3VOICE = "pt-BR_IsabelaV3Voice"; /** zh-CN_LiNaVoice. */ String ZH_CN_LINAVOICE = "zh-CN_LiNaVoice"; /** zh-CN_WangWeiVoice. */ String ZH_CN_WANGWEIVOICE = "zh-CN_WangWeiVoice"; /** zh-CN_ZhangJingVoice. */ String ZH_CN_ZHANGJINGVOICE = "zh-CN_ZhangJingVoice"; } protected String text; protected String accept; protected String voice; protected String customizationId; protected List<String> timings; /** Builder. */ public static class Builder { private String text; private String accept; private String voice; private String customizationId; protected List<String> timings; private Builder(SynthesizeOptions synthesizeOptions) { this.text = synthesizeOptions.text; this.accept = synthesizeOptions.accept; this.voice = synthesizeOptions.voice; this.customizationId = synthesizeOptions.customizationId; this.timings = synthesizeOptions.timings; } /** Instantiates a new builder. */ public Builder() {} /** * Instantiates a new builder with required properties. * * @param text the text */ public Builder(String text) { this.text = text; } /** * Builds a SynthesizeOptions. * * @return the new SynthesizeOptions instance */ public SynthesizeOptions build() { return new SynthesizeOptions(this); } /** * Set the text. * * @param text the text * @return the SynthesizeOptions builder */ public Builder text(String text) { this.text = text; return this; } /** * Set the accept. * * @param accept the accept * @return the SynthesizeOptions builder */ public Builder accept(String accept) { this.accept = accept; return this; } /** * Set the voice. * * @param voice the voice * @return the SynthesizeOptions builder */ public Builder voice(String voice) { this.voice = voice; return this; } /** * Set the customizationId. * * @param customizationId the customizationId * @return the SynthesizeOptions builder */ public Builder customizationId(String customizationId) { this.customizationId = customizationId; return this; } /** * Set the timings. 
* * @param timings the timings * @return the SynthesizeOptions builder */ public Builder timings(List<String> timings) { this.timings = timings; return this; } } protected SynthesizeOptions(Builder builder) { com.ibm.cloud.sdk.core.util.Validator.notNull(builder.text, "text cannot be null"); text = builder.text; accept = builder.accept; voice = builder.voice; customizationId = builder.customizationId; timings = builder.timings; } /** * New builder. * * @return a SynthesizeOptions builder */ public Builder newBuilder() { return new Builder(this); } /** * Gets the text. * * <p>The text to synthesize. * * @return the text */ public String text() { return text; } /** * Gets the accept. * * <p>The requested format (MIME type) of the audio. You can use the `Accept` header or the * `accept` parameter to specify the audio format. For more information about specifying an audio * format, see **Audio formats (accept types)** in the method description. * * @return the accept */ public String accept() { return accept; } /** * Gets the voice. * * <p>The voice to use for synthesis. For more information about specifying a voice, see * **Important voice updates for IBM Cloud** in the method description. * * <p>**IBM Cloud:** The Arabic, Chinese, Dutch, Australian English, and Korean languages and * voices are supported only for IBM Cloud. * * <p>**See also:** See also [Using languages and * voices](https://cloud.ibm.com/docs/text-to-speech?topic=text-to-speech-voices). * * @return the voice */ public String voice() { return voice; } /** * Gets the customizationId. * * <p>The customization ID (GUID) of a custom model to use for the synthesis. If a custom model is * specified, it works only if it matches the language of the indicated voice. You must make the * request with credentials for the instance of the service that owns the custom model. Omit the * parameter to use the specified voice with no customization. * * @return the customizationId */ public String customizationId() { return customizationId; } /** * Gets the timings. * * <p>An array that specifies whether the service is to return word timing information for all * strings of the input text. Specify `words` as the element of the array to request word timing * information. The service returns the start and end time of each word of the input. Specify an * empty array or omit the parameter to receive no word timing information. Not supported for * Japanese input text. * * <p>NOTE: This parameter only works for the `synthesizeUsingWebSocket` method. * * @return the timings */ public List<String> getTimings() { return timings; } }
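/*
 * Illustrative sketch (not part of the SDK): building a SynthesizeOptions value with the
 * builder above. The voice constant comes from the Voice interface in this class; the
 * accept value "audio/wav" and the surrounding service call are assumptions for illustration.
 */
package com.ibm.watson.text_to_speech.v1.model;

import java.util.Arrays;

public class SynthesizeOptionsExample {

  public static void main(String[] args) {
    SynthesizeOptions options =
        new SynthesizeOptions.Builder("Hello world")
            .voice(SynthesizeOptions.Voice.EN_US_MICHAELV3VOICE)
            .accept("audio/wav")
            .timings(Arrays.asList("words")) // word timings; WebSocket synthesis only
            .build();

    System.out.println(options.text());   // Hello world
    System.out.println(options.voice());  // en-US_MichaelV3Voice
    System.out.println(options.accept()); // audio/wav

    // An existing instance can be turned back into a builder and adjusted.
    SynthesizeOptions copy = options.newBuilder().text("Goodbye").build();
    System.out.println(copy.text());      // Goodbye
  }
}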
package org.apache.solr.util; import java.util.Arrays; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** A native long priority queue. * * @lucene.internal */ public class LongPriorityQueue { protected int size; // number of elements currently in the queue protected int currentCapacity; // number of elements the queue can hold w/o expanding protected int maxSize; // max number of elements allowed in the queue protected long[] heap; protected final long sentinel; // represents a null return value public LongPriorityQueue(int initialSize, int maxSize, long sentinel) { this.maxSize = maxSize; this.sentinel = sentinel; initialize(initialSize); } protected void initialize(int sz) { int heapSize; if (0 == sz) // We allocate 1 extra to avoid if statement in top() heapSize = 2; else { // NOTE: we add +1 because all access to heap is // 1-based not 0-based. heap[0] is unused. heapSize = Math.max(sz, sz + 1); // handle overflow } heap = new long[heapSize]; currentCapacity = sz; } public int getCurrentCapacity() { return currentCapacity; } public void resize(int sz) { int heapSize; if (sz > maxSize) { maxSize = sz; } if (0 == sz) // We allocate 1 extra to avoid if statement in top() heapSize = 2; else { heapSize = Math.max(sz, sz + 1); // handle overflow } heap = Arrays.copyOf(heap, heapSize); currentCapacity = sz; } /** * Adds an object to a PriorityQueue in log(size) time. If one tries to add * more objects than maxSize from initialize an * {@link ArrayIndexOutOfBoundsException} is thrown. * * @return the new 'top' element in the queue. */ public long add(long element) { if (size >= currentCapacity) { int newSize = Math.min(currentCapacity <<1, maxSize); if (newSize < currentCapacity) newSize = Integer.MAX_VALUE; // handle overflow resize(newSize); } size++; heap[size] = element; upHeap(); return heap[1]; } /** * Adds an object to a PriorityQueue in log(size) time. If one tries to add * more objects than the current capacity, an * {@link ArrayIndexOutOfBoundsException} is thrown. */ public void addNoCheck(long element) { ++size; heap[size] = element; upHeap(); } /** * Adds an object to a PriorityQueue in log(size) time. * It returns the smallest object (if any) that was * dropped off the heap because it was full, or * the sentinel value. * * This can be * the given parameter (in case it is smaller than the * full heap's minimum, and couldn't be added), or another * object that was previously the smallest value in the * heap and now has been replaced by a larger one, or null * if the queue wasn't yet full with maxSize elements. 
*/ public long insertWithOverflow(long element) { if (size < maxSize) { add(element); return sentinel; } else if (element > heap[1]) { long ret = heap[1]; heap[1] = element; updateTop(); return ret; } else { return element; } } /** inserts the element and returns true if this element caused another element * to be dropped from the queue. */ public boolean insert(long element) { if (size < maxSize) { add(element); return false; } else if (element > heap[1]) { // long ret = heap[1]; heap[1] = element; updateTop(); return true; } else { return false; } } /** Returns the least element of the PriorityQueue in constant time. */ public long top() { return heap[1]; } /** Removes and returns the least element of the PriorityQueue in log(size) time. Only valid if size() &gt; 0. */ public long pop() { long result = heap[1]; // save first value heap[1] = heap[size]; // move last to first size--; downHeap(); // adjust heap return result; } /** * Should be called when the Object at top changes values. * @return the new 'top' element. */ public long updateTop() { downHeap(); return heap[1]; } /** Returns the number of elements currently stored in the PriorityQueue. */ public int size() { return size; } /** Returns the array used to hold the heap, with the smallest item at array[1] * and the last (but not necessarily largest) at array[size()]. This is *not* * fully sorted. */ public long[] getInternalArray() { return heap; } /** Pops the smallest n items from the heap, placing them in the internal array at * arr[size] through arr[size-(n-1)] with the smallest (first element popped) * being at arr[size]. The internal array is returned. */ public long[] sort(int n) { while (--n >= 0) { long result = heap[1]; // save first value heap[1] = heap[size]; // move last to first heap[size] = result; // place it last size--; downHeap(); // adjust heap } return heap; } /** Removes all entries from the PriorityQueue. */ public void clear() { size = 0; } private void upHeap() { int i = size; long node = heap[i]; // save bottom node int j = i >>> 1; while (j > 0 && node < heap[j]) { heap[i] = heap[j]; // shift parents down i = j; j = j >>> 1; } heap[i] = node; // install saved node } private void downHeap() { int i = 1; long node = heap[i]; // save top node int j = i << 1; // find smaller child int k = j + 1; if (k <= size && heap[k] < heap[j]) { j = k; } while (j <= size && heap[j] < node) { heap[i] = heap[j]; // shift up child i = j; j = i << 1; k = j + 1; if (k <= size && heap[k] < heap[j]) { j = k; } } heap[i] = node; // install saved node } }
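/*
 * Usage sketch for LongPriorityQueue above: tracking the three largest values of a
 * stream. The heap keeps its smallest element at heap[1], so once the queue is full
 * insertWithOverflow() evicts the current minimum whenever a larger value arrives.
 * The sentinel must be a value that never occurs in the data. Assumed to live in the
 * same package as the class.
 */
class LongPriorityQueueExample {
    public static void main(String[] args) {
        LongPriorityQueue topN = new LongPriorityQueue(3, 3, Long.MIN_VALUE);

        for (long v : new long[] {5, 1, 9, 7, 3, 8}) {
            topN.insertWithOverflow(v);
        }

        // pop() removes the least element first, so this prints 7, 8, 9.
        while (topN.size() > 0) {
            System.out.println(topN.pop());
        }
    }
}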
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.near; import java.io.Externalizable; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.CacheEntryPredicate; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.distributed.GridDistributedLockRequest; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType; import org.apache.ignite.plugin.extensions.communication.MessageReader; import org.apache.ignite.plugin.extensions.communication.MessageWriter; import org.apache.ignite.transactions.TransactionIsolation; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * Near cache lock request to primary node. 'Near' means 'Initiating node' here, not 'Near Cache'. */ public class GridNearLockRequest extends GridDistributedLockRequest { /** */ private static final long serialVersionUID = 0L; /** */ private static final int NEED_RETURN_VALUE_FLAG_MASK = 0x01; /** */ private static final int FIRST_CLIENT_REQ_FLAG_MASK = 0x02; /** */ private static final int SYNC_COMMIT_FLAG_MASK = 0x04; /** */ private static final int NEAR_CACHE_FLAG_MASK = 0x08; /** Topology version. */ private AffinityTopologyVersion topVer; /** Mini future ID. */ private int miniId; /** Filter. */ private CacheEntryPredicate[] filter; /** Array of mapped DHT versions for this entry. */ @GridToStringInclude private GridCacheVersion[] dhtVers; /** Task name hash. */ private int taskNameHash; /** TTL for create operation. */ private long createTtl; /** TTL for read operation. */ private long accessTtl; /** */ private byte flags; /** Transaction label. */ private String txLbl; /** * Empty constructor required for {@link Externalizable}. */ public GridNearLockRequest() { // No-op. } /** * @param cacheId Cache ID. * @param topVer Topology version. * @param nodeId Node ID. * @param threadId Thread ID. * @param futId Future ID. * @param lockVer Cache version. * @param isInTx {@code True} if implicit transaction lock. * @param isRead Indicates whether implicit lock is for read or write operation. * @param retVal Return value flag. 
* @param isolation Transaction isolation. * @param isInvalidate Invalidation flag. * @param timeout Lock timeout. * @param keyCnt Number of keys. * @param txSize Expected transaction size. * @param syncCommit Synchronous commit flag. * @param taskNameHash Task name hash code. * @param createTtl TTL for create operation. * @param accessTtl TTL for read operation. * @param skipStore Skip store flag. * @param keepBinary Keep binary flag. * @param firstClientReq {@code True} if first lock request for lock operation sent from client node. * @param nearCache {@code True} if near cache enabled on originating node. * @param addDepInfo Deployment info flag. * @param txLbl Transaction label. */ public GridNearLockRequest( int cacheId, @NotNull AffinityTopologyVersion topVer, UUID nodeId, long threadId, IgniteUuid futId, GridCacheVersion lockVer, boolean isInTx, boolean isRead, boolean retVal, TransactionIsolation isolation, boolean isInvalidate, long timeout, int keyCnt, int txSize, boolean syncCommit, int taskNameHash, long createTtl, long accessTtl, boolean skipStore, boolean keepBinary, boolean firstClientReq, boolean nearCache, boolean addDepInfo, @Nullable String txLbl ) { super( cacheId, nodeId, lockVer, threadId, futId, lockVer, isInTx, isRead, isolation, isInvalidate, timeout, keyCnt, txSize, skipStore, keepBinary, addDepInfo); assert topVer.compareTo(AffinityTopologyVersion.ZERO) > 0; this.topVer = topVer; this.taskNameHash = taskNameHash; this.createTtl = createTtl; this.accessTtl = accessTtl; this.txLbl = txLbl; dhtVers = new GridCacheVersion[keyCnt]; setFlag(syncCommit, SYNC_COMMIT_FLAG_MASK); setFlag(firstClientReq, FIRST_CLIENT_REQ_FLAG_MASK); setFlag(retVal, NEED_RETURN_VALUE_FLAG_MASK); setFlag(nearCache, NEAR_CACHE_FLAG_MASK); } /** * @return {@code True} if near cache enabled on originating node. */ public boolean nearCache() { return isFlag(NEAR_CACHE_FLAG_MASK); } /** * Sets flag mask. * * @param flag Set or clear. * @param mask Mask. */ private void setFlag(boolean flag, int mask) { flags = flag ? (byte)(flags | mask) : (byte)(flags & ~mask); } /** * Reads flag mask. * * @param mask Mask to read. * @return Flag value. */ private boolean isFlag(int mask) { return (flags & mask) != 0; } /** * @return {@code True} if first lock request for lock operation sent from client node. */ public boolean firstClientRequest() { return isFlag(FIRST_CLIENT_REQ_FLAG_MASK); } /** * @return Topology version. */ @Override public AffinityTopologyVersion topologyVersion() { return topVer; } /** * @param topVer Topology version. */ public void topologyVersion(AffinityTopologyVersion topVer) { this.topVer = topVer; } /** * @return Task name hash. */ public int taskNameHash() { return taskNameHash; } /** * @return Sync commit flag. */ public boolean syncCommit() { return isFlag(SYNC_COMMIT_FLAG_MASK); } /** * @return Filter. */ public CacheEntryPredicate[] filter() { return filter; } /** * @param filter Filter. * @param ctx Context. * @throws IgniteCheckedException If failed. */ public void filter(CacheEntryPredicate[] filter, GridCacheContext ctx) throws IgniteCheckedException { this.filter = filter; } /** * @return Mini future ID. */ public int miniId() { return miniId; } /** * @param miniId Mini future Id. */ public void miniId(int miniId) { this.miniId = miniId; } /** * @return Need return value flag. */ public boolean needReturnValue() { return isFlag(NEED_RETURN_VALUE_FLAG_MASK); } /** * Adds a key. * * @param key Key. * @param retVal Flag indicating whether value should be returned. * @param dhtVer DHT version. * @param ctx Context. * @throws IgniteCheckedException If failed.
*/ public void addKeyBytes( KeyCacheObject key, boolean retVal, @Nullable GridCacheVersion dhtVer, GridCacheContext ctx ) throws IgniteCheckedException { dhtVers[idx] = dhtVer; // Delegate to super. addKeyBytes(key, retVal, ctx); } /** * @param idx Index of the key. * @return DHT version for key at given index. */ public GridCacheVersion dhtVersion(int idx) { return dhtVers[idx]; } /** * @return New TTL to set after entry is created, -1 to leave unchanged. */ public long createTtl() { return createTtl; } /** * @return TTL for read operation. */ public long accessTtl() { return accessTtl; } /** * @return Transaction label. */ @Nullable public String txLabel() { return txLbl; } /** {@inheritDoc} */ @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException { super.prepareMarshal(ctx); if (filter != null) { GridCacheContext cctx = ctx.cacheContext(cacheId); for (CacheEntryPredicate p : filter) { if (p != null) p.prepareMarshal(cctx); } } } /** {@inheritDoc} */ @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException { super.finishUnmarshal(ctx, ldr); if (filter != null) { GridCacheContext cctx = ctx.cacheContext(cacheId); for (CacheEntryPredicate p : filter) { if (p != null) p.finishUnmarshal(cctx, ldr); } } } /** {@inheritDoc} */ @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) { writer.setBuffer(buf); if (!super.writeTo(buf, writer)) return false; if (!writer.isHeaderWritten()) { if (!writer.writeHeader(directType(), fieldsCount())) return false; writer.onHeaderWritten(); } switch (writer.state()) { case 21: if (!writer.writeLong("accessTtl", accessTtl)) return false; writer.incrementState(); case 22: if (!writer.writeLong("createTtl", createTtl)) return false; writer.incrementState(); case 23: if (!writer.writeObjectArray("dhtVers", dhtVers, MessageCollectionItemType.MSG)) return false; writer.incrementState(); case 24: if (!writer.writeObjectArray("filter", filter, MessageCollectionItemType.MSG)) return false; writer.incrementState(); case 25: if (!writer.writeByte("flags", flags)) return false; writer.incrementState(); case 26: if (!writer.writeInt("miniId", miniId)) return false; writer.incrementState(); case 27: if (!writer.writeInt("taskNameHash", taskNameHash)) return false; writer.incrementState(); case 28: if (!writer.writeAffinityTopologyVersion("topVer", topVer)) return false; writer.incrementState(); case 29: if (!writer.writeString("txLbl", txLbl)) return false; writer.incrementState(); } return true; } /** {@inheritDoc} */ @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) { reader.setBuffer(buf); if (!reader.beforeMessageRead()) return false; if (!super.readFrom(buf, reader)) return false; switch (reader.state()) { case 21: accessTtl = reader.readLong("accessTtl"); if (!reader.isLastRead()) return false; reader.incrementState(); case 22: createTtl = reader.readLong("createTtl"); if (!reader.isLastRead()) return false; reader.incrementState(); case 23: dhtVers = reader.readObjectArray("dhtVers", MessageCollectionItemType.MSG, GridCacheVersion.class); if (!reader.isLastRead()) return false; reader.incrementState(); case 24: filter = reader.readObjectArray("filter", MessageCollectionItemType.MSG, CacheEntryPredicate.class); if (!reader.isLastRead()) return false; reader.incrementState(); case 25: flags = reader.readByte("flags"); if (!reader.isLastRead()) return false; reader.incrementState(); case 26: miniId = reader.readInt("miniId"); if 
(!reader.isLastRead()) return false; reader.incrementState(); case 27: taskNameHash = reader.readInt("taskNameHash"); if (!reader.isLastRead()) return false; reader.incrementState(); case 28: topVer = reader.readAffinityTopologyVersion("topVer"); if (!reader.isLastRead()) return false; reader.incrementState(); case 29: txLbl = reader.readString("txLbl"); if (!reader.isLastRead()) return false; reader.incrementState(); } return reader.afterMessageRead(GridNearLockRequest.class); } /** {@inheritDoc} */ @Override public short directType() { return 51; } /** {@inheritDoc} */ @Override public byte fieldsCount() { return 30; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridNearLockRequest.class, this, "filter", Arrays.toString(filter), "super", super.toString()); } }
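/*
 * Standalone sketch of the bit-flag packing used by setFlag()/isFlag() in
 * GridNearLockRequest above: several boolean options share a single byte, one bit
 * per mask constant, so the whole set travels as a single writeByte("flags", ...)
 * field in the message. The class below is illustrative only and is not part of Ignite.
 */
class FlagMaskSketch {
    static final int SYNC_COMMIT_MASK = 0x01;
    static final int FIRST_CLIENT_REQ_MASK = 0x02;

    private byte flags;

    void setFlag(boolean flag, int mask) {
        flags = flag ? (byte)(flags | mask) : (byte)(flags & ~mask);
    }

    boolean isFlag(int mask) {
        return (flags & mask) != 0;
    }

    public static void main(String[] args) {
        FlagMaskSketch f = new FlagMaskSketch();
        f.setFlag(true, SYNC_COMMIT_MASK);
        f.setFlag(true, FIRST_CLIENT_REQ_MASK);
        f.setFlag(false, SYNC_COMMIT_MASK);
        System.out.println(f.isFlag(SYNC_COMMIT_MASK));      // false
        System.out.println(f.isFlag(FIRST_CLIENT_REQ_MASK)); // true
    }
}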
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.jexl3; import java.io.StringReader; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.junit.Assert; import static org.junit.Assert.assertEquals; import org.junit.Test; /** * Test cases for reported issue between JEXL-300 and JEXL-399. */ public class Issues300Test { @Test public void testIssue301a() throws Exception { final JexlEngine jexl = new JexlBuilder().safe(false).arithmetic(new JexlArithmetic(false)).create(); final String[] srcs = new String[]{ "var x = null; x.0", "var x = null; x[0]", "var x = [null,1]; x[0][0]" }; for (int i = 0; i < srcs.length; ++i) { final String src = srcs[i]; final JexlScript s = jexl.createScript(src); try { final Object o = s.execute(null); if (i > 0) { Assert.fail(src + ": Should have failed"); } } catch (final Exception ex) { Assert.assertTrue(ex.getMessage().contains("x")); } } } @Test public void testIssues301b() throws Exception { final JexlEngine jexl = new JexlBuilder().safe(false).arithmetic(new JexlArithmetic(false)).create(); final Object[] xs = new Object[]{null, null, new Object[]{null, 1}}; final String[] srcs = new String[]{ "x.0", "x[0]", "x[0][0]" }; final JexlContext ctxt = new MapContext(); for (int i = 0; i < xs.length; ++i) { ctxt.set("x", xs[i]); final String src = srcs[i]; final JexlScript s = jexl.createScript(src); try { final Object o = s.execute(null); Assert.fail(src + ": Should have failed"); } catch (final Exception ex) { // } } } @Test public void testIssue302() throws Exception { final JexlContext jc = new MapContext(); final String[] strs = new String[]{ "{if (0) 1 else 2; var x = 4;}", "if (0) 1; else 2; ", "{ if (0) 1; else 2; }", "{ if (0) { if (false) 1 else -3 } else 2; }" }; final JexlEngine jexl = new JexlBuilder().create(); for (final String str : strs) { final JexlScript e = jexl.createScript(str); final Object o = e.execute(jc); final int oo = ((Number) o).intValue() % 2; Assert.assertEquals("Block result is wrong " + str, 0, oo); } } @Test public void testIssue304() { final JexlEngine jexlEngine = new JexlBuilder().strict(false).create(); JexlExpression e304 = jexlEngine.createExpression("overview.limit.var"); final HashMap<String, Object> map3 = new HashMap<String, Object>(); map3.put("var", "4711"); final HashMap<String, Object> map2 = new HashMap<String, Object>(); map2.put("limit", map3); final HashMap<String, Object> map = new HashMap<String, Object>(); map.put("overview", map2); final JexlContext context = new MapContext(map); Object value = e304.evaluate(context); assertEquals("4711", value); // fails map.clear(); map.put("overview.limit.var", 42); value = 
e304.evaluate(context); assertEquals(42, value); final String allkw = "e304.if.else.do.while.new.true.false.null.var.function.empty.size.not.and.or.ne.eq.le.lt.gt.ge"; map.put(allkw, 42); e304 = jexlEngine.createExpression(allkw); value = e304.evaluate(context); assertEquals(42, value); } @Test public void testIssue305() throws Exception { final JexlEngine jexl = new JexlBuilder().create(); JexlScript e; e = jexl.createScript("{while(false) {}; var x = 1;}"); final String str0 = e.getParsedText(); e = jexl.createScript(str0); Assert.assertNotNull(e); final String str1 = e.getParsedText(); Assert.assertEquals(str0, str1); } @Test public void testIssue306() throws Exception { final JexlContext ctxt = new MapContext(); final JexlEngine jexl = new JexlBuilder().create(); final JexlScript e = jexl.createScript("x.y ?: 2"); final Object o1 = e.execute(null); Assert.assertEquals(2, o1); ctxt.set("x.y", null); final Object o2 = e.execute(ctxt); Assert.assertEquals(2, o2); } @Test public void testIssue306a() throws Exception { final JexlEngine jexl = new JexlBuilder().create(); final JexlScript e = jexl.createScript("x.y ?: 2", "x"); Object o = e.execute(null, new Object()); Assert.assertEquals(2, o); o = e.execute(null); Assert.assertEquals(2, o); } @Test public void testIssue306b() throws Exception { final JexlEngine jexl = new JexlBuilder().create(); final JexlScript e = jexl.createScript("x?.y ?: 2", "x"); final Object o1 = e.execute(null, new Object()); Assert.assertEquals(2, o1); final Object o2 = e.execute(null); Assert.assertEquals(2, o2); } @Test public void testIssue306c() throws Exception { final JexlEngine jexl = new JexlBuilder().safe(true).create(); final JexlScript e = jexl.createScript("x.y ?: 2", "x"); Object o = e.execute(null, new Object()); Assert.assertEquals(2, o); o = e.execute(null); Assert.assertEquals(2, o); } @Test public void testIssue306d() throws Exception { final JexlEngine jexl = new JexlBuilder().safe(true).create(); final JexlScript e = jexl.createScript("x.y[z.t] ?: 2", "x"); Object o = e.execute(null, new Object()); Assert.assertEquals(2, o); o = e.execute(null); Assert.assertEquals(2, o); } @Test public void testIssue309a() throws Exception { final String src = "<html lang=\"en\">\n" + " <body>\n" + " <h1>Hello World!</h1>\n" + "$$ var i = 12++;\n" + " </body>\n" + "</html>"; final JexlEngine jexl = new JexlBuilder().safe(true).create(); final JxltEngine jxlt = jexl.createJxltEngine(); final JexlInfo info = new JexlInfo("template", 1, 1); try { final JxltEngine.Template tmplt = jxlt.createTemplate(info, src); Assert.fail("shoud have thrown exception"); } catch (final JexlException.Parsing xerror) { Assert.assertEquals(4, xerror.getInfo().getLine()); } } @Test public void testIssue309b() throws Exception { final String src = "<html lang=\"en\">\n" + " <body>\n" + " <h1>Hello World!</h1>\n" + "$$ var i = a b c;\n" + " </body>\n" + "</html>"; final JexlEngine jexl = new JexlBuilder().safe(true).create(); final JxltEngine jxlt = jexl.createJxltEngine(); final JexlInfo info = new JexlInfo("template", 1, 1); try { final JxltEngine.Template tmplt = jxlt.createTemplate(info, src); Assert.fail("shoud have thrown exception"); } catch (final JexlException.Parsing xerror) { Assert.assertEquals(4, xerror.getInfo().getLine()); } } @Test public void testIssue309c() throws Exception { final String src = "<html lang=\"en\">\n" + " <body>\n" + " <h1>Hello World!</h1>\n" + "$$ var i =12;\n" + " </body>\n" + "</html>"; final JexlEngine jexl = new JexlBuilder().safe(true).create(); 
final JxltEngine jxlt = jexl.createJxltEngine(); final JexlInfo info = new JexlInfo("template", 1, 1); try { final JxltEngine.Template tmplt = jxlt.createTemplate(info, src); final String src1 = tmplt.asString(); final String src2 = tmplt.toString(); Assert.assertEquals(src1, src2); } catch (final JexlException.Parsing xerror) { Assert.assertEquals(4, xerror.getInfo().getLine()); } } public static class VaContext extends MapContext { VaContext(final Map<String, Object> vars) { super(vars); } public int cell(final String... ms) { return ms.length; } public int cell(final List<?> l, final String... ms) { return 42 + cell(ms); } } @Test public void test314() throws Exception { final JexlEngine jexl = new JexlBuilder().strict(true).safe(false).create(); final Map<String, Object> vars = new HashMap<String, Object>(); final JexlContext ctxt = new VaContext(vars); JexlScript script; Object result; script = jexl.createScript("cell()"); result = script.execute(ctxt); Assert.assertEquals(0, result); script = jexl.createScript("x.cell()", "x"); result = script.execute(ctxt, Arrays.asList(10, 20)); Assert.assertEquals(42, result); script = jexl.createScript("cell('1', '2')"); result = script.execute(ctxt); Assert.assertEquals(2, result); script = jexl.createScript("x.cell('1', '2')", "x"); result = script.execute(ctxt, Arrays.asList(10, 20)); Assert.assertEquals(44, result); vars.put("TVALOGAR", null); String jexlExp = "TVALOGAR==null?'SIMON':'SIMONAZO'"; script = jexl.createScript(jexlExp); result = script.execute(ctxt); Assert.assertEquals("SIMON", result); jexlExp = "TVALOGAR.PEPITO==null?'SIMON':'SIMONAZO'"; script = jexl.createScript(jexlExp); final Map<String, Object> tva = new LinkedHashMap<String, Object>(); tva.put("PEPITO", null); vars.put("TVALOGAR", tva); result = script.execute(ctxt); Assert.assertEquals("SIMON", result); vars.remove("TVALOGAR"); ctxt.set("TVALOGAR.PEPITO", null); result = script.execute(ctxt); Assert.assertEquals("SIMON", result); } @Test public void test315() throws Exception { final JexlEngine jexl = new JexlBuilder().strict(true).create(); final Map<String, Object> vars = new HashMap<String, Object>(); final JexlContext ctxt = new VaContext(vars); JexlScript script; Object result; script = jexl.createScript("a?? 42 + 10", "a"); result = script.execute(ctxt, 32); Assert.assertEquals(32, result); result = script.execute(ctxt, (Object) null); Assert.assertEquals(52, result); script = jexl.createScript("- a??42 + +10", "a"); result = script.execute(ctxt, 32); Assert.assertEquals(-32, result); result = script.execute(ctxt, (Object) null); Assert.assertEquals(52, result); // long version of ternary script = jexl.createScript("a? 
a : +42 + 10", "a"); result = script.execute(ctxt, 32); Assert.assertEquals(32, result); result = script.execute(ctxt, (Object) null); Assert.assertEquals(52, result); // short one, elvis, equivalent script = jexl.createScript("a ?: +42 + 10", "a"); result = script.execute(ctxt, 32); Assert.assertEquals(32, result); result = script.execute(ctxt, (Object) null); Assert.assertEquals(52, result); } @Test public void test317() throws Exception { final JexlEngine jexl = new JexlBuilder().strict(true).create(); final JexlContext ctxt = new MapContext(); JexlScript script; Object result; JexlInfo info = new JexlInfo("test317", 1, 1); script = jexl.createScript(info, "var f = " + "()-> {x + x }; f", "x"); result = script.execute(ctxt, 21); Assert.assertTrue(result instanceof JexlScript); script = (JexlScript) result; info = JexlInfo.from(script); Assert.assertNotNull(info); Assert.assertEquals("test317", info.getName()); result = script.execute(ctxt, 21); Assert.assertEquals(42, result); } @Test public void test322a() throws Exception { final JexlEngine jexl = new JexlBuilder().strict(true).create(); final JxltEngine jxlt = jexl.createJxltEngine(); final JexlContext context = new MapContext(); final String[] ins = new String[]{ "${'{'}", "${\"{\"}", "${\"{}\"}", "${'{42}'}", "${\"{\\\"\\\"}\"}" }; final String[] ctls = new String[]{ "{", "{", "{}", "{42}", "{\"\"}" }; StringWriter strw; JxltEngine.Template template; String output; for (int i = 0; i < ins.length; ++i) { final String src = ins[i]; try { template = jxlt.createTemplate("$$", new StringReader(src)); } catch (final JexlException xany) { Assert.fail(src); throw xany; } strw = new StringWriter(); template.evaluate(context, strw); output = strw.toString(); Assert.assertEquals(ctls[i], output); } } public static class User322 { public String getName() { return "user322"; } } public static class Session322 { public User322 getUser() { return new User322(); } } @Test public void test322b() throws Exception { final MapContext ctxt = new MapContext(); final String src = "L'utilisateur ${session.user.name} s'est connecte"; final JexlEngine jexl = new JexlBuilder().strict(true).create(); final JxltEngine jxlt = jexl.createJxltEngine(); StringWriter strw; JxltEngine.Template template; String output; template = jxlt.createTemplate("$$", new StringReader(src)); ctxt.set("session", new Session322()); strw = new StringWriter(); template.evaluate(ctxt, strw); output = strw.toString(); Assert.assertEquals("L'utilisateur user322 s'est connecte", output); ctxt.set("session.user", new User322()); strw = new StringWriter(); template.evaluate(ctxt, strw); output = strw.toString(); Assert.assertEquals("L'utilisateur user322 s'est connecte", output); ctxt.set("session.user.name", "user322"); strw = new StringWriter(); template.evaluate(ctxt, strw); output = strw.toString(); Assert.assertEquals("L'utilisateur user322 s'est connecte", output); } @Test public void test323() throws Exception { final JexlEngine jexl = new JexlBuilder().safe(false).create(); final Map<String, Object> vars = new HashMap<String, Object>(); final JexlContext jc = new MapContext(vars); JexlScript script; Object result; // nothing in context, ex try { script = jexl.createScript("a.n.t.variable"); result = script.execute(jc); Assert.fail("a.n.t.variable is undefined!"); } catch (final JexlException.Variable xvar) { Assert.assertTrue(xvar.toString().contains("a.n.t")); } // defined and null jc.set("a.n.t.variable", null); script = jexl.createScript("a.n.t.variable"); result = 
script.execute(jc); Assert.assertNull(result); // defined and null, dereference jc.set("a.n.t", null); try { script = jexl.createScript("a.n.t[0].variable"); result = script.execute(jc); Assert.fail("a.n.t is null!"); } catch (final JexlException.Variable xvar) { Assert.assertTrue(xvar.toString().contains("a.n.t")); } // undefined, dereference vars.remove("a.n.t"); try { script = jexl.createScript("a.n.t[0].variable"); result = script.execute(jc); Assert.fail("a.n.t is undefined!"); } catch (final JexlException.Variable xvar) { Assert.assertTrue(xvar.toString().contains("a.n.t")); } // defined, derefence undefined property final List<Object> inner = new ArrayList<Object>(); vars.put("a.n.t", inner); try { script = jexl.createScript("a.n.t[0].variable"); result = script.execute(jc); Assert.fail("a.n.t is null!"); } catch (final JexlException.Property xprop) { Assert.assertTrue(xprop.toString().contains("0")); } // defined, derefence undefined property inner.add(42); try { script = jexl.createScript("a.n.t[0].variable"); result = script.execute(jc); Assert.fail("a.n.t is null!"); } catch (final JexlException.Property xprop) { Assert.assertTrue(xprop.toString().contains("variable")); } } @Test public void test324() throws Exception { final JexlEngine jexl = new JexlBuilder().create(); final String src42 = "new('java.lang.Integer', 42)"; final JexlExpression expr0 = jexl.createExpression(src42); Assert.assertEquals(42, expr0.evaluate(null)); final String parsed = expr0.getParsedText(); Assert.assertEquals(src42, parsed); try { final JexlExpression expr = jexl.createExpression("new()"); Assert.fail("should not parse"); } catch (final JexlException.Parsing xparse) { Assert.assertTrue(xparse.toString().contains(")")); } } @Test public void test325() throws Exception { final JexlEngine jexl = new JexlBuilder().safe(false).create(); final Map<String, Object> map = new HashMap<String, Object>() { @Override public Object get(final Object key) { return super.get(key == null ? "" : key); } @Override public Object put(final String key, final Object value) { return super.put(key == null ? 
"" : key, value); } }; map.put("42", 42); final JexlContext jc = new MapContext(); JexlScript script; Object result; script = jexl.createScript("map[null] = 42", "map"); result = script.execute(jc, map); Assert.assertEquals(42, result); script = jexl.createScript("map[key]", "map", "key"); result = script.execute(jc, map, null); Assert.assertEquals(42, result); result = script.execute(jc, map, "42"); Assert.assertEquals(42, result); } @Test public void test330() throws Exception { final JexlEngine jexl = new JexlBuilder().create(); // Extended form of: 'literal' + VARIABLE 'literal' // missing + operator here ---------------^ final String longExpression = "" + // "'THIS IS A VERY VERY VERY VERY VERY VERY VERY " + // "VERY VERY LONG STRING CONCATENATION ' + VARIABLE ' <--- " + // "error: missing + between VARIABLE and literal'"; try { jexl.createExpression(longExpression); Assert.fail("parsing malformed expression did not throw exception"); } catch (final JexlException.Parsing exception) { Assert.assertTrue(exception.getMessage().contains("VARIABLE")); } } @Test public void test331() throws Exception { final JexlEngine jexl = new JexlBuilder().create(); final JexlContext ctxt = new MapContext(); JexlScript script; Object result; script = jexl.createScript("a + '\\n' + b", "a", "b"); result = script.execute(ctxt, "hello", "world"); Assert.assertTrue(result.toString().contains("\n")); } @Test public void test347() throws Exception { final String src = "A.B == 5"; JexlEngine jexl = new JexlBuilder().safe(true).create(); JexlScript script = jexl.createScript(src); Object result = script.execute(null); // safe navigation is lenient wrt null Assert.assertFalse((Boolean) result); jexl = new JexlBuilder().strict(true).safe(false).create(); JexlContext ctxt = new MapContext(); script = jexl.createScript(src); // A and A.B undefined try { result = script.execute(ctxt); Assert.fail("should only succeed with safe navigation"); } catch (JexlException xany) { Assert.assertNotNull(xany); } // A is null, A.B is undefined ctxt.set("A", null); try { result = script.execute(ctxt); Assert.fail("should only succeed with safe navigation"); } catch (JexlException xany) { Assert.assertNotNull(xany); } // A.B is null ctxt.set("A.B", null); result = script.execute(ctxt); Assert.assertFalse((Boolean) result); } @Test public void test349() throws Exception { String text = "(A ? C.D : E)"; JexlEngine jexl = new JexlBuilder().safe(true).create(); JexlExpression expr = jexl.createExpression(text); JexlScript script = jexl.createScript(text); } }
/* Copyright 2004, 2005, 2006 Acegi Technology Pty Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.access.method; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.springframework.beans.factory.BeanClassLoaderAware; import org.springframework.beans.factory.InitializingBean; import org.springframework.security.access.ConfigAttribute; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; /** * Stores a list of <tt>ConfigAttribute</tt>s for a method or class signature. * * <p> * This class is the preferred implementation of {@link MethodSecurityMetadataSource} for * XML-based definition of method security metadata. To assist in XML-based definition, * wildcard support is provided. * </p> * * @author Ben Alex * @since 2.0 */ public class MapBasedMethodSecurityMetadataSource extends AbstractFallbackMethodSecurityMetadataSource implements BeanClassLoaderAware { // ~ Instance fields // ================================================================================================ private ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader(); /** Map from RegisteredMethod to ConfigAttribute list */ protected final Map<RegisteredMethod, List<ConfigAttribute>> methodMap = new HashMap<RegisteredMethod, List<ConfigAttribute>>(); /** Map from RegisteredMethod to name pattern used for registration */ private final Map<RegisteredMethod, String> nameMap = new HashMap<RegisteredMethod, String>(); // ~ Methods // ======================================================================================================== public MapBasedMethodSecurityMetadataSource() { } /** * Creates the <tt>MapBasedMethodSecurityMetadataSource</tt> from a * @param methodMap map of method names to <tt>ConfigAttribute</tt>s. */ public MapBasedMethodSecurityMetadataSource( Map<String, List<ConfigAttribute>> methodMap) { for (Map.Entry<String, List<ConfigAttribute>> entry : methodMap.entrySet()) { addSecureMethod(entry.getKey(), entry.getValue()); } } /** * Implementation does not support class-level attributes. */ protected Collection<ConfigAttribute> findAttributes(Class<?> clazz) { return null; } /** * Will walk the method inheritance tree to find the most specific declaration * applicable. 
*/ protected Collection<ConfigAttribute> findAttributes(Method method, Class<?> targetClass) { if (targetClass == null) { return null; } return findAttributesSpecifiedAgainst(method, targetClass); } private List<ConfigAttribute> findAttributesSpecifiedAgainst(Method method, Class<?> clazz) { RegisteredMethod registeredMethod = new RegisteredMethod(method, clazz); if (methodMap.containsKey(registeredMethod)) { return (List<ConfigAttribute>) methodMap.get(registeredMethod); } // Search superclass if (clazz.getSuperclass() != null) { return findAttributesSpecifiedAgainst(method, clazz.getSuperclass()); } return null; } /** * Add configuration attributes for a secure method. Method names can end or start * with <code>*</code> for matching multiple methods. * * @param name type and method name, separated by a dot * @param attr the security attributes associated with the method */ private void addSecureMethod(String name, List<ConfigAttribute> attr) { int lastDotIndex = name.lastIndexOf("."); if (lastDotIndex == -1) { throw new IllegalArgumentException("'" + name + "' is not a valid method name: format is FQN.methodName"); } String methodName = name.substring(lastDotIndex + 1); Assert.hasText(methodName, "Method not found for '" + name + "'"); String typeName = name.substring(0, lastDotIndex); Class<?> type = ClassUtils.resolveClassName(typeName, this.beanClassLoader); addSecureMethod(type, methodName, attr); } /** * Add configuration attributes for a secure method. Mapped method names can end or * start with <code>*</code> for matching multiple methods. * * @param javaType target interface or class the security configuration attribute * applies to * @param mappedName mapped method name, which the javaType has declared or inherited * @param attr required authorities associated with the method */ public void addSecureMethod(Class<?> javaType, String mappedName, List<ConfigAttribute> attr) { String name = javaType.getName() + '.' + mappedName; if (logger.isDebugEnabled()) { logger.debug("Request to add secure method [" + name + "] with attributes [" + attr + "]"); } Method[] methods = javaType.getMethods(); List<Method> matchingMethods = new ArrayList<Method>(); for (Method m : methods) { if (m.getName().equals(mappedName) || isMatch(m.getName(), mappedName)) { matchingMethods.add(m); } } if (matchingMethods.isEmpty()) { throw new IllegalArgumentException("Couldn't find method '" + mappedName + "' on '" + javaType + "'"); } // register all matching methods for (Method method : matchingMethods) { RegisteredMethod registeredMethod = new RegisteredMethod(method, javaType); String regMethodName = (String) this.nameMap.get(registeredMethod); if ((regMethodName == null) || (!regMethodName.equals(name) && (regMethodName.length() <= name .length()))) { // no already registered method name, or more specific // method name specification now -> (re-)register method if (regMethodName != null) { logger.debug("Replacing attributes for secure method [" + method + "]: current name [" + name + "] is more specific than [" + regMethodName + "]"); } this.nameMap.put(registeredMethod, name); addSecureMethod(registeredMethod, attr); } else { logger.debug("Keeping attributes for secure method [" + method + "]: current name [" + name + "] is not more specific than [" + regMethodName + "]"); } } } /** * Adds configuration attributes for a specific method, for example where the method * has been matched using a pointcut expression. 
If a match already exists in the map * for the method, then the existing match will be retained, so that if this method is * called for a more general pointcut it will not override a more specific one which * has already been added. * <p> * This method should only be called during initialization of the {@code BeanFactory}. */ public void addSecureMethod(Class<?> javaType, Method method, List<ConfigAttribute> attr) { RegisteredMethod key = new RegisteredMethod(method, javaType); if (methodMap.containsKey(key)) { logger.debug("Method [" + method + "] is already registered with attributes [" + methodMap.get(key) + "]"); return; } methodMap.put(key, attr); } /** * Add configuration attributes for a secure method. * * @param method the method to be secured * @param attr required authorities associated with the method */ private void addSecureMethod(RegisteredMethod method, List<ConfigAttribute> attr) { Assert.notNull(method, "RegisteredMethod required"); Assert.notNull(attr, "Configuration attribute required"); if (logger.isInfoEnabled()) { logger.info("Adding secure method [" + method + "] with attributes [" + attr + "]"); } this.methodMap.put(method, attr); } /** * Obtains the configuration attributes explicitly defined against this bean. * * @return the attributes explicitly defined against this bean */ public Collection<ConfigAttribute> getAllConfigAttributes() { Set<ConfigAttribute> allAttributes = new HashSet<ConfigAttribute>(); for (List<ConfigAttribute> attributeList : methodMap.values()) { allAttributes.addAll(attributeList); } return allAttributes; } /** * Return if the given method name matches the mapped name. The default implementation * checks for "xxx*" and "*xxx" matches. * * @param methodName the method name of the class * @param mappedName the name in the descriptor * * @return if the names match */ private boolean isMatch(String methodName, String mappedName) { return (mappedName.endsWith("*") && methodName.startsWith(mappedName.substring(0, mappedName.length() - 1))) || (mappedName.startsWith("*") && methodName.endsWith(mappedName .substring(1, mappedName.length()))); } public void setBeanClassLoader(ClassLoader beanClassLoader) { Assert.notNull(beanClassLoader, "Bean class loader required"); this.beanClassLoader = beanClassLoader; } /** * @return map size (for unit tests and diagnostics) */ public int getMethodMapSize() { return methodMap.size(); } /** * Stores both the Java Method as well as the Class we obtained the Method from. This * is necessary because Method only provides us access to the declaring class. It * doesn't provide a way for us to introspect which Class the Method was registered * against. If a given Class inherits and redeclares a method (i.e. calls super();) * the registered Class and declaring Class are the same. If a given class merely * inherits but does not redeclare a method, the registered Class will be the Class * we're invoking against and the Method will provide details of the declared class.
*/ private static class RegisteredMethod { private final Method method; private final Class<?> registeredJavaType; public RegisteredMethod(Method method, Class<?> registeredJavaType) { Assert.notNull(method, "Method required"); Assert.notNull(registeredJavaType, "Registered Java Type required"); this.method = method; this.registeredJavaType = registeredJavaType; } public boolean equals(Object obj) { if (this == obj) { return true; } if (obj != null && obj instanceof RegisteredMethod) { RegisteredMethod rhs = (RegisteredMethod) obj; return method.equals(rhs.method) && registeredJavaType.equals(rhs.registeredJavaType); } return false; } public int hashCode() { return method.hashCode() * registeredJavaType.hashCode(); } public String toString() { return "RegisteredMethod[" + registeredJavaType.getName() + "; " + method + "]"; } } }
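/*
 * Standalone sketch of the wildcard rule applied by isMatch() above: a mapped method
 * name may end with '*' (prefix match) or start with '*' (suffix match); anything
 * else must match exactly via the equals() check performed by the caller.
 */
class MethodNameMatchSketch {
    static boolean isMatch(String methodName, String mappedName) {
        return (mappedName.endsWith("*")
                && methodName.startsWith(mappedName.substring(0, mappedName.length() - 1)))
            || (mappedName.startsWith("*")
                && methodName.endsWith(mappedName.substring(1)));
    }

    public static void main(String[] args) {
        System.out.println(isMatch("getBalance", "get*"));       // true  (prefix wildcard)
        System.out.println(isMatch("findAccount", "*Account"));  // true  (suffix wildcard)
        System.out.println(isMatch("deleteAccount", "get*"));    // false
    }
}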
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.dx.rop.annotation; import com.android.dx.rop.cst.CstString; import com.android.dx.rop.cst.CstType; import com.android.dx.util.MutabilityControl; import com.android.dx.util.ToHuman; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.TreeMap; /** * An annotation on an element of a class. Annotations have an * associated type and additionally consist of a set of (name, value) * pairs, where the names are unique. */ public final class Annotation extends MutabilityControl implements Comparable<Annotation>, ToHuman { /** {@code non-null;} type of the annotation */ private final CstType type; /** {@code non-null;} the visibility of the annotation */ private final AnnotationVisibility visibility; /** {@code non-null;} map from names to {@link NameValuePair} instances */ private final TreeMap<CstString, NameValuePair> elements; /** * Construct an instance. It initially contains no elements. * * @param type {@code non-null;} type of the annotation * @param visibility {@code non-null;} the visibility of the annotation */ public Annotation(CstType type, AnnotationVisibility visibility) { if (type == null) { throw new NullPointerException("type == null"); } if (visibility == null) { throw new NullPointerException("visibility == null"); } this.type = type; this.visibility = visibility; this.elements = new TreeMap<CstString, NameValuePair>(); } /** {@inheritDoc} */ @Override public boolean equals(Object other) { if (! (other instanceof Annotation)) { return false; } Annotation otherAnnotation = (Annotation) other; if (! 
(type.equals(otherAnnotation.type) && (visibility == otherAnnotation.visibility))) { return false; } return elements.equals(otherAnnotation.elements); } /** {@inheritDoc} */ public int hashCode() { int hash = type.hashCode(); hash = (hash * 31) + elements.hashCode(); hash = (hash * 31) + visibility.hashCode(); return hash; } /** {@inheritDoc} */ public int compareTo(Annotation other) { int result = type.compareTo(other.type); if (result != 0) { return result; } result = visibility.compareTo(other.visibility); if (result != 0) { return result; } Iterator<NameValuePair> thisIter = elements.values().iterator(); Iterator<NameValuePair> otherIter = other.elements.values().iterator(); while (thisIter.hasNext() && otherIter.hasNext()) { NameValuePair thisOne = thisIter.next(); NameValuePair otherOne = otherIter.next(); result = thisOne.compareTo(otherOne); if (result != 0) { return result; } } if (thisIter.hasNext()) { return 1; } else if (otherIter.hasNext()) { return -1; } return 0; } /** {@inheritDoc} */ @Override public String toString() { return toHuman(); } /** {@inheritDoc} */ public String toHuman() { StringBuilder sb = new StringBuilder(); sb.append(visibility.toHuman()); sb.append("-annotation "); sb.append(type.toHuman()); sb.append(" {"); boolean first = true; for (NameValuePair pair : elements.values()) { if (first) { first = false; } else { sb.append(", "); } sb.append(pair.getName().toHuman()); sb.append(": "); sb.append(pair.getValue().toHuman()); } sb.append("}"); return sb.toString(); } /** * Gets the type of this instance. * * @return {@code non-null;} the type */ public CstType getType() { return type; } /** * Gets the visibility of this instance. * * @return {@code non-null;} the visibility */ public AnnotationVisibility getVisibility() { return visibility; } /** * Put an element into the set of (name, value) pairs for this instance. * If there is a preexisting element with the same name, it will be * replaced by this method. * * @param pair {@code non-null;} the (name, value) pair to place into this instance */ public void put(NameValuePair pair) { throwIfImmutable(); if (pair == null) { throw new NullPointerException("pair == null"); } elements.put(pair.getName(), pair); } /** * Add an element to the set of (name, value) pairs for this instance. * It is an error to call this method if there is a preexisting element * with the same name. * * @param pair {@code non-null;} the (name, value) pair to add to this instance */ public void add(NameValuePair pair) { throwIfImmutable(); if (pair == null) { throw new NullPointerException("pair == null"); } CstString name = pair.getName(); if (elements.get(name) != null) { throw new IllegalArgumentException("name already added: " + name); } elements.put(name, pair); } /** * Gets the set of name-value pairs contained in this instance. The * result is always unmodifiable. * * @return {@code non-null;} the set of name-value pairs */ public Collection<NameValuePair> getNameValuePairs() { return Collections.unmodifiableCollection(elements.values()); } }
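/*
 * Sketch of the put()/add() distinction documented above: put() silently replaces an
 * element with the same name, add() rejects duplicates, and both fail once
 * setImmutable() (inherited from MutabilityControl) has been called. The
 * CstString(String), CstType(Type), Type.intern(String) and
 * NameValuePair(CstString, Constant) constructors are assumptions drawn from the
 * wider dx codebase and should be verified against the actual sources; the sketch is
 * assumed to live in the same package as Annotation.
 */
import com.android.dx.rop.cst.CstString;
import com.android.dx.rop.cst.CstType;
import com.android.dx.rop.type.Type;

class AnnotationSketch {
    public static void main(String[] args) {
        Annotation ann = new Annotation(
                new CstType(Type.intern("Ljava/lang/Deprecated;")),
                AnnotationVisibility.RUNTIME);

        ann.add(new NameValuePair(new CstString("since"), new CstString("1.0")));
        ann.put(new NameValuePair(new CstString("since"), new CstString("2.0"))); // replaces "1.0"
        // ann.add(new NameValuePair(new CstString("since"), new CstString("3.0")));
        // -> IllegalArgumentException: name already added

        ann.setImmutable();          // further put()/add() calls now throw
        System.out.println(ann.toHuman());
    }
}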
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Oleg V. Khaschansky */ /** * This file is based on WGL headers and has been generated by the nativebridge tool. */ package org.apache.harmony.awt.nativebridge.windows; import org.apache.harmony.awt.nativebridge.*; /** * This file is based on WGL headers and has been generated by the nativebridge tool. */ public class WGL extends BasicLibWrapper { static WGL instance; public static synchronized WGL getInstance() { if (instance == null) { instance = new WGL(); } return instance; } private WGL() { org.apache.harmony.awt.Utils.loadLibrary("Win32Wrapper"); init(); } private static native void init(); public final long wglGetProcAddress(String param_0) { Int8Pointer _param_0 = null == param_0? null : nb.createInt8Pointer(param_0, false); long tmp_0 = _param_0 == null ? 0 : _param_0.longLockPointer(); long tmp_ret = wglGetProcAddress(tmp_0); if (_param_0 != null) { _param_0.unlock(); _param_0.free(); } return tmp_ret; } public final long wglGetProcAddress(Int8Pointer param_0) { long tmp_0 = param_0 == null ? 0 : param_0.longLockPointer(); long tmp_ret = wglGetProcAddress(tmp_0); if (param_0 != null) { param_0.unlock(); } return tmp_ret; } public final native long wglGetProcAddress(long param_0); public final native long wglCreateContext(long param_0); public static class HGLRC__ extends CommonStructWrapper { public static final int sizeof = 4; HGLRC__(boolean direct) { super(sizeof, direct); } HGLRC__(VoidPointer base) { super(base); } HGLRC__(long addr) { super(addr); } public final void set_unused(int val) { byteBase.setInt32(0, val); } public final int get_unused() { return byteBase.getInt32(0); } public int size() { return sizeof; } } public final HGLRC__ createHGLRC__(boolean direct) { return new HGLRC__(direct); } public final HGLRC__ createHGLRC__(VoidPointer base) { return new HGLRC__(base); } public final HGLRC__ createHGLRC__(long addr) { return new HGLRC__(addr); } public final int wglMakeCurrent(long param_0, WGL.HGLRC__ param_1) { long tmp_0 = param_1 == null ? 0 : param_1.longLockPointer(); int tmp_ret = wglMakeCurrent(param_0, tmp_0); if (param_1 != null) { param_1.unlock(); } return tmp_ret; } public final native int wglMakeCurrent(long param_0, long param_1); public final int wglDestroyPbufferARB(HPBUFFERARB__ param_0) { long tmp_0 = param_0 == null ? 
0 : param_0.longLockPointer(); int tmp_ret = wglDestroyPbufferARB(tmp_0); if (param_0 != null) { param_0.unlock(); } return tmp_ret; } public final native int wglDestroyPbufferARB(long param_0); public static class HPBUFFERARB__ extends CommonStructWrapper { public static final int sizeof = 4; HPBUFFERARB__(boolean direct) { super(sizeof, direct); } HPBUFFERARB__(VoidPointer base) { super(base); } HPBUFFERARB__(long addr) { super(addr); } public final void set_unused(int val) { byteBase.setInt32(0, val); } public final int get_unused() { return byteBase.getInt32(0); } public int size() { return sizeof; } } public final HPBUFFERARB__ createHPBUFFERARB__(boolean direct) { return new HPBUFFERARB__(direct); } public final HPBUFFERARB__ createHPBUFFERARB__(VoidPointer base) { return new HPBUFFERARB__(base); } public final HPBUFFERARB__ createHPBUFFERARB__(long addr) { return new HPBUFFERARB__(addr); } public final native int SwapBuffers(long param_0); public final int wglReleasePbufferDCARB(WGL.HPBUFFERARB__ param_0, long param_1) { long tmp_0 = param_0 == null ? 0 : param_0.longLockPointer(); int tmp_ret = wglReleasePbufferDCARB(tmp_0, param_1); if (param_0 != null) { param_0.unlock(); } return tmp_ret; } public final native int wglReleasePbufferDCARB(long param_0, long param_1); public final WGL.HPBUFFERARB__ wglCreatePbufferARB(long param_0, int param_1, int param_2, int param_3, Int32Pointer param_4) { long tmp_0 = param_4 == null ? 0 : param_4.longLockPointer(); long tmp_ret = wglCreatePbufferARB(param_0, param_1, param_2, param_3, tmp_0); if (param_4 != null) { param_4.unlock(); } return WGL.instance.createHPBUFFERARB__(tmp_ret); } public final native long wglCreatePbufferARB(long param_0, int param_1, int param_2, int param_3, long param_4); public final int wglQueryPbufferARB(WGL.HPBUFFERARB__ param_0, int param_1, Int32Pointer param_2) { long tmp_0 = param_0 == null ? 0 : param_0.longLockPointer(); long tmp_1 = param_2 == null ? 0 : param_2.longLockPointer(); int tmp_ret = wglQueryPbufferARB(tmp_0, param_1, tmp_1); if (param_0 != null) { param_0.unlock(); } if (param_2 != null) { param_2.unlock(); } return tmp_ret; } public final native int wglQueryPbufferARB(long param_0, int param_1, long param_2); public final int wglDeleteContext(WGL.HGLRC__ param_0) { long tmp_0 = param_0 == null ? 0 : param_0.longLockPointer(); int tmp_ret = wglDeleteContext(tmp_0); if (param_0 != null) { param_0.unlock(); } return tmp_ret; } public final native int wglDeleteContext(long param_0); public final native long wglGetExtensionsStringARB(long param_0); public final int wglChoosePixelFormatARB(long param_0, Int32Pointer param_1, FloatPointer param_2, int param_3, Int32Pointer param_4, Int32Pointer param_5) { long tmp_0 = param_1 == null ? 0 : param_1.longLockPointer(); long tmp_1 = param_2 == null ? 0 : param_2.longLockPointer(); long tmp_2 = param_4 == null ? 0 : param_4.longLockPointer(); long tmp_3 = param_5 == null ? 0 : param_5.longLockPointer(); int tmp_ret = wglChoosePixelFormatARB(param_0, tmp_0, tmp_1, param_3, tmp_2, tmp_3); if (param_1 != null) { param_1.unlock(); } if (param_2 != null) { param_2.unlock(); } if (param_4 != null) { param_4.unlock(); } if (param_5 != null) { param_5.unlock(); } return tmp_ret; } public final native int wglChoosePixelFormatARB(long param_0, long param_1, long param_2, int param_3, long param_4, long param_5); public final int wglShareLists(WGL.HGLRC__ param_0, WGL.HGLRC__ param_1) { long tmp_0 = param_0 == null ? 
0 : param_0.longLockPointer(); long tmp_1 = param_1 == null ? 0 : param_1.longLockPointer(); int tmp_ret = wglShareLists(tmp_0, tmp_1); if (param_0 != null) { param_0.unlock(); } if (param_1 != null) { param_1.unlock(); } return tmp_ret; } public final native int wglShareLists(long param_0, long param_1); public final long wglGetPbufferDCARB(WGL.HPBUFFERARB__ param_0) { long tmp_0 = param_0 == null ? 0 : param_0.longLockPointer(); long tmp_ret = wglGetPbufferDCARB(tmp_0); if (param_0 != null) { param_0.unlock(); } return tmp_ret; } public final native long wglGetPbufferDCARB(long param_0); }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.LowerCaseFilter; import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; import org.apache.lucene.analysis.ar.ArabicStemFilter; import org.apache.lucene.analysis.br.BrazilianStemFilter; import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.cjk.CJKWidthFilter; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.UpperCaseFilter; import org.apache.lucene.analysis.cz.CzechStemFilter; import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.fr.FrenchAnalyzer; import org.apache.lucene.analysis.hi.HindiNormalizationFilter; import org.apache.lucene.analysis.in.IndicNormalizationFilter; import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter; import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter; import org.apache.lucene.analysis.miscellaneous.LengthFilter; import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilter; import org.apache.lucene.analysis.miscellaneous.ScandinavianFoldingFilter; import org.apache.lucene.analysis.miscellaneous.ScandinavianNormalizationFilter; import org.apache.lucene.analysis.miscellaneous.TrimFilter; import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter; import org.apache.lucene.analysis.miscellaneous.UniqueTokenFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter; import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.ClassicFilter; import org.apache.lucene.analysis.standard.StandardFilter; import org.apache.lucene.analysis.tr.ApostropheFilter; import org.apache.lucene.analysis.util.ElisionFilter; import org.elasticsearch.Version; import 
org.elasticsearch.index.analysis.DelimitedPayloadTokenFilterFactory; import org.elasticsearch.index.analysis.LimitTokenCountFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; import org.tartarus.snowball.ext.DutchStemmer; import org.tartarus.snowball.ext.FrenchStemmer; import java.util.Locale; /** * */ public enum PreBuiltTokenFilters { WORD_DELIMITER(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new WordDelimiterFilter(tokenStream, WordDelimiterFilter.GENERATE_WORD_PARTS | WordDelimiterFilter.GENERATE_NUMBER_PARTS | WordDelimiterFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterFilter.SPLIT_ON_NUMERICS | WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE, null); } }, STOP(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new StopFilter(tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET); } }, TRIM(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new TrimFilter(tokenStream); } }, REVERSE(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ReverseStringFilter(tokenStream); } }, ASCIIFOLDING(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ASCIIFoldingFilter(tokenStream); } }, LENGTH(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new LengthFilter(tokenStream, 0, Integer.MAX_VALUE); } }, COMMON_GRAMS(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new CommonGramsFilter(tokenStream, CharArraySet.EMPTY_SET); } }, LOWERCASE(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new LowerCaseFilter(tokenStream); } }, UPPERCASE(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new UpperCaseFilter(tokenStream); } }, KSTEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new KStemFilter(tokenStream); } }, PORTER_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new PorterStemFilter(tokenStream); } }, STANDARD(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new StandardFilter(tokenStream); } }, CLASSIC(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ClassicFilter(tokenStream); } }, NGRAM(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new NGramTokenFilter(tokenStream); } }, EDGE_NGRAM(CachingStrategy.LUCENE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new EdgeNGramTokenFilter(tokenStream, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE); } }, UNIQUE(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new UniqueTokenFilter(tokenStream); } }, TRUNCATE(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new TruncateTokenFilter(tokenStream, 
10); } }, // Extended Token Filters SNOWBALL(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new SnowballFilter(tokenStream, "English"); } }, STEMMER(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new PorterStemFilter(tokenStream); } }, ELISION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ElisionFilter(tokenStream, FrenchAnalyzer.DEFAULT_ARTICLES); } }, ARABIC_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ArabicStemFilter(tokenStream); } }, BRAZILIAN_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new BrazilianStemFilter(tokenStream); } }, CZECH_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new CzechStemFilter(tokenStream); } }, DUTCH_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new SnowballFilter(tokenStream, new DutchStemmer()); } }, FRENCH_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new SnowballFilter(tokenStream, new FrenchStemmer()); } }, GERMAN_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new GermanStemFilter(tokenStream); } }, RUSSIAN_STEM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new SnowballFilter(tokenStream, "Russian"); } }, KEYWORD_REPEAT(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new KeywordRepeatFilter(tokenStream); } }, ARABIC_NORMALIZATION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ArabicNormalizationFilter(tokenStream); } }, PERSIAN_NORMALIZATION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new PersianNormalizationFilter(tokenStream); } }, TYPE_AS_PAYLOAD(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new TypeAsPayloadTokenFilter(tokenStream); } }, SHINGLE(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ShingleFilter(tokenStream); } }, GERMAN_NORMALIZATION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new GermanNormalizationFilter(tokenStream); } }, HINDI_NORMALIZATION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new HindiNormalizationFilter(tokenStream); } }, INDIC_NORMALIZATION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new IndicNormalizationFilter(tokenStream); } }, SORANI_NORMALIZATION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new SoraniNormalizationFilter(tokenStream); } }, SCANDINAVIAN_NORMALIZATION(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ScandinavianNormalizationFilter(tokenStream); } }, SCANDINAVIAN_FOLDING(CachingStrategy.ONE) { @Override 
public TokenStream create(TokenStream tokenStream, Version version) { return new ScandinavianFoldingFilter(tokenStream); } }, APOSTROPHE(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new ApostropheFilter(tokenStream); } }, CJK_WIDTH(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new CJKWidthFilter(tokenStream); } }, DECIMAL_DIGIT(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new DecimalDigitFilter(tokenStream); } }, CJK_BIGRAM(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new CJKBigramFilter(tokenStream); } }, DELIMITED_PAYLOAD_FILTER(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new DelimitedPayloadTokenFilter(tokenStream, DelimitedPayloadTokenFilterFactory.DEFAULT_DELIMITER, DelimitedPayloadTokenFilterFactory.DEFAULT_ENCODER); } }, LIMIT(CachingStrategy.ONE) { @Override public TokenStream create(TokenStream tokenStream, Version version) { return new LimitTokenCountFilter(tokenStream, LimitTokenCountFilterFactory.DEFAULT_MAX_TOKEN_COUNT, LimitTokenCountFilterFactory.DEFAULT_CONSUME_ALL_TOKENS); } } ; public abstract TokenStream create(TokenStream tokenStream, Version version); protected final PreBuiltCacheFactory.PreBuiltCache<TokenFilterFactory> cache; PreBuiltTokenFilters(CachingStrategy cachingStrategy) { cache = PreBuiltCacheFactory.getCache(cachingStrategy); } public synchronized TokenFilterFactory getTokenFilterFactory(final Version version) { TokenFilterFactory factory = cache.get(version); if (factory == null) { final String finalName = name(); factory = new TokenFilterFactory() { @Override public String name() { return finalName.toLowerCase(Locale.ROOT); } @Override public TokenStream create(TokenStream tokenStream) { return valueOf(finalName).create(tokenStream, version); } }; cache.put(version, factory); } return factory; } /** * Get a pre built TokenFilter by its name or fallback to the default one * @param name TokenFilter name * @param defaultTokenFilter default TokenFilter if name not found */ public static PreBuiltTokenFilters getOrDefault(String name, PreBuiltTokenFilters defaultTokenFilter) { try { return valueOf(name.toUpperCase(Locale.ROOT)); } catch (IllegalArgumentException e) { return defaultTokenFilter; } } }
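// Illustrative sketch (not part of the enum above): resolving a pre-built token filter by name and
// applying it to a Lucene TokenStream. The whitespace tokenizer, the sample text and the "lowercase"
// name are assumptions for illustration; unknown names fall back to the supplied default filter.
class PreBuiltTokenFilterUsageSketch {
    static TokenStream lowercased(String text) throws java.io.IOException {
        org.apache.lucene.analysis.core.WhitespaceTokenizer tokenizer =
            new org.apache.lucene.analysis.core.WhitespaceTokenizer();
        tokenizer.setReader(new java.io.StringReader(text));
        // Look up the filter by name, falling back to LOWERCASE if the name is unknown.
        PreBuiltTokenFilters filter = PreBuiltTokenFilters.getOrDefault("lowercase", PreBuiltTokenFilters.LOWERCASE);
        // The factory is cached per version according to the enum constant's CachingStrategy.
        TokenFilterFactory factory = filter.getTokenFilterFactory(Version.CURRENT);
        return factory.create(tokenizer);
    }
}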
package com.github.blutorange.translune.serial; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.activation.MimetypesFileTypeMap; import javax.inject.Inject; import javax.inject.Singleton; import javax.persistence.EntityManager; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Path; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.eclipse.jdt.annotation.Nullable; import org.simpleflatmapper.csv.CsvColumnDefinition; import org.simpleflatmapper.csv.CsvParser; import org.slf4j.Logger; import com.github.blutorange.common.StringUtil; import com.github.blutorange.translune.db.Character; import com.github.blutorange.translune.db.ILunarDatabaseManager; import com.github.blutorange.translune.db.Resource; import com.github.blutorange.translune.db.Resource_; import com.github.blutorange.translune.db.Skill; import com.github.blutorange.translune.ic.Classed; import com.github.blutorange.translune.logic.EStatusCondition; import com.github.blutorange.translune.util.Constants; @Singleton public final class ImportProcessing implements IImportProcessing { @Inject ILunarDatabaseManager databaseManager; @Inject @Classed(ImportProcessing.class) Logger logger; @Inject MimetypesFileTypeMap mimetypesFileTypeMap; @Nullable private AvailableBgAndBgm availableBgAndBgm; @Inject public ImportProcessing() { } @Override public AvailableBgAndBgm availableBgAndBgm() throws IOException { AvailableBgAndBgm availableBgAndBgm = this.availableBgAndBgm; if (availableBgAndBgm != null) return availableBgAndBgm; synchronized (logger) { availableBgAndBgm = this.availableBgAndBgm; if (availableBgAndBgm != null) return availableBgAndBgm; final AvailableBgAndBgm result = databaseManager.withEm(false, em -> { final Map<String, Set<String>> bgMenu = retrieveAvailable(em, Constants.FILE_PREFIX_BG_MENU); final Map<String, Set<String>> bgmMenu = retrieveAvailable(em, Constants.FILE_PREFIX_BGM_MENU); final Map<String, Set<String>> bgBattle = retrieveAvailable(em, Constants.FILE_PREFIX_BG_BATTLE); final Map<String, Set<String>> bgmBattle = retrieveAvailable(em, Constants.FILE_PREFIX_BGM_BATTLE); return new AvailableBgAndBgm(bgMenu, bgmMenu, bgBattle, bgmBattle); }); if (result == null) throw new IOException("failed to load resources"); return this.availableBgAndBgm = result; } } private Map<String, Set<String>> retrieveAvailable(final EntityManager em, final String prefix) { final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<String> cq = cb.createQuery(String.class); final Path<String> path = cq.from(Resource.class).get(Resource_.name); final List<String> list = em.createQuery(cq.where(cb.like(path, prefix + "%")).select(path)).getResultList(); final Map<String, Set<String>> map = new HashMap<>(); for (final String name : list) { final String key = FilenameUtils.removeExtension(name); map.computeIfAbsent(key, k -> new HashSet<>()).add(name); } return 
map; } @Override public int importDataSet(final ZipFile zipFile) throws IOException { final Enumeration<? extends ZipEntry> entries = zipFile.entries(); final Set<CharacterCsvModel> characterModels = new HashSet<>(); final Set<SkillCsvModel> skillModel = new HashSet<>(); final Set<CharacterToSkillCsvModel> characterToSkillModels = new HashSet<>(); final Map<String, ZipEntry> filesCry = new HashMap<>(); final Map<String, ZipEntry> filesDefault = new HashMap<>(); final Map<String, ZipEntry> filesImg = new HashMap<>(); final Map<String, ZipEntry> filesIcon = new HashMap<>(); final Map<String, ZipEntry> filesBgMenu = new HashMap<>(); final Map<String, ZipEntry> filesBgBattle = new HashMap<>(); final Map<String, ZipEntry> filesBgmMenu = new HashMap<>(); final Map<String, ZipEntry> filesBgmBattle = new HashMap<>(); // Read data from the ZIP file. while (entries.hasMoreElements()) { final ZipEntry entry = entries.nextElement(); if (entry == null) continue; final String name = entry.getName(); if (name == null) continue; final String parent = FilenameUtils.getBaseName(FilenameUtils.getFullPathNoEndSeparator(entry.getName())); if (entry.isDirectory()) continue; switch (parent.toLowerCase(Locale.ROOT)) { case Constants.IMPORT_DIR_DEFAULT: logger.debug("found default file " + entry.getName()); filesDefault.put(Constants.FILE_PREFIX_DEFAULT + FilenameUtils.getName(name), entry); break; case Constants.IMPORT_DIR_CHARACTER_CRY: logger.debug("found cry sound file " + entry.getName()); filesCry.put(Constants.FILE_PREFIX_CHARACTER_CRY + FilenameUtils.getName(name), entry); break; case Constants.IMPORT_DIR_CHARACTER_IMG: logger.debug("found image file " + entry.getName()); filesImg.put(Constants.FILE_PREFIX_CHARACTER_IMG + FilenameUtils.getName(name), entry); break; case Constants.IMPORT_DIR_CHARACTER_ICON: logger.debug("found image file " + entry.getName()); filesIcon.put(Constants.FILE_PREFIX_CHARACTER_ICON + FilenameUtils.getName(name), entry); break; case Constants.IMPORT_DIR_BG_MENU: logger.debug("found bg menu file " + entry.getName()); filesBgMenu.put(FilenameUtils.getName(name), entry); break; case Constants.IMPORT_DIR_BGM_MENU: logger.debug("found bgm menu file " + entry.getName()); filesBgmMenu.put(FilenameUtils.getName(name), entry); break; case Constants.IMPORT_DIR_BG_BATTLE: logger.debug("found bg battle file " + entry.getName()); filesBgBattle.put(FilenameUtils.getName(name), entry); break; case Constants.IMPORT_DIR_BGM_BATTLE: logger.debug("found bgm battle file " + entry.getName()); filesBgmBattle.put(FilenameUtils.getName(name), entry); break; default: switch (StringUtil.toRootLowerCase(FilenameUtils.getName(name))) { case Constants.IMPORT_FILE_CHARACTER: logger.debug("found character data %s" + entry.getName()); characterModels.addAll(readCharacterCsv(zipFile.getInputStream(entry))); break; case Constants.IMPORT_FILE_CHARACTER_SKILL: logger.debug("found character skill data %s" + entry.getName()); characterToSkillModels.addAll(readCharacterToSkillCsv(zipFile.getInputStream(entry))); break; case Constants.IMPORT_FILE_SKILL: logger.debug("found skill data %s" + entry.getName()); skillModel.addAll(readSkillCsv(zipFile.getInputStream(entry))); break; } } } // Add associations and write entities to the database. 
final Set<Skill> skills = skillModel.stream().map(SkillCsvModel::toEntity).collect(Collectors.toSet()); attachSkills(characterModels, skills, characterToSkillModels); final Set<Character> characters = characterModels.stream().map(CharacterCsvModel::toEntity) .collect(Collectors.toSet()); final List<Pair<ZipEntry, String>> requiredFiles = new ArrayList<>(); for (final Character character : characters) { assertCharFile(filesImg, requiredFiles, character.getImgBack(), Constants.FILE_PREFIX_CHARACTER_IMG, character); assertCharFile(filesImg, requiredFiles, character.getImgFront(), Constants.FILE_PREFIX_CHARACTER_IMG, character); assertCharFile(filesIcon, requiredFiles, character.getImgIcon(), Constants.FILE_PREFIX_CHARACTER_ICON, character); assertCharFile(filesCry, requiredFiles, character.getCry(), Constants.FILE_PREFIX_CHARACTER_CRY, character); } assertDefaultFiles(filesDefault); addFiles(requiredFiles, filesImg, Constants.FILE_PREFIX_CHARACTER_IMG); addFiles(requiredFiles, filesIcon, Constants.FILE_PREFIX_CHARACTER_ICON); addFiles(requiredFiles, filesCry, Constants.FILE_PREFIX_CHARACTER_CRY); addFiles(requiredFiles, filesBgBattle, Constants.FILE_PREFIX_BG_BATTLE); addFiles(requiredFiles, filesBgmBattle, Constants.FILE_PREFIX_BGM_BATTLE); addFiles(requiredFiles, filesBgMenu, Constants.FILE_PREFIX_BG_MENU); addFiles(requiredFiles, filesBgmMenu, Constants.FILE_PREFIX_BGM_MENU); addFiles(requiredFiles, filesDefault, Constants.FILE_PREFIX_DEFAULT); logger.debug("writing imported files to database"); writeImportToDb(requiredFiles, characters, skills, zipFile); return characters.size() + skills.size() + requiredFiles.size(); } private void assertDefaultFiles(final Map<String, ZipEntry> filesDefault) throws IOException { if (!filesDefault.containsKey(Constants.DEFAULT_PLAYER_AVATAR)) throw new IOException("No default player avatar found, must be named def/pavatar"); } @Override public void uploadFile(final ZipFile zipFile, final ZipEntry entry, final String prefix) throws IOException { final Resource resource = resourceFromZipFile(zipFile, entry, prefix); final Boolean result = databaseManager.withEm(true, em -> { em.persist(resource); return Boolean.TRUE; }); if (!Boolean.TRUE.equals(result)) throw new IOException("failed to persist resource: " + entry.getName()); } private void attachSkills(final Set<CharacterCsvModel> characters, final Set<Skill> skills, final Set<CharacterToSkillCsvModel> charSkills) { final Map<String, CharacterCsvModel> charMap = characters.stream() .collect(Collectors.toMap(CharacterCsvModel::getKey, Function.identity())); final Map<String, Skill> skillMap = skills.stream() .collect(Collectors.toMap(Skill::getName, Function.identity())); for (final CharacterToSkillCsvModel charSkill : charSkills) { final int level = charSkill.getLevel(); final CharacterCsvModel character = charMap.get(charSkill.getCharacterId()); final Skill skill = skillMap.get(charSkill.getSkillId()); if (logger.isDebugEnabled()) { logger.debug("attempting to add character-skill association " + charSkill); logger.debug("found character " + (character == null ? "null" : character.getKey())); logger.debug("found skill " + (skill == null ? 
"null" : skill.getName())); } if (character == null) throw new IllegalArgumentException("Cannot add skill, specified character not found."); if (skill == null) throw new IllegalArgumentException("Cannot add skill, specified skill not found."); character.addSkill(level, skill); } } private void assertCharFile(final Map<String, ZipEntry> availableFiles, final List<Pair<ZipEntry, String>> requiredFiles, final String name, final String prefix, final Character character) throws IOException { if (!availableFiles.containsKey(name)) throw new IOException( String.format("Character %s specifies the resource %s, but this file was not found in the archive.", character.getName(), name)); final ZipEntry entry = availableFiles.get(name); if (entry != null) requiredFiles.add(new ImmutablePair<>(entry, prefix)); } private void addFiles(final List<Pair<ZipEntry, String>> requiredFiles, final Map<String, ZipEntry> availableFiles, final String prefix) { for (final ZipEntry entry : availableFiles.values()) requiredFiles.add(new ImmutablePair<>(entry, prefix)); } private Set<CharacterCsvModel> readCharacterCsv(final InputStream inputStream) throws IOException { return readTCsv(inputStream, CharacterCsvModel.class); } private Set<CharacterToSkillCsvModel> readCharacterToSkillCsv(final InputStream inputStream) throws IOException { return readTCsv(inputStream, CharacterToSkillCsvModel.class); } private Set<SkillCsvModel> readSkillCsv(final InputStream inputStream) throws IOException { try (Reader reader = new InputStreamReader(inputStream, StandardCharsets.UTF_8)) { return CsvParser .quote('\"') .separator(',') .mapTo(SkillCsvModel.class) .columnDefinition("condition", CsvColumnDefinition.customReaderDefinition( new CsmEnumCellValueReader<>(EStatusCondition.class))) .stream(reader) .collect(Collectors.toSet()); } } private <T> Set<T> readTCsv(final InputStream inputStream, final Class<T> clazz) throws IOException { try (Reader reader = new InputStreamReader(inputStream, StandardCharsets.UTF_8)) { return CsvParser.quote('\"').separator(',').mapTo(clazz).stream(reader).collect(Collectors.toSet()); } } private Resource resourceFromZipFile(final ZipFile zipFile, final ZipEntry entry, final String prefix) throws IOException { final Resource resource = new Resource(); final String mime = mimetypesFileTypeMap.getContentType(entry.getName()); resource.setMime(mime); resource.setName(prefix + FilenameUtils.getName(entry.getName())); resource.setData(IOUtils.toByteArray(zipFile.getInputStream(entry))); resource.setFilename(FilenameUtils.getName(String.valueOf(entry.getName()))); if (logger.isDebugEnabled()) logger.debug("created resource " + resource); return resource; } private void writeImportToDb(final List<Pair<ZipEntry, String>> requiredFiles, final Set<Character> characters, final Set<Skill> skills, final ZipFile zipFile) throws IOException { databaseManager.flushAndEmpty(); final Throwable result = databaseManager.withEm(true, em -> { try { characters.forEach(character -> { if (em.find(Character.class, character.getPrimaryKey()) != null) em.merge(character); else em.persist(character); }); skills.forEach(skill -> { if (em.find(Skill.class, skill.getPrimaryKey()) != null) em.merge(skill); else em.persist(skill); }); em.flush(); em.clear(); for (final Pair<ZipEntry, String> pair : requiredFiles) { Resource resource = resourceFromZipFile(zipFile, pair.getLeft(), pair.getRight()); if (em.find(Resource.class, resource.getPrimaryKey()) != null) { resource = em.merge(resource); } else em.persist(resource); // Make sure we 
do not load all files into RAM. em.flush(); em.detach(resource); } return null; } catch (final Exception e) { return e; } }); if (result != null) throw new IOException("Error while persisting data: " + result.getMessage(), result); } }
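// Illustrative sketch: feeding a dataset archive to the importer above. How the IImportProcessing
// instance is obtained (assumed to be injected here) and the archive location are assumptions for
// illustration; importDataSet returns the number of characters, skills and resource files written.
class ImportUsageSketch {
    @Inject
    IImportProcessing importProcessing;

    int runImport(final java.io.File archive) throws IOException {
        try (ZipFile zipFile = new ZipFile(archive)) {
            return importProcessing.importDataSet(zipFile);
        }
    }
}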
/* * Copyright 2012-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.cli.compiler.grape; import java.io.File; import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import groovy.lang.GroovyClassLoader; import org.eclipse.aether.DefaultRepositorySystemSession; import org.eclipse.aether.repository.Authentication; import org.eclipse.aether.repository.RemoteRepository; import org.junit.Test; import org.springframework.test.util.ReflectionTestUtils; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * Tests for {@link AetherGrapeEngine}. * * @author Andy Wilkinson */ public class AetherGrapeEngineTests { private final GroovyClassLoader groovyClassLoader = new GroovyClassLoader(); private final RepositoryConfiguration springMilestones = new RepositoryConfiguration( "spring-milestones", URI.create("https://repo.spring.io/milestone"), false); private AetherGrapeEngine createGrapeEngine( RepositoryConfiguration... additionalRepositories) { List<RepositoryConfiguration> repositoryConfigurations = new ArrayList<RepositoryConfiguration>(); repositoryConfigurations.add(new RepositoryConfiguration("central", URI.create("http://repo1.maven.org/maven2"), false)); repositoryConfigurations.addAll(Arrays.asList(additionalRepositories)); return AetherGrapeEngineFactory.create(this.groovyClassLoader, repositoryConfigurations, new DependencyResolutionContext()); } @Test public void dependencyResolution() { Map<String, Object> args = new HashMap<String, Object>(); createGrapeEngine(this.springMilestones).grab(args, createDependency("org.springframework", "spring-jdbc", "3.2.4.RELEASE")); assertEquals(5, this.groovyClassLoader.getURLs().length); } @Test public void proxySelector() { doWithCustomUserHome(new Runnable() { @Override public void run() { AetherGrapeEngine grapeEngine = createGrapeEngine(); DefaultRepositorySystemSession session = (DefaultRepositorySystemSession) ReflectionTestUtils .getField(grapeEngine, "session"); assertTrue(session.getProxySelector() instanceof CompositeProxySelector); } }); } @Test public void repositoryMirrors() { doWithCustomUserHome(new Runnable() { @SuppressWarnings("unchecked") @Override public void run() { AetherGrapeEngine grapeEngine = createGrapeEngine(); List<RemoteRepository> repositories = (List<RemoteRepository>) ReflectionTestUtils .getField(grapeEngine, "repositories"); assertEquals(1, repositories.size()); assertEquals("central-mirror", repositories.get(0).getId()); } }); } @Test public void repositoryAuthentication() { doWithCustomUserHome(new Runnable() { @SuppressWarnings("unchecked") @Override public void run() { AetherGrapeEngine grapeEngine = createGrapeEngine(); List<RemoteRepository> repositories = (List<RemoteRepository>) ReflectionTestUtils .getField(grapeEngine, 
"repositories"); assertEquals(1, repositories.size()); Authentication authentication = repositories.get(0).getAuthentication(); assertNotNull(authentication); } }); } @Test public void dependencyResolutionWithExclusions() { Map<String, Object> args = new HashMap<String, Object>(); args.put("excludes", Arrays.asList(createExclusion("org.springframework", "spring-core"))); createGrapeEngine(this.springMilestones).grab(args, createDependency("org.springframework", "spring-jdbc", "3.2.4.RELEASE"), createDependency("org.springframework", "spring-beans", "3.2.4.RELEASE")); assertEquals(3, this.groovyClassLoader.getURLs().length); } @Test public void nonTransitiveDependencyResolution() { Map<String, Object> args = new HashMap<String, Object>(); createGrapeEngine().grab(args, createDependency("org.springframework", "spring-jdbc", "3.2.4.RELEASE", false)); assertEquals(1, this.groovyClassLoader.getURLs().length); } @Test public void dependencyResolutionWithCustomClassLoader() { Map<String, Object> args = new HashMap<String, Object>(); GroovyClassLoader customClassLoader = new GroovyClassLoader(); args.put("classLoader", customClassLoader); createGrapeEngine(this.springMilestones).grab(args, createDependency("org.springframework", "spring-jdbc", "3.2.4.RELEASE")); assertEquals(0, this.groovyClassLoader.getURLs().length); assertEquals(5, customClassLoader.getURLs().length); } @Test public void resolutionWithCustomResolver() { Map<String, Object> args = new HashMap<String, Object>(); AetherGrapeEngine grapeEngine = this.createGrapeEngine(); grapeEngine .addResolver(createResolver("restlet.org", "http://maven.restlet.org")); grapeEngine.grab(args, createDependency("org.restlet", "org.restlet", "1.1.6")); assertEquals(1, this.groovyClassLoader.getURLs().length); } @Test(expected = IllegalArgumentException.class) public void differingTypeAndExt() { Map<String, Object> dependency = createDependency("org.grails", "grails-dependencies", "2.4.0"); dependency.put("type", "foo"); dependency.put("ext", "bar"); createGrapeEngine().grab(Collections.emptyMap(), dependency); } @Test public void pomDependencyResolutionViaType() { Map<String, Object> args = new HashMap<String, Object>(); Map<String, Object> dependency = createDependency("org.springframework", "spring-framework-bom", "4.0.5.RELEASE"); dependency.put("type", "pom"); createGrapeEngine().grab(args, dependency); URL[] urls = this.groovyClassLoader.getURLs(); assertEquals(1, urls.length); assertTrue(urls[0].toExternalForm().endsWith(".pom")); } @Test public void pomDependencyResolutionViaExt() { Map<String, Object> args = new HashMap<String, Object>(); Map<String, Object> dependency = createDependency("org.springframework", "spring-framework-bom", "4.0.5.RELEASE"); dependency.put("ext", "pom"); createGrapeEngine().grab(args, dependency); URL[] urls = this.groovyClassLoader.getURLs(); assertEquals(1, urls.length); assertTrue(urls[0].toExternalForm().endsWith(".pom")); } @Test public void resolutionWithClassifier() { Map<String, Object> args = new HashMap<String, Object>(); Map<String, Object> dependency = createDependency("org.springframework", "spring-jdbc", "3.2.4.RELEASE", false); dependency.put("classifier", "sources"); createGrapeEngine().grab(args, dependency); URL[] urls = this.groovyClassLoader.getURLs(); assertEquals(1, urls.length); assertTrue(urls[0].toExternalForm().endsWith("-sources.jar")); } private Map<String, Object> createDependency(String group, String module, String version) { Map<String, Object> dependency = new HashMap<String, 
Object>(); dependency.put("group", group); dependency.put("module", module); dependency.put("version", version); return dependency; } private Map<String, Object> createDependency(String group, String module, String version, boolean transitive) { Map<String, Object> dependency = createDependency(group, module, version); dependency.put("transitive", transitive); return dependency; } private Map<String, Object> createResolver(String name, String url) { Map<String, Object> resolver = new HashMap<String, Object>(); resolver.put("name", name); resolver.put("root", url); return resolver; } private Map<String, Object> createExclusion(String group, String module) { Map<String, Object> exclusion = new HashMap<String, Object>(); exclusion.put("group", group); exclusion.put("module", module); return exclusion; } private void doWithCustomUserHome(Runnable action) { doWithSystemProperty("user.home", new File("src/test/resources").getAbsolutePath(), action); } private void doWithSystemProperty(String key, String value, Runnable action) { String previousValue = setOrClearSystemProperty(key, value); try { action.run(); } finally { setOrClearSystemProperty(key, previousValue); } } private String setOrClearSystemProperty(String key, String value) { if (value != null) { return System.setProperty(key, value); } return System.clearProperty(key); } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.resourcemanager.compute.implementation; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.resourcemanager.compute.ComputeManager; import com.azure.resourcemanager.compute.models.Disallowed; import com.azure.resourcemanager.compute.models.DiskSkuTypes; import com.azure.resourcemanager.compute.models.DiskStorageAccountTypes; import com.azure.resourcemanager.compute.models.Gallery; import com.azure.resourcemanager.compute.models.GalleryImage; import com.azure.resourcemanager.compute.models.GalleryImageIdentifier; import com.azure.resourcemanager.compute.models.GalleryImageUpdate; import com.azure.resourcemanager.compute.models.GalleryImageVersion; import com.azure.resourcemanager.compute.models.ImagePurchasePlan; import com.azure.resourcemanager.compute.models.OperatingSystemStateTypes; import com.azure.resourcemanager.compute.models.OperatingSystemTypes; import com.azure.resourcemanager.compute.models.RecommendedMachineConfiguration; import com.azure.resourcemanager.compute.models.ResourceRange; import com.azure.resourcemanager.compute.fluent.models.GalleryImageInner; import com.azure.core.management.Region; import com.azure.resourcemanager.resources.fluentcore.model.implementation.CreatableUpdatableImpl; import reactor.core.publisher.Mono; import java.time.OffsetDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; /** The implementation for GalleryImage and its create and update interfaces. */ class GalleryImageImpl extends CreatableUpdatableImpl<GalleryImage, GalleryImageInner, GalleryImageImpl> implements GalleryImage, GalleryImage.Definition, GalleryImage.Update { private final ComputeManager manager; private String resourceGroupName; private String galleryName; private String galleryImageName; private GalleryImageUpdate galleryImageUpdate; GalleryImageImpl(String name, ComputeManager manager) { super(name, new GalleryImageInner()); this.manager = manager; // Set resource name this.galleryImageName = name; // } GalleryImageImpl(GalleryImageInner inner, ComputeManager manager) { super(inner.name(), inner); this.manager = manager; // Set resource name this.galleryImageName = inner.name(); // resource ancestor names this.resourceGroupName = getValueFromIdByName(inner.id(), "resourceGroups"); this.galleryName = getValueFromIdByName(inner.id(), "galleries"); this.galleryImageName = getValueFromIdByName(inner.id(), "images"); // } @Override public Mono<GalleryImageVersion> getVersionAsync(String versionName) { return this .manager() .galleryImageVersions() .getByGalleryImageAsync(this.resourceGroupName, this.galleryName, this.galleryImageName, versionName); } @Override public GalleryImageVersion getVersion(String versionName) { return this .manager() .galleryImageVersions() .getByGalleryImage(this.resourceGroupName, this.galleryName, this.galleryImageName, versionName); } @Override public PagedFlux<GalleryImageVersion> listVersionsAsync() { return this .manager() .galleryImageVersions() .listByGalleryImageAsync(this.resourceGroupName, this.galleryName, this.galleryImageName); } @Override public PagedIterable<GalleryImageVersion> listVersions() { return this .manager() .galleryImageVersions() .listByGalleryImage(this.resourceGroupName, this.galleryName, this.galleryImageName); } @Override public ComputeManager 
manager() { return this.manager; } @Override public Mono<GalleryImage> createResourceAsync() { return manager() .serviceClient() .getGalleryImages() .createOrUpdateAsync(this.resourceGroupName, this.galleryName, this.galleryImageName, this.innerModel()) .map(innerToFluentMap(this)); } @Override public GalleryImageImpl update() { this.galleryImageUpdate = new GalleryImageUpdate(); return super.update(); } @Override public Mono<GalleryImage> updateResourceAsync() { this.galleryImageUpdate .withOsState(innerModel().osState()) .withOsType(innerModel().osType()) .withIdentifier(innerModel().identifier()); return manager() .serviceClient() .getGalleryImages() .updateAsync(this.resourceGroupName, this.galleryName, this.galleryImageName, this.galleryImageUpdate) .map(innerToFluentMap(this)); } @Override protected Mono<GalleryImageInner> getInnerAsync() { return manager() .serviceClient() .getGalleryImages() .getAsync(this.resourceGroupName, this.galleryName, this.galleryImageName); } @Override public boolean isInCreateMode() { return this.innerModel().id() == null; } @Override public String description() { return this.innerModel().description(); } @Override public List<DiskSkuTypes> unsupportedDiskTypes() { if (this.innerModel().disallowed() == null || this.innerModel().disallowed().diskTypes() == null) { return Collections.unmodifiableList(new ArrayList<DiskSkuTypes>()); } else { List<DiskSkuTypes> diskTypes = new ArrayList<DiskSkuTypes>(); for (String diskTypeStr : this.innerModel().disallowed().diskTypes()) { diskTypes.add(DiskSkuTypes.fromStorageAccountType(DiskStorageAccountTypes.fromString(diskTypeStr))); } return Collections.unmodifiableList(diskTypes); } } @Override public Disallowed disallowed() { return this.innerModel().disallowed(); } @Override public OffsetDateTime endOfLifeDate() { return this.innerModel().endOfLifeDate(); } @Override public String eula() { return this.innerModel().eula(); } @Override public String id() { return this.innerModel().id(); } @Override public GalleryImageIdentifier identifier() { return this.innerModel().identifier(); } @Override public String location() { return this.innerModel().location(); } @Override public String name() { return this.innerModel().name(); } @Override public OperatingSystemStateTypes osState() { return this.innerModel().osState(); } @Override public OperatingSystemTypes osType() { return this.innerModel().osType(); } @Override public String privacyStatementUri() { return this.innerModel().privacyStatementUri(); } @Override public String provisioningState() { return this.innerModel().provisioningState().toString(); } @Override public ImagePurchasePlan purchasePlan() { return this.innerModel().purchasePlan(); } @Override public RecommendedMachineConfiguration recommendedVirtualMachineConfiguration() { return this.innerModel().recommended(); } @Override public String releaseNoteUri() { return this.innerModel().releaseNoteUri(); } @Override public Map<String, String> tags() { return this.innerModel().tags(); } @Override public String type() { return this.innerModel().type(); } @Override public GalleryImageImpl withExistingGallery(String resourceGroupName, String galleryName) { this.resourceGroupName = resourceGroupName; this.galleryName = galleryName; return this; } @Override public GalleryImageImpl withExistingGallery(Gallery gallery) { this.resourceGroupName = gallery.resourceGroupName(); this.galleryName = gallery.name(); return this; } @Override public GalleryImageImpl withLocation(String location) { 
this.innerModel().withLocation(location); return this; } @Override public GalleryImageImpl withLocation(Region location) { this.innerModel().withLocation(location.toString()); return this; } @Override public GalleryImageImpl withIdentifier(GalleryImageIdentifier identifier) { this.innerModel().withIdentifier(identifier); return this; } @Override public GalleryImageImpl withIdentifier(String publisher, String offer, String sku) { this .innerModel() .withIdentifier(new GalleryImageIdentifier().withPublisher(publisher).withOffer(offer).withSku(sku)); return this; } @Override public GalleryImageImpl withGeneralizedWindows() { return this.withWindows(OperatingSystemStateTypes.GENERALIZED); } @Override public GalleryImageImpl withGeneralizedLinux() { return this.withLinux(OperatingSystemStateTypes.GENERALIZED); } @Override public GalleryImageImpl withWindows(OperatingSystemStateTypes osState) { this.innerModel().withOsType(OperatingSystemTypes.WINDOWS).withOsState(osState); return this; } @Override public GalleryImageImpl withLinux(OperatingSystemStateTypes osState) { this.innerModel().withOsType(OperatingSystemTypes.LINUX).withOsState(osState); return this; } @Override public GalleryImageImpl withDescription(String description) { this.innerModel().withDescription(description); if (isInUpdateMode()) { this.galleryImageUpdate.withDescription(description); } return this; } @Override public GalleryImageImpl withUnsupportedDiskType(DiskSkuTypes diskType) { if (this.innerModel().disallowed() == null) { this.innerModel().withDisallowed(new Disallowed()); } if (this.innerModel().disallowed().diskTypes() == null) { this.innerModel().disallowed().withDiskTypes(new ArrayList<String>()); } boolean found = false; String newDiskTypeStr = diskType.toString(); for (String diskTypeStr : this.innerModel().disallowed().diskTypes()) { if (diskTypeStr.equalsIgnoreCase(newDiskTypeStr)) { found = true; break; } } if (!found) { this.innerModel().disallowed().diskTypes().add(diskType.toString()); } if (isInUpdateMode()) { this.galleryImageUpdate.withDisallowed(this.innerModel().disallowed()); } return this; } @Override public GalleryImageImpl withUnsupportedDiskTypes(List<DiskSkuTypes> diskTypes) { if (this.innerModel().disallowed() == null) { this.innerModel().withDisallowed(new Disallowed()); } this.innerModel().disallowed().withDiskTypes(new ArrayList<String>()); for (DiskSkuTypes diskType : diskTypes) { this.innerModel().disallowed().diskTypes().add(diskType.toString()); } if (isInUpdateMode()) { this.galleryImageUpdate.withDisallowed(this.innerModel().disallowed()); } return this; } @Override public GalleryImageImpl withoutUnsupportedDiskType(DiskSkuTypes diskType) { if (this.innerModel().disallowed() != null && this.innerModel().disallowed().diskTypes() != null) { int foundIndex = -1; int i = 0; String diskTypeToRemove = diskType.toString(); for (String diskTypeStr : this.innerModel().disallowed().diskTypes()) { if (diskTypeStr.equalsIgnoreCase(diskTypeToRemove)) { foundIndex = i; break; } i++; } if (foundIndex != -1) { this.innerModel().disallowed().diskTypes().remove(foundIndex); } if (isInUpdateMode()) { this.galleryImageUpdate.withDisallowed(this.innerModel().disallowed()); } } return this; } @Override public GalleryImageImpl withDisallowed(Disallowed disallowed) { this.innerModel().withDisallowed(disallowed); if (isInUpdateMode()) { this.galleryImageUpdate.withDisallowed(disallowed); } return this; } @Override public GalleryImageImpl withEndOfLifeDate(OffsetDateTime endOfLifeDate) { 
this.innerModel().withEndOfLifeDate(endOfLifeDate); if (isInUpdateMode()) { this.galleryImageUpdate.withEndOfLifeDate(endOfLifeDate); } return this; } @Override public GalleryImageImpl withEula(String eula) { this.innerModel().withEula(eula); if (isInUpdateMode()) { this.galleryImageUpdate.withEula(eula); } return this; } @Override public GalleryImageImpl withOsState(OperatingSystemStateTypes osState) { this.innerModel().withOsState(osState); if (isInUpdateMode()) { this.galleryImageUpdate.withOsState(osState); } return this; } @Override public GalleryImageImpl withPrivacyStatementUri(String privacyStatementUri) { this.innerModel().withPrivacyStatementUri(privacyStatementUri); if (isInUpdateMode()) { this.galleryImageUpdate.withPrivacyStatementUri(privacyStatementUri); } return this; } @Override public GalleryImageImpl withPurchasePlan(String name, String publisher, String product) { return this .withPurchasePlan(new ImagePurchasePlan().withName(name).withPublisher(publisher).withProduct(product)); } @Override public GalleryImageImpl withPurchasePlan(ImagePurchasePlan purchasePlan) { this.innerModel().withPurchasePlan(purchasePlan); return this; } @Override public GalleryImageImpl withRecommendedMinimumCPUsCountForVirtualMachine(int minCount) { if (this.innerModel().recommended() == null) { this.innerModel().withRecommended(new RecommendedMachineConfiguration()); } if (this.innerModel().recommended().vCPUs() == null) { this.innerModel().recommended().withVCPUs(new ResourceRange()); } this.innerModel().recommended().vCPUs().withMin(minCount); if (isInUpdateMode()) { this.galleryImageUpdate.withRecommended(this.innerModel().recommended()); } return this; } @Override public GalleryImageImpl withRecommendedMaximumCPUsCountForVirtualMachine(int maxCount) { if (this.innerModel().recommended() == null) { this.innerModel().withRecommended(new RecommendedMachineConfiguration()); } if (this.innerModel().recommended().vCPUs() == null) { this.innerModel().recommended().withVCPUs(new ResourceRange()); } this.innerModel().recommended().vCPUs().withMax(maxCount); if (isInUpdateMode()) { this.galleryImageUpdate.withRecommended(this.innerModel().recommended()); } return this; } @Override public GalleryImageImpl withRecommendedCPUsCountForVirtualMachine(int minCount, int maxCount) { if (this.innerModel().recommended() == null) { this.innerModel().withRecommended(new RecommendedMachineConfiguration()); } this.innerModel().recommended().withVCPUs(new ResourceRange()); this.innerModel().recommended().vCPUs().withMin(minCount); this.innerModel().recommended().vCPUs().withMax(maxCount); if (isInUpdateMode()) { this.galleryImageUpdate.withRecommended(this.innerModel().recommended()); } return this; } @Override public GalleryImageImpl withRecommendedMinimumMemoryForVirtualMachine(int minMB) { if (this.innerModel().recommended() == null) { this.innerModel().withRecommended(new RecommendedMachineConfiguration()); } if (this.innerModel().recommended().memory() == null) { this.innerModel().recommended().withMemory(new ResourceRange()); } this.innerModel().recommended().memory().withMin(minMB); if (isInUpdateMode()) { this.galleryImageUpdate.withRecommended(this.innerModel().recommended()); } return this; } @Override public GalleryImageImpl withRecommendedMaximumMemoryForVirtualMachine(int maxMB) { if (this.innerModel().recommended() == null) { this.innerModel().withRecommended(new RecommendedMachineConfiguration()); } if (this.innerModel().recommended().memory() == null) { 
this.innerModel().recommended().withMemory(new ResourceRange()); } this.innerModel().recommended().memory().withMax(maxMB); if (isInUpdateMode()) { this.galleryImageUpdate.withRecommended(this.innerModel().recommended()); } return this; } @Override public GalleryImageImpl withRecommendedMemoryForVirtualMachine(int minMB, int maxMB) { if (this.innerModel().recommended() == null) { this.innerModel().withRecommended(new RecommendedMachineConfiguration()); } this.innerModel().recommended().withMemory(new ResourceRange()); this.innerModel().recommended().memory().withMin(minMB); this.innerModel().recommended().memory().withMax(maxMB); if (isInUpdateMode()) { this.galleryImageUpdate.withRecommended(this.innerModel().recommended()); } return this; } @Override public GalleryImageImpl withRecommendedConfigurationForVirtualMachine( RecommendedMachineConfiguration recommendedConfig) { this.innerModel().withRecommended(recommendedConfig); if (isInUpdateMode()) { this.galleryImageUpdate.withRecommended(recommendedConfig); } return this; } @Override public GalleryImageImpl withReleaseNoteUri(String releaseNoteUri) { this.innerModel().withReleaseNoteUri(releaseNoteUri); if (isInUpdateMode()) { this.galleryImageUpdate.withReleaseNoteUri(releaseNoteUri); } return this; } @Override public GalleryImageImpl withTags(Map<String, String> tags) { this.innerModel().withTags(tags); if (isInUpdateMode()) { this.galleryImageUpdate.withTags(tags); } return this; } private boolean isInUpdateMode() { return !isInCreateMode(); } private static String getValueFromIdByName(String id, String name) { if (id == null) { return null; } Iterable<String> iterable = Arrays.asList(id.split("/")); Iterator<String> itr = iterable.iterator(); while (itr.hasNext()) { String part = itr.next(); if (part != null && !part.trim().isEmpty()) { if (part.equalsIgnoreCase(name)) { if (itr.hasNext()) { return itr.next(); } else { return null; } } } } return null; } }
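// Illustrative sketch: updating an existing gallery image through the fluent Update interface
// implemented above. The galleryImage instance is assumed to have been retrieved beforehand, the
// description and end-of-life date are placeholder values, and apply() is assumed to be the usual
// completion call of the fluent update contract used by this SDK.
class GalleryImageUpdateSketch {
    GalleryImage refreshImageMetadata(GalleryImage galleryImage) {
        return galleryImage.update()
            .withDescription("Refreshed image definition")                  // also mirrored into GalleryImageUpdate
            .withEndOfLifeDate(java.time.OffsetDateTime.now().plusYears(1)) // placeholder date
            .apply();
    }
}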
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.apex.malhar.lib.state.spillable; import java.util.List; import javax.validation.constraints.NotNull; import org.apache.apex.malhar.lib.utils.serde.Serde; import org.apache.hadoop.classification.InterfaceStability; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.datatorrent.api.Context; import com.datatorrent.netlet.util.Slice; /** * This is a factory that is used for Spillable datastructures. This component is used by nesting it inside of an * operator and forwarding the appropriate operator callbacks to the {@link SpillableComplexComponentImpl}. * Spillable datastructures are created by calling the appropriate factory methods on the * {@link SpillableComplexComponentImpl} in the setup method of an operator. * * @since 3.5.0 */ @InterfaceStability.Evolving public class SpillableComplexComponentImpl implements SpillableComplexComponent { private List<SpillableComponent> componentList = Lists.newArrayList(); @NotNull private SpillableStateStore store; @NotNull private SpillableIdentifierGenerator identifierGenerator; private SpillableComplexComponentImpl() { // for kryo } public SpillableComplexComponentImpl(SpillableStateStore store) { this(store, new SequentialSpillableIdentifierGenerator()); } public SpillableComplexComponentImpl(SpillableStateStore store, SpillableIdentifierGenerator identifierGenerator) { this.store = Preconditions.checkNotNull(store); this.identifierGenerator = Preconditions.checkNotNull(identifierGenerator); } public <T> SpillableList<T> newSpillableArrayList(long bucket, Serde<T, Slice> serde) { SpillableArrayListImpl<T> list = new SpillableArrayListImpl<T>(bucket, identifierGenerator.next(), store, serde); componentList.add(list); return list; } public <T> SpillableList<T> newSpillableArrayList(byte[] identifier, long bucket, Serde<T, Slice> serde) { identifierGenerator.register(identifier); SpillableArrayListImpl<T> list = new SpillableArrayListImpl<T>(bucket, identifier, store, serde); componentList.add(list); return list; } public <K, V> SpillableMap<K, V> newSpillableMap(long bucket, Serde<K, Slice> serdeKey, Serde<V, Slice> serdeValue) { SpillableMapImpl<K, V> map = new SpillableMapImpl<K, V>(store, identifierGenerator.next(), bucket, serdeKey, serdeValue); componentList.add(map); return map; } public <K, V> SpillableMap<K, V> newSpillableMap(byte[] identifier, long bucket, Serde<K, Slice> serdeKey, Serde<V, Slice> serdeValue) { identifierGenerator.register(identifier); SpillableMapImpl<K, V> map = new SpillableMapImpl<K, V>(store, identifier, bucket, serdeKey, serdeValue); componentList.add(map); return map; } public <K, V> SpillableListMultimap<K, V> newSpillableArrayListMultimap(long bucket, Serde<K, Slice>
serdeKey, Serde<V, Slice> serdeValue) { SpillableArrayListMultimapImpl<K, V> map = new SpillableArrayListMultimapImpl<K, V>(store, identifierGenerator.next(), bucket, serdeKey, serdeValue); componentList.add(map); return map; } public <K, V> SpillableListMultimap<K, V> newSpillableArrayListMultimap(byte[] identifier, long bucket, Serde<K, Slice> serdeKey, Serde<V, Slice> serdeValue) { identifierGenerator.register(identifier); SpillableArrayListMultimapImpl<K, V> map = new SpillableArrayListMultimapImpl<K, V>(store, identifier, bucket, serdeKey, serdeValue); componentList.add(map); return map; } public <K, V> SpillableSetMultimap<K, V> newSpillableSetMultimap(long bucket, Serde<K, Slice> serdeKey, Serde<V, Slice> serdeValue) { SpillableSetMultimapImpl<K, V> map = new SpillableSetMultimapImpl<K, V>(store, identifierGenerator.next(), bucket, serdeKey, serdeValue); componentList.add(map); return map; } public <T> SpillableMultiset<T> newSpillableMultiset(long bucket, Serde<T, Slice> serde) { throw new UnsupportedOperationException("Unsupported Operation"); } public <T> SpillableMultiset<T> newSpillableMultiset(byte[] identifier, long bucket, Serde<T, Slice> serde) { throw new UnsupportedOperationException("Unsupported Operation"); } public <T> SpillableQueue<T> newSpillableQueue(long bucket, Serde<T, Slice> serde) { throw new UnsupportedOperationException("Unsupported Operation"); } public <T> SpillableQueue<T> newSpillableQueue(byte[] identifier, long bucket, Serde<T, Slice> serde) { throw new UnsupportedOperationException("Unsupported Operation"); } @Override public void setup(Context.OperatorContext context) { store.setup(context); for (SpillableComponent spillableComponent: componentList) { spillableComponent.setup(context); } } @Override public void beginWindow(long windowId) { store.beginWindow(windowId); for (SpillableComponent spillableComponent: componentList) { spillableComponent.beginWindow(windowId); } } @Override public void endWindow() { for (SpillableComponent spillableComponent: componentList) { spillableComponent.endWindow(); } store.endWindow(); } @Override public void teardown() { for (SpillableComponent spillableComponent: componentList) { spillableComponent.teardown(); } store.teardown(); } @Override public void beforeCheckpoint(long l) { store.beforeCheckpoint(l); } @Override public void checkpointed(long l) { store.checkpointed(l); } @Override public void committed(long l) { store.committed(l); } }
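// Illustrative sketch of the nesting pattern described in the javadoc above: an operator keeps a
// SpillableComplexComponentImpl as a field, creates its spillable datastructures in setup(), and
// forwards the remaining lifecycle callbacks to it. The concrete SpillableStateStore, the String
// serdes and bucket id 0 are assumptions supplied from the outside; the Apex Operator interface
// from com.datatorrent.api is assumed as the implemented contract.
class SpillableUsageSketchOperator implements com.datatorrent.api.Operator {
    private SpillableStateStore store;          // assumed to be configured before deployment
    private Serde<String, Slice> keySerde;      // assumed String key serde
    private Serde<String, Slice> valueSerde;    // assumed String value serde
    private SpillableComplexComponentImpl spillables;
    private SpillableMap<String, String> map;

    @Override
    public void setup(Context.OperatorContext context) {
        spillables = new SpillableComplexComponentImpl(store);
        // Create the datastructures first so the component initializes them in its own setup.
        map = spillables.newSpillableMap(0L, keySerde, valueSerde);
        spillables.setup(context);
    }

    @Override
    public void beginWindow(long windowId) {
        spillables.beginWindow(windowId);
    }

    @Override
    public void endWindow() {
        spillables.endWindow();
    }

    @Override
    public void teardown() {
        spillables.teardown();
    }
}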
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/bigtable/v1/bigtable_service_messages.proto package com.google.bigtable.v1; /** * <pre> * Response message for BigtableService.CheckAndMutateRowRequest. * </pre> * * Protobuf type {@code google.bigtable.v1.CheckAndMutateRowResponse} */ public final class CheckAndMutateRowResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.bigtable.v1.CheckAndMutateRowResponse) CheckAndMutateRowResponseOrBuilder { // Use CheckAndMutateRowResponse.newBuilder() to construct. private CheckAndMutateRowResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CheckAndMutateRowResponse() { predicateMatched_ = false; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private CheckAndMutateRowResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 8: { predicateMatched_ = input.readBool(); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.bigtable.v1.BigtableServiceMessagesProto.internal_static_google_bigtable_v1_CheckAndMutateRowResponse_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.bigtable.v1.BigtableServiceMessagesProto.internal_static_google_bigtable_v1_CheckAndMutateRowResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.bigtable.v1.CheckAndMutateRowResponse.class, com.google.bigtable.v1.CheckAndMutateRowResponse.Builder.class); } public static final int PREDICATE_MATCHED_FIELD_NUMBER = 1; private boolean predicateMatched_; /** * <pre> * Whether or not the request's "predicate_filter" yielded any results for * the specified row. 
* </pre> * * <code>optional bool predicate_matched = 1;</code> */ public boolean getPredicateMatched() { return predicateMatched_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (predicateMatched_ != false) { output.writeBool(1, predicateMatched_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (predicateMatched_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(1, predicateMatched_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.bigtable.v1.CheckAndMutateRowResponse)) { return super.equals(obj); } com.google.bigtable.v1.CheckAndMutateRowResponse other = (com.google.bigtable.v1.CheckAndMutateRowResponse) obj; boolean result = true; result = result && (getPredicateMatched() == other.getPredicateMatched()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (37 * hash) + PREDICATE_MATCHED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( getPredicateMatched()); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException 
{ return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.bigtable.v1.CheckAndMutateRowResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.bigtable.v1.CheckAndMutateRowResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Response message for BigtableService.CheckAndMutateRowRequest. * </pre> * * Protobuf type {@code google.bigtable.v1.CheckAndMutateRowResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.bigtable.v1.CheckAndMutateRowResponse) com.google.bigtable.v1.CheckAndMutateRowResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.bigtable.v1.BigtableServiceMessagesProto.internal_static_google_bigtable_v1_CheckAndMutateRowResponse_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.bigtable.v1.BigtableServiceMessagesProto.internal_static_google_bigtable_v1_CheckAndMutateRowResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.bigtable.v1.CheckAndMutateRowResponse.class, com.google.bigtable.v1.CheckAndMutateRowResponse.Builder.class); } // Construct using com.google.bigtable.v1.CheckAndMutateRowResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); predicateMatched_ = false; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.bigtable.v1.BigtableServiceMessagesProto.internal_static_google_bigtable_v1_CheckAndMutateRowResponse_descriptor; } public com.google.bigtable.v1.CheckAndMutateRowResponse getDefaultInstanceForType() { return com.google.bigtable.v1.CheckAndMutateRowResponse.getDefaultInstance(); } public com.google.bigtable.v1.CheckAndMutateRowResponse build() { com.google.bigtable.v1.CheckAndMutateRowResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.bigtable.v1.CheckAndMutateRowResponse buildPartial() { com.google.bigtable.v1.CheckAndMutateRowResponse result = new 
com.google.bigtable.v1.CheckAndMutateRowResponse(this); result.predicateMatched_ = predicateMatched_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.bigtable.v1.CheckAndMutateRowResponse) { return mergeFrom((com.google.bigtable.v1.CheckAndMutateRowResponse)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.bigtable.v1.CheckAndMutateRowResponse other) { if (other == com.google.bigtable.v1.CheckAndMutateRowResponse.getDefaultInstance()) return this; if (other.getPredicateMatched() != false) { setPredicateMatched(other.getPredicateMatched()); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.bigtable.v1.CheckAndMutateRowResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.bigtable.v1.CheckAndMutateRowResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private boolean predicateMatched_ ; /** * <pre> * Whether or not the request's "predicate_filter" yielded any results for * the specified row. * </pre> * * <code>optional bool predicate_matched = 1;</code> */ public boolean getPredicateMatched() { return predicateMatched_; } /** * <pre> * Whether or not the request's "predicate_filter" yielded any results for * the specified row. * </pre> * * <code>optional bool predicate_matched = 1;</code> */ public Builder setPredicateMatched(boolean value) { predicateMatched_ = value; onChanged(); return this; } /** * <pre> * Whether or not the request's "predicate_filter" yielded any results for * the specified row. 
* </pre> * * <code>optional bool predicate_matched = 1;</code> */ public Builder clearPredicateMatched() { predicateMatched_ = false; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:google.bigtable.v1.CheckAndMutateRowResponse) } // @@protoc_insertion_point(class_scope:google.bigtable.v1.CheckAndMutateRowResponse) private static final com.google.bigtable.v1.CheckAndMutateRowResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.bigtable.v1.CheckAndMutateRowResponse(); } public static com.google.bigtable.v1.CheckAndMutateRowResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CheckAndMutateRowResponse> PARSER = new com.google.protobuf.AbstractParser<CheckAndMutateRowResponse>() { public CheckAndMutateRowResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CheckAndMutateRowResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CheckAndMutateRowResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CheckAndMutateRowResponse> getParserForType() { return PARSER; } public com.google.bigtable.v1.CheckAndMutateRowResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
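// A minimal sketch of round-tripping the generated message above: build it with
// its Builder, serialize it, and parse it back. Only methods defined by or
// inherited into CheckAndMutateRowResponse (newBuilder, setPredicateMatched,
// build, toByteArray, parseFrom, getPredicateMatched) are used; the sketch
// class name itself is illustrative.
class CheckAndMutateRowResponseRoundTripSketch {
    public static void main(final String[] args) throws Exception {
        final com.google.bigtable.v1.CheckAndMutateRowResponse response =
                com.google.bigtable.v1.CheckAndMutateRowResponse.newBuilder()
                        .setPredicateMatched(true) // the message's only field
                        .build();
        // Serialize to the proto wire format and parse it back.
        final byte[] wire = response.toByteArray();
        final com.google.bigtable.v1.CheckAndMutateRowResponse parsed =
                com.google.bigtable.v1.CheckAndMutateRowResponse.parseFrom(wire);
        System.out.println(parsed.getPredicateMatched()); // prints "true"
    }
}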
/* * Copyright 2016-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.tetopology.management.impl; import java.util.List; import java.util.Map; import org.apache.commons.collections.MapUtils; import org.onosproject.tetopology.management.api.KeyId; import org.onosproject.tetopology.management.api.Network; import org.onosproject.tetopology.management.api.TeTopologyKey; import org.onosproject.tetopology.management.api.link.NetworkLink; import org.onosproject.tetopology.management.api.link.NetworkLinkKey; import org.onosproject.tetopology.management.api.node.NetworkNode; import org.onosproject.tetopology.management.api.node.NetworkNodeKey; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; /** * Network representation in store. */ public class InternalNetwork { private TeTopologyKey teTopologyKey; private List<KeyId> supportingNetworkIds; private boolean serverProvided; private List<NetworkNodeKey> nodeIds; private List<NetworkLinkKey> linkIds; private boolean childUpdate = false; /** * Creates an instance of InternalNetwork. * * @param network the Network object */ public InternalNetwork(Network network) { this.supportingNetworkIds = network .supportingNetworkIds() == null ? null : Lists.newArrayList(network .supportingNetworkIds()); this.serverProvided = network.isServerProvided(); // NetworkNodeKey if (MapUtils.isNotEmpty(network.nodes())) { this.nodeIds = Lists.newArrayList(); for (Map.Entry<KeyId, NetworkNode> entry : network.nodes().entrySet()) { this.nodeIds.add(new NetworkNodeKey(network.networkId(), entry.getKey())); } } // NetworkLinkKey if (MapUtils.isNotEmpty(network.links())) { this.linkIds = Lists.newArrayList(); for (Map.Entry<KeyId, NetworkLink> entry : network.links().entrySet()) { this.linkIds.add(new NetworkLinkKey(network.networkId(), entry.getKey())); } } } /** * Creates a default instance of InternalNetwork. */ public InternalNetwork() { } /** * Returns the supporting network Ids. * * @return the supportingNetworkIds */ public List<KeyId> supportingNetworkIds() { if (supportingNetworkIds == null) { return null; } return ImmutableList.copyOf(supportingNetworkIds); } /** * Returns if the network topology is provided by a server or is * configured by a client. * * @return true if the network is provided by a server; false otherwise */ public boolean serverProvided() { return serverProvided; } /** * @param serverProvided the serverProvided to set */ public void setServerProvided(boolean serverProvided) { this.serverProvided = serverProvided; } /** * Returns the list of node Ids in the network. * * @return the nodeIds */ public List<NetworkNodeKey> nodeIds() { return nodeIds; } /** * Returns the TE topology key for the network. * * @return the teTopologyKey */ public TeTopologyKey teTopologyKey() { return teTopologyKey; } /** * Sets the TE topology key for the network. 
* * @param teTopologyKey the teTopologyKey to set */ public void setTeTopologyKey(TeTopologyKey teTopologyKey) { this.teTopologyKey = teTopologyKey; } /** * Set the list of node Ids in the network. * * @param nodeIds the nodeIds to set */ public void setNodeIds(List<NetworkNodeKey> nodeIds) { this.nodeIds = nodeIds; } /** * Returns the list of link Ids in the network. * * @return the linkIds */ public List<NetworkLinkKey> linkIds() { return linkIds; } /** * Set the list of link Ids in the network. * * @param linkIds the linkIds to set */ public void setLinkIds(List<NetworkLinkKey> linkIds) { this.linkIds = linkIds; } /** * Returns the flag if the data was updated by child change. * * @return value of childUpdate */ public boolean childUpdate() { return childUpdate; } /** * Sets the flag if the data was updated by child change. * * @param childUpdate the childUpdate value to set */ public void setChildUpdate(boolean childUpdate) { this.childUpdate = childUpdate; } @Override public int hashCode() { return Objects.hashCode(teTopologyKey, nodeIds, linkIds, supportingNetworkIds, serverProvided); } @Override public boolean equals(Object object) { if (this == object) { return true; } if (object instanceof InternalNetwork) { InternalNetwork that = (InternalNetwork) object; return Objects.equal(this.teTopologyKey, that.teTopologyKey) && Objects.equal(this.nodeIds, that.nodeIds) && Objects.equal(this.linkIds, that.linkIds) && Objects.equal(this.supportingNetworkIds, that.supportingNetworkIds) && Objects.equal(this.serverProvided, that.serverProvided); } return false; } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("teTopologyKey", teTopologyKey) .add("nodeIds", nodeIds) .add("linkIds", linkIds) .add("supportingNetworkIds", supportingNetworkIds) .add("serverProvided", serverProvided) .add("childUpdate", childUpdate) .toString(); } }
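// A minimal sketch of populating an InternalNetwork store entry through the
// setters above. It assumes the same imports as InternalNetwork and that the
// topology, node and link keys were obtained elsewhere (their construction is
// not shown); the sketch class and method names are illustrative.
class InternalNetworkPopulateSketch {
    static InternalNetwork populate(TeTopologyKey topologyKey,
                                    List<NetworkNodeKey> nodeKeys,
                                    List<NetworkLinkKey> linkKeys) {
        InternalNetwork network = new InternalNetwork();
        network.setTeTopologyKey(topologyKey);
        network.setNodeIds(nodeKeys);
        network.setLinkIds(linkKeys);
        network.setServerProvided(true); // topology learned from a server, not configured
        network.setChildUpdate(false);   // not triggered by a child change
        return network;
    }
}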
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2020 the original author or authors. */ package org.assertj.core.api.iterable; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatNullPointerException; import static org.assertj.core.api.GroupAssertTestHelper.comparatorForElementFieldsWithNamesOf; import static org.assertj.core.api.GroupAssertTestHelper.comparatorForElementFieldsWithTypeOf; import static org.assertj.core.api.GroupAssertTestHelper.comparatorsByTypeOf; import static org.assertj.core.presentation.UnicodeRepresentation.UNICODE_REPRESENTATION; import static org.assertj.core.test.AlwaysEqualComparator.ALWAY_EQUALS_STRING; import static org.assertj.core.test.AlwaysEqualComparator.ALWAY_EQUALS_TIMESTAMP; import static org.assertj.core.test.AlwaysEqualComparator.alwaysEqual; import static org.assertj.core.util.Lists.newArrayList; import java.sql.Timestamp; import java.util.List; import java.util.function.Function; import org.assertj.core.api.AbstractIterableAssert; import org.assertj.core.api.AbstractListAssert; import org.assertj.core.test.AlwaysEqualComparator; import org.assertj.core.test.CartoonCharacter; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; /** * Tests for <code>{@link AbstractIterableAssert#flatExtracting(Function)}</code> * * @author Mateusz Haligowski */ class IterableAssert_flatExtracting_Test { private CartoonCharacter bart; private CartoonCharacter lisa; private CartoonCharacter maggie; private CartoonCharacter homer; private CartoonCharacter pebbles; private CartoonCharacter fred; private static final ThrowingExtractor<CartoonCharacter, List<CartoonCharacter>, Exception> childrenThrowingExtractor = CartoonCharacter::getChildren; private static final Function<CartoonCharacter, List<CartoonCharacter>> children = CartoonCharacter::getChildren; @SuppressWarnings("deprecation") private static final Extractor<CartoonCharacter, List<CartoonCharacter>> childrenExtractor = new Extractor<CartoonCharacter, List<CartoonCharacter>>() { @Override public List<CartoonCharacter> extract(CartoonCharacter input) { return input.getChildren(); } }; private final ThrowingExtractor<CartoonCharacter, List<CartoonCharacter>, Exception> throwingExtractor = new ThrowingExtractor<CartoonCharacter, List<CartoonCharacter>, Exception>() { @Override public List<CartoonCharacter> extractThrows(CartoonCharacter cartoonCharacter) throws Exception { if (cartoonCharacter.getChildren().isEmpty()) throw new Exception("no children"); return cartoonCharacter.getChildren(); } }; @BeforeEach void setUp() { bart = new CartoonCharacter("Bart Simpson"); lisa = new CartoonCharacter("Lisa Simpson"); maggie = new CartoonCharacter("Maggie Simpson"); homer = new CartoonCharacter("Homer Simpson"); homer.addChildren(bart, lisa, maggie); pebbles = new CartoonCharacter("Pebbles Flintstone"); fred = new CartoonCharacter("Fred Flintstone"); 
fred.addChildren(pebbles); } @Test void should_allow_assertions_on_joined_lists_when_extracting_children_with_extractor() { assertThat(newArrayList(homer, fred)).flatExtracting(childrenExtractor) .containsOnly(bart, lisa, maggie, pebbles); } @Test void should_allow_assertions_on_joined_lists_when_extracting_children() { assertThat(newArrayList(homer, fred)).flatExtracting(children) .containsOnly(bart, lisa, maggie, pebbles); } @Test void should_allow_assertions_on_empty_result_lists_with_extractor() { assertThat(newArrayList(bart, lisa, maggie)).flatExtracting(childrenExtractor) .isEmpty(); } @Test void should_allow_assertions_on_empty_result_lists() { assertThat(newArrayList(bart, lisa, maggie)).flatExtracting(children) .isEmpty(); } @Test void should_bubble_up_null_pointer_exception_from_extractor() { assertThatNullPointerException().isThrownBy(() -> assertThat(newArrayList(homer, null)).flatExtracting(childrenExtractor)); } @Test void should_bubble_up_null_pointer_exception_from_lambda_extractor() { assertThatNullPointerException().isThrownBy(() -> assertThat(newArrayList(homer, null)).flatExtracting(children)); } @Test void should_rethrow_throwing_extractor_checked_exception_as_a_runtime_exception() { List<CartoonCharacter> childCharacters = newArrayList(bart, lisa, maggie); assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> assertThat(childCharacters).flatExtracting(cartoonCharacter -> { if (cartoonCharacter.getChildren().isEmpty()) throw new Exception("no children"); return cartoonCharacter.getChildren(); })).withMessage("java.lang.Exception: no children"); } @Test void should_let_throwing_extractor_runtime_exception_bubble_up() { List<CartoonCharacter> childCharacters = newArrayList(bart, lisa, maggie); assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> assertThat(childCharacters).flatExtracting(cartoonCharacter -> { if (cartoonCharacter.getChildren().isEmpty()) throw new RuntimeException("no children"); return cartoonCharacter.getChildren(); })).withMessage("no children"); } @Test void should_allow_assertions_on_joined_lists_when_extracting_children_with_throwing_extractor() { List<CartoonCharacter> cartoonCharacters = newArrayList(homer, fred); assertThat(cartoonCharacters).flatExtracting(cartoonCharacter -> { if (cartoonCharacter.getChildren().isEmpty()) throw new Exception("no children"); return cartoonCharacter.getChildren(); }).containsOnly(bart, lisa, maggie, pebbles); } @Test void should_allow_assertions_on_joined_lists_when_extracting_children_with_anonymous_class_throwing_extractor() { List<CartoonCharacter> cartoonCharacters = newArrayList(homer, fred); assertThat(cartoonCharacters).flatExtracting(new ThrowingExtractor<CartoonCharacter, List<CartoonCharacter>, Exception>() { @Override public List<CartoonCharacter> extractThrows(CartoonCharacter cartoonCharacter) throws Exception { if (cartoonCharacter.getChildren().isEmpty()) throw new Exception("no children"); return cartoonCharacter.getChildren(); } }).containsOnly(bart, lisa, maggie, pebbles); } @Test void should_keep_existing_description_if_set_when_extracting_using_extractor() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(newArrayList(homer)).as("expected description") .flatExtracting(childrenExtractor) .isEmpty()) .withMessageContaining("[expected description]"); } @Test void should_keep_existing_description_if_set_when_extracting_using_function() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> 
assertThat(newArrayList(homer)).as("expected description") .flatExtracting(children) .isEmpty()) .withMessageContaining("[expected description]"); } @Test void should_keep_existing_description_if_set_when_extracting_using_single_field_name() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(newArrayList(homer)).as("expected description") .flatExtracting("children") .isEmpty()) .withMessageContaining("[expected description]"); } @Test void should_keep_existing_description_if_set_when_extracting_using_multiple_field_names() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(newArrayList(homer)).as("expected description") .flatExtracting("children", "name") .isEmpty()) .withMessageContaining("[expected description]"); } @Test void should_keep_existing_description_if_set_when_extracting_using_multiple_function_varargs() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(newArrayList(homer)).as("expected description") .flatExtracting(children, children) .isEmpty()) .withMessageContaining("[expected description]"); } @Test void should_keep_existing_description_if_set_when_extracting_using_multiple_throwing_extractors_varargs() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(newArrayList(homer)).as("expected description") .flatExtracting(throwingExtractor, throwingExtractor) .isEmpty()) .withMessageContaining("[expected description]"); } @Test void flatExtracting_should_keep_assertion_state_with_extractor() { // GIVEN AlwaysEqualComparator<CartoonCharacter> cartoonCharacterAlwaysEqualComparator = alwaysEqual(); // WHEN // not all comparators are used but we want to test that they are passed correctly after extracting // @format:off AbstractListAssert<?, ?, ?, ?> assertion = assertThat(newArrayList(homer, fred)).as("test description") .withFailMessage("error message") .withRepresentation(UNICODE_REPRESENTATION) .usingComparatorForElementFieldsWithNames(ALWAY_EQUALS_STRING, "foo") .usingComparatorForElementFieldsWithType(ALWAY_EQUALS_TIMESTAMP, Timestamp.class) .usingComparatorForType(cartoonCharacterAlwaysEqualComparator, CartoonCharacter.class) .flatExtracting(childrenExtractor) .contains(bart, lisa, new CartoonCharacter("Unknown")); // @format:on // THEN assertThat(assertion.descriptionText()).isEqualTo("test description"); assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION); assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message"); assertThat(comparatorsByTypeOf(assertion).get(CartoonCharacter.class)).isSameAs(cartoonCharacterAlwaysEqualComparator); assertThat(comparatorForElementFieldsWithTypeOf(assertion).get(Timestamp.class)).isSameAs(ALWAY_EQUALS_TIMESTAMP); assertThat(comparatorForElementFieldsWithNamesOf(assertion).get("foo")).isSameAs(ALWAY_EQUALS_STRING); } @Test void flatExtracting_should_keep_assertion_state() { // GIVEN AlwaysEqualComparator<CartoonCharacter> cartoonCharacterAlwaysEqualComparator = alwaysEqual(); // WHEN // not all comparators are used but we want to test that they are passed correctly after extracting // @format:off AbstractListAssert<?, ?, ?, ?> assertion = assertThat(newArrayList(homer, fred)).as("test description") .withFailMessage("error message") .withRepresentation(UNICODE_REPRESENTATION) .usingComparatorForElementFieldsWithNames(ALWAY_EQUALS_STRING, "foo") .usingComparatorForElementFieldsWithType(ALWAY_EQUALS_TIMESTAMP, Timestamp.class) .usingComparatorForType(cartoonCharacterAlwaysEqualComparator, 
CartoonCharacter.class) .flatExtracting(children) .contains(bart, lisa, new CartoonCharacter("Unknown")); // @format:on // THEN assertThat(assertion.descriptionText()).isEqualTo("test description"); assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION); assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message"); assertThat(comparatorsByTypeOf(assertion).get(CartoonCharacter.class)).isSameAs(cartoonCharacterAlwaysEqualComparator); assertThat(comparatorForElementFieldsWithTypeOf(assertion).get(Timestamp.class)).isSameAs(ALWAY_EQUALS_TIMESTAMP); assertThat(comparatorForElementFieldsWithNamesOf(assertion).get("foo")).isSameAs(ALWAY_EQUALS_STRING); } @Test void flatExtracting_with_ThrowingExtractor_should_keep_assertion_state() { // GIVEN AlwaysEqualComparator<CartoonCharacter> cartoonCharacterAlwaysEqualComparator = alwaysEqual(); // WHEN // not all comparators are used but we want to test that they are passed correctly after extracting // @format:off AbstractListAssert<?, ?, ?, ?> assertion = assertThat(newArrayList(homer, fred)).as("test description") .withFailMessage("error message") .withRepresentation(UNICODE_REPRESENTATION) .usingComparatorForElementFieldsWithNames(ALWAY_EQUALS_STRING, "foo") .usingComparatorForElementFieldsWithType(ALWAY_EQUALS_TIMESTAMP, Timestamp.class) .usingComparatorForType(cartoonCharacterAlwaysEqualComparator, CartoonCharacter.class) .flatExtracting(childrenThrowingExtractor) .contains(bart, lisa, new CartoonCharacter("Unknown")); // @format:on // THEN assertThat(assertion.descriptionText()).isEqualTo("test description"); assertThat(assertion.info.representation()).isEqualTo(UNICODE_REPRESENTATION); assertThat(assertion.info.overridingErrorMessage()).isEqualTo("error message"); assertThat(comparatorsByTypeOf(assertion).get(CartoonCharacter.class)).isSameAs(cartoonCharacterAlwaysEqualComparator); assertThat(comparatorForElementFieldsWithTypeOf(assertion).get(Timestamp.class)).isSameAs(ALWAY_EQUALS_TIMESTAMP); assertThat(comparatorForElementFieldsWithNamesOf(assertion).get("foo")).isSameAs(ALWAY_EQUALS_STRING); } }
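// A small sketch of the idiom the tests above exercise: flatExtracting flattens
// each parent's list of children into a single list before the assertion runs.
// It reuses the CartoonCharacter fixture and the static assertThat/newArrayList
// imports of the test class; the sketch class and method names are illustrative.
class FlatExtractingUsageSketch {
    void childrenAcrossFamilies(CartoonCharacter homer, CartoonCharacter fred,
                                CartoonCharacter bart, CartoonCharacter pebbles) {
        assertThat(newArrayList(homer, fred))
            .flatExtracting(CartoonCharacter::getChildren) // one child list per parent, flattened
            .contains(bart, pebbles);
    }
}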
/* * Copyright 2017 Mirko Sertic * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.mirkosertic.bytecoder.unittest; import com.sun.net.httpserver.HttpServer; import de.mirkosertic.bytecoder.allocator.Allocator; import de.mirkosertic.bytecoder.backend.CompileOptions; import de.mirkosertic.bytecoder.backend.CompileResult; import de.mirkosertic.bytecoder.backend.CompileTarget; import de.mirkosertic.bytecoder.backend.LLVMOptimizationLevel; import de.mirkosertic.bytecoder.backend.js.JSCompileResult; import de.mirkosertic.bytecoder.backend.llvm.LLVMCompileResult; import de.mirkosertic.bytecoder.backend.llvm.LLVMWriterUtils; import de.mirkosertic.bytecoder.backend.wasm.WASMCompileResult; import de.mirkosertic.bytecoder.backend.wasm.WASMWriterUtils; import de.mirkosertic.bytecoder.core.BytecodeMethodSignature; import de.mirkosertic.bytecoder.core.BytecodeObjectTypeRef; import de.mirkosertic.bytecoder.core.BytecodePrimitiveTypeRef; import de.mirkosertic.bytecoder.core.BytecodeTypeRef; import de.mirkosertic.bytecoder.optimizer.KnownOptimizer; import org.apache.commons.io.FileUtils; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.Description; import org.junit.runner.notification.Failure; import org.junit.runner.notification.RunNotifier; import org.junit.runners.ParentRunner; import org.junit.runners.model.FrameworkMethod; import org.junit.runners.model.InitializationError; import org.junit.runners.model.TestClass; import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeOptions; import org.openqa.selenium.logging.LogEntry; import org.openqa.selenium.logging.LogType; import org.openqa.selenium.logging.LoggingPreferences; import org.openqa.selenium.remote.CapabilityType; import org.testcontainers.Testcontainers; import org.testcontainers.containers.BrowserWebDriverContainer; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.net.HttpURLConnection; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicReference; import java.util.logging.Level; public class BytecoderUnitTestRunner extends ParentRunner<FrameworkMethodWithTestOption> { private static final Slf4JLogger LOGGER = new Slf4JLogger(); private final List<TestOption> testOptions; private final String[] additionalClassesToLink; private final String[] additionalResources; private static HttpServer TESTSERVER; private static BrowserWebDriverContainer SELENIUMCONTAINER; private static final AtomicReference<File> HTTPFILESDIR = new AtomicReference<>(); public BytecoderUnitTestRunner(final Class aClass) throws InitializationError { super(aClass); testOptions = new ArrayList<>(); final BytecoderTestOptions declaredOptions = 
getTestClass().getJavaClass().getAnnotation(BytecoderTestOptions.class); if (declaredOptions != null) { if (declaredOptions.includeJVM()) { testOptions.add(new TestOption(null, false, false, false, false)); } if (declaredOptions.value().length == 0 && declaredOptions.includeTestPermutations()) { testOptions.add(new TestOption(CompileTarget.BackendType.js, false, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.js, false, false, true, false)); testOptions.add(new TestOption(CompileTarget.BackendType.js, true, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.wasm, false, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.wasm, true, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.wasm_llvm, false, false, false, false)); //testOptions.add(new TestOption(CompileTarget.BackendType.wasm_llvm, false, false, false, true)); } else { for (final BytecoderTestOption o : declaredOptions.value()) { testOptions.add(new TestOption(o.backend(), o.preferStackifier(), o.exceptionsEnabled(), o.minify(), o. escapeAnalysisEnabled())); } } additionalClassesToLink = declaredOptions.additionalClassesToLink(); additionalResources = declaredOptions.additionalResources(); } else { testOptions.add(new TestOption(null, false, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.js, false, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.js, false, false, true, false)); testOptions.add(new TestOption(CompileTarget.BackendType.js, true, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.wasm, false, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.wasm, true, false, false, false)); testOptions.add(new TestOption(CompileTarget.BackendType.wasm_llvm, false, false, false, false)); //testOptions.add(new TestOption(CompileTarget.BackendType.wasm_llvm, false, false, false, true)); additionalClassesToLink = new String[0]; additionalResources = new String[0]; } } @Override public Description getDescription() { final TestClass testClass = getTestClass(); return Description.createSuiteDescription(testClass.getName(), testClass.getJavaClass().getAnnotations()); } @Override protected List<FrameworkMethodWithTestOption> getChildren() { final List<FrameworkMethodWithTestOption> testMethods = new ArrayList<>(); final TestClass testClass = getTestClass(); final Method[] classMethods = testClass.getJavaClass().getDeclaredMethods(); for (final Method classMethod : classMethods) { final Class retClass = classMethod.getReturnType(); final int length = classMethod.getParameterTypes().length; final int modifiers = classMethod.getModifiers(); if (null == retClass || 0 != length || Modifier.isStatic(modifiers) || !Modifier.isPublic(modifiers) || Modifier.isInterface(modifiers) || Modifier.isAbstract(modifiers)) { continue; } final String methodName = classMethod.getName(); if (methodName.toUpperCase().startsWith("TEST") || null != classMethod.getAnnotation(Test.class)) { if (classMethod.isAnnotationPresent(Ignore.class)) { testMethods.add(new FrameworkMethodWithTestOption(classMethod, testOptions.get(0))); } else { for (final TestOption o : testOptions) { testMethods.add(new FrameworkMethodWithTestOption(classMethod, o)); } } } } return testMethods; } @Override protected Description describeChild(final FrameworkMethodWithTestOption frameworkMethod) { final TestClass testClass = getTestClass(); return 
Description.createTestDescription(testClass.getJavaClass(), frameworkMethod.getName()); } private void testJVMBackendFrameworkMethod(final FrameworkMethod aFrameworkMethod, final RunNotifier aRunNotifier) { if ("".equals(System.getProperty("BYTECODER_DISABLE_JVMTESTS", ""))) { final TestClass testClass = getTestClass(); final Description theDescription = Description.createTestDescription(testClass.getJavaClass(), aFrameworkMethod.getName() + " JVM Target"); aRunNotifier.fireTestStarted(theDescription); try { // Simply invoke using reflection final Object theInstance = testClass.getJavaClass().getDeclaredConstructor().newInstance(); final Method theMethod = aFrameworkMethod.getMethod(); theMethod.invoke(theInstance); aRunNotifier.fireTestFinished(theDescription); } catch (final Exception e) { aRunNotifier.fireTestFailure(new Failure(theDescription, e)); } } } private static int getTestWebServerPort() { return Integer.parseInt(System.getProperty("BYTECODER_TESTSERVERPORT", "10000")); } private static synchronized BrowserWebDriverContainer initializeSeleniumContainer() { if (SELENIUMCONTAINER == null) { java.util.logging.Logger.getLogger("org.openqa.selenium").setLevel(Level.OFF); final ChromeOptions theOptions = new ChromeOptions().setHeadless(true); theOptions.addArguments("--js-flags=experimental-wasm-eh"); theOptions.addArguments("--enable-experimental-wasm-eh"); theOptions.addArguments("disable-infobars"); // disabling infobars theOptions.addArguments("--disable-dev-shm-usage"); // overcome limited resource problems theOptions.addArguments("--no-sandbox"); // Bypass OS security model theOptions.setExperimentalOption("useAutomationExtension", false); final LoggingPreferences theLoggingPreferences = new LoggingPreferences(); theLoggingPreferences.enable(LogType.BROWSER, Level.ALL); theOptions.setCapability(CapabilityType.LOGGING_PREFS, theLoggingPreferences); theOptions.setCapability("goog:loggingPrefs", theLoggingPreferences); Testcontainers.exposeHostPorts(getTestWebServerPort()); SELENIUMCONTAINER = new BrowserWebDriverContainer() .withCapabilities(theOptions) .withRecordingMode(BrowserWebDriverContainer.VncRecordingMode.SKIP, new File(".")); SELENIUMCONTAINER.start(); Runtime.getRuntime().addShutdownHook(new Thread(() -> SELENIUMCONTAINER.stop())); } return SELENIUMCONTAINER; } private static void initializeTestWebServer() throws IOException { if (TESTSERVER == null) { java.util.logging.Logger.getLogger("sun.net.httpserver.ExchangeImpl").setLevel(Level.OFF); TESTSERVER = HttpServer.create(); final int port = getTestWebServerPort(); TESTSERVER.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), port), 20); TESTSERVER.createContext("/", httpExchange -> { final File filesDir = HTTPFILESDIR.get(); final String fileName = httpExchange.getRequestURI().getPath(); final int lastSlash = fileName.lastIndexOf('/'); final String requestedFileName = fileName.substring(lastSlash + 1); final File requestedFile = new File(filesDir, requestedFileName); if (requestedFile.exists()) { if (requestedFileName.endsWith(".html")) { httpExchange.getResponseHeaders().add("Content-Type", "text/html"); } else if (requestedFileName.endsWith(".js")) { httpExchange.getResponseHeaders().add("Content-Type", "text/javascript"); } else if (requestedFileName.endsWith(".wasm")) { httpExchange.getResponseHeaders().add("Content-Type", "application/wasm"); } else { httpExchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); } httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, 
requestedFile.length()); FileUtils.copyFile(requestedFile, httpExchange.getResponseBody()); } else { httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_NOT_FOUND, 0); } httpExchange.close(); }); TESTSERVER.start(); Runtime.getRuntime().addShutdownHook(new Thread(() -> TESTSERVER.stop(0))); } } private static void initializeWebRoot(final File aFile) { HTTPFILESDIR.set(aFile); } private static URL getTestFileUrl(final File aFile) throws MalformedURLException { final String theFileName = aFile.getName(); final InetSocketAddress theServerAddress = TESTSERVER.getAddress(); return new URL(String.format("http://%s:%d/%s", "host.testcontainers.internal", theServerAddress.getPort(), theFileName)); } private void testJSBackendFrameworkMethod(final FrameworkMethod aFrameworkMethod, final RunNotifier aRunNotifier, final TestOption aTestOption) { if ("".equals(System.getProperty("BYTECODER_DISABLE_JSTESTS", ""))) { final TestClass testClass = getTestClass(); final Description theDescription = Description.createTestDescription(testClass.getJavaClass(), aFrameworkMethod.getName() + " " + aTestOption.toDescription()); aRunNotifier.fireTestStarted(theDescription); try { final CompileTarget theCompileTarget = new CompileTarget(testClass.getJavaClass().getClassLoader(), CompileTarget.BackendType.js); final BytecodeMethodSignature theSignature = theCompileTarget.toMethodSignature(aFrameworkMethod.getMethod()); final BytecodeObjectTypeRef theTestClass = new BytecodeObjectTypeRef(testClass.getName()); final BytecodeMethodSignature theTestClassConstructorSignature = new BytecodeMethodSignature(BytecodePrimitiveTypeRef.VOID, new BytecodeTypeRef[0]); final StringWriter theStrWriter = new StringWriter(); final PrintWriter theCodeWriter = new PrintWriter(theStrWriter); final CompileOptions theOptions = new CompileOptions(LOGGER, true, KnownOptimizer.ALL, aTestOption.isExceptionsEnabled(), "bytecoder", 512, 512, aTestOption.isMinify(), aTestOption.isPreferStackifier(), Allocator.linear, additionalClassesToLink, additionalResources, null, aTestOption.isEscapeAnalysisEnabled()); final JSCompileResult result = (JSCompileResult) theCompileTarget.compile(theOptions, testClass.getJavaClass(), aFrameworkMethod.getName(), theSignature); final CompileResult.StringContent content = (CompileResult.StringContent) result.getContent()[0]; theCodeWriter.println(content.asString()); final String theFilename = result.getMinifier().toClassName(theTestClass) + "." + result.getMinifier().toMethodName(aFrameworkMethod.getName(), theSignature) + "_" + aTestOption.toFilePrefix() + ".html"; theCodeWriter.println(); theCodeWriter.println("console.log(\"Starting test\");"); theCodeWriter.println("bytecoder.bootstrap();"); theCodeWriter.println("var theTestInstance = " + result.getMinifier().toClassName(theTestClass) + "." + result.getMinifier().toSymbol("__runtimeclass") + "." + result.getMinifier().toMethodName("$newInstance", theTestClassConstructorSignature) + "();"); theCodeWriter.println("try {"); theCodeWriter.println(" theTestInstance." + result.getMinifier().toMethodName(aFrameworkMethod.getName(), theSignature) + "();"); theCodeWriter.println(" console.log(\"Test finished OK\");"); theCodeWriter.println("} catch (e) {"); theCodeWriter.println(" if (e.exception) {"); theCodeWriter.println(" console.log(\"Test finished with exception. 
Message = \" + bytecoder.toJSString(e.exception.message));"); theCodeWriter.println(" } else {"); theCodeWriter.println(" console.log(\"Test finished with exception.\");"); theCodeWriter.println(" }"); theCodeWriter.println(" console.log(e.stack);"); theCodeWriter.println("}"); theCodeWriter.flush(); final File theWorkingDirectory = new File("."); initializeTestWebServer(); final BrowserWebDriverContainer theContainer = initializeSeleniumContainer(); final File theMavenTargetDir = new File(theWorkingDirectory, "target"); final File theGeneratedFilesDir = new File(theMavenTargetDir, "bytecoderjs"); theGeneratedFilesDir.mkdirs(); // Copy additional resources for (final CompileResult.Content c : result.getContent()) { if (c instanceof CompileResult.URLContent) { try (final FileOutputStream fos = new FileOutputStream(new File(theGeneratedFilesDir, c.getFileName()))) { c.writeTo(fos); } } } final File theGeneratedFile = new File(theGeneratedFilesDir, theFilename); final PrintWriter theWriter = new PrintWriter(theGeneratedFile); theWriter.println("<html><body><script>"); theWriter.println(theStrWriter.toString()); theWriter.println("</script></body></html>"); theWriter.flush(); theWriter.close(); initializeWebRoot(theGeneratedFile.getParentFile()); final URL theTestURL = getTestFileUrl(theGeneratedFile); final WebDriver theDriver = theContainer.getWebDriver(); theDriver.get(theTestURL.toString()); final List<LogEntry> theAll = theDriver.manage().logs().get(LogType.BROWSER).getAll(); if (1 > theAll.size()) { aRunNotifier.fireTestFailure(new Failure(theDescription, new RuntimeException("No console output from browser"))); } for (final LogEntry theEntry : theAll) { LOGGER.info(theEntry.getMessage()); } final LogEntry theLast = theAll.get(theAll.size() - 1); if (!theLast.getMessage().contains("Test finished OK")) { aRunNotifier.fireTestFailure(new Failure(theDescription, new RuntimeException("Test did not succeed! 
Got : " + theLast.getMessage()))); } } catch (final Exception e) { aRunNotifier.fireTestFailure(new Failure(theDescription, e)); } finally { aRunNotifier.fireTestFinished(theDescription); } } } private void testWASMASTBackendFrameworkMethod(final FrameworkMethod aFrameworkMethod, final RunNotifier aRunNotifier, final TestOption aTestOption) { if (!"".equals(System.getProperty("BYTECODER_DISABLE_WASMTESTS_STACKIFY", "")) && aTestOption.isPreferStackifier()) { return; } if (!"".equals(System.getProperty("BYTECODER_DISABLE_WASMTESTS_RELOOP", "")) && !aTestOption.isPreferStackifier()) { return; } if ("".equals(System.getProperty("BYTECODER_DISABLE_WASMTESTS", ""))) { final TestClass testClass = getTestClass(); final Description theDescription = Description.createTestDescription(testClass.getJavaClass(), aFrameworkMethod.getName() + " " + aTestOption.toDescription()); aRunNotifier.fireTestStarted(theDescription); try { final CompileTarget theCompileTarget = new CompileTarget(testClass.getJavaClass().getClassLoader(), CompileTarget.BackendType.wasm); final BytecodeMethodSignature theSignature = theCompileTarget.toMethodSignature(aFrameworkMethod.getMethod()); final BytecodeObjectTypeRef theTestClassType = new BytecodeObjectTypeRef(testClass.getName()); final CompileOptions theOptions = new CompileOptions(LOGGER, true, KnownOptimizer.ALL, false, "bytecoder", 512, 512, aTestOption.isMinify(), aTestOption.isPreferStackifier(), Allocator.linear, additionalClassesToLink, additionalResources, null, aTestOption.isEscapeAnalysisEnabled()); final WASMCompileResult theResult = (WASMCompileResult) theCompileTarget.compile(theOptions, testClass.getJavaClass(), aFrameworkMethod.getName(), theSignature); final WASMCompileResult.WASMCompileContent textualContent = (WASMCompileResult.WASMCompileContent) theResult.getContent()[0]; final WASMCompileResult.WASMCompileContent binaryContent = (WASMCompileResult.WASMCompileContent)theResult.getContent()[1]; final WASMCompileResult.WASMCompileContent jsContent = (WASMCompileResult.WASMCompileContent)theResult.getContent()[2]; final WASMCompileResult.WASMCompileContent sourceMapContent = (WASMCompileResult.WASMCompileContent)theResult.getContent()[3]; final String theFileName = theResult.getMinifier().toClassName(theTestClassType) + "." + theResult.getMinifier().toMethodName(aFrameworkMethod.getName(), theSignature) + "_" + aTestOption.toFilePrefix()+ ".html"; final File theWorkingDirectory = new File("."); initializeTestWebServer(); final BrowserWebDriverContainer theContainer = initializeSeleniumContainer(); final File theMavenTargetDir = new File(theWorkingDirectory, "target"); final File theGeneratedFilesDir = new File(theMavenTargetDir, "bytecoderwasm"); theGeneratedFilesDir.mkdirs(); final File theGeneratedFile = new File(theGeneratedFilesDir, theFileName); final String theWASMFileName = theResult.getMinifier().toClassName(theTestClassType) + "." 
+ theResult.getMinifier().toMethodName(aFrameworkMethod.getName(), theSignature) + "_" + aTestOption.toFilePrefix() + ".wasm"; final PrintWriter theWriter = new PrintWriter(theGeneratedFile); theWriter.println("<html>"); theWriter.println(" <body>"); theWriter.println(" <h1>Module code</h1>"); theWriter.println(" <h1>Compilation result</h1>"); theWriter.println(" <pre id=\"compileresult\">"); theWriter.println(" </pre>"); theWriter.println(" <script>"); theWriter.println(jsContent.asString()); theWriter.println(" function compile() {"); theWriter.println(" console.log('Test started');"); theWriter.println(" try {"); theWriter.println(" var theInstantiatePromise = WebAssembly.instantiateStreaming(fetch('" + theWASMFileName + "'), bytecoder.imports);"); theWriter.println(" theInstantiatePromise.then("); theWriter.println(" function (resolved) {"); theWriter.println(" var wasmModule = resolved.module;"); theWriter.println(" bytecoder.init(resolved.instance);"); theWriter.println(" bytecoder.exports.initMemory(0);"); theWriter.println(" console.log(\"Memory initialized\")"); theWriter.println(" console.log(\"Used memory in bytes \" + bytecoder.exports.usedMem());"); theWriter.println(" console.log(\"Free memory in bytes \" + bytecoder.exports.freeMem());"); theWriter.println(" bytecoder.exports.bootstrap(0);"); theWriter.println(" bytecoder.initializeFileIO();"); theWriter.println(" console.log(\"Used memory after bootstrap in bytes \" + bytecoder.exports.usedMem());"); theWriter.println(" console.log(\"Free memory after bootstrap in bytes \" + bytecoder.exports.freeMem());"); theWriter.println(" console.log(\"Creating test instance\")"); theWriter.print(" var theTest = bytecoder.exports."); theWriter.print(WASMWriterUtils.toMethodName(theTestClassType, "$newInstance", new BytecodeMethodSignature(BytecodePrimitiveTypeRef.VOID, new BytecodeTypeRef[0]))); theWriter.println("(0);"); theWriter.println(" console.log(\"Bootstrapped\")"); //theWriter.println(" bytecoder.exports.logAllocations(0);"); theWriter.println(" try {"); theWriter.println(" console.log(\"Starting main method\")"); theWriter.println(" bytecoder.exports.main(theTest);"); theWriter.println(" console.log(\"Main finished\")"); theWriter.println(" console.log(\"Test finished OK\")"); theWriter.println(" } catch (e) {"); theWriter.println(" console.log(\"Test threw error\")"); theWriter.println(" throw e;"); theWriter.println(" }"); theWriter.println(" },"); theWriter.println(" function (rejected) {"); theWriter.println(" console.log(\"Error instantiating webassembly\");"); theWriter.println(" console.log(rejected);"); theWriter.println(" }"); theWriter.println(" );"); theWriter.println(" } catch (e) {"); theWriter.println(" document.getElementById(\"compileresult\").innerText = e.toString();"); theWriter.println(" console.log(e.toString());"); theWriter.println(" console.log(e.stack);"); theWriter.println(" if (bytecoder.runningInstance) {"); theWriter.println(" }"); theWriter.println(" }"); theWriter.println(" }"); theWriter.println(); theWriter.println(" compile();"); theWriter.println(" </script>"); theWriter.println(" </body>"); theWriter.println("</html>"); theWriter.flush(); theWriter.close(); try (final FileOutputStream fos = new FileOutputStream(new File(theGeneratedFilesDir, theResult.getMinifier().toClassName(theTestClassType) + "." 
+ theResult.getMinifier().toMethodName(aFrameworkMethod.getName(), theSignature) + "_" + aTestOption.toFilePrefix() + ".wat"))) { textualContent.writeTo(fos); } try (final FileOutputStream fos = new FileOutputStream(new File(theGeneratedFilesDir, theResult.getMinifier().toClassName(theTestClassType) + "." + theResult.getMinifier().toMethodName(aFrameworkMethod.getName(), theSignature) + "_" + aTestOption.toFilePrefix() + ".js"))) { jsContent.writeTo(fos); } try (final FileOutputStream fos = new FileOutputStream(new File(theGeneratedFilesDir, theWASMFileName))) { binaryContent.writeTo(fos); } try (final FileOutputStream fos = new FileOutputStream(new File(theGeneratedFilesDir, theResult.getMinifier().toClassName(theTestClassType) + "." + theResult.getMinifier().toMethodName(aFrameworkMethod.getName(), theSignature) + "_" + aTestOption.toFilePrefix() + ".wasm.map"))) { sourceMapContent.writeTo(fos); } // Copy additional resources for (final CompileResult.Content c : theResult.getContent()) { if (c instanceof CompileResult.URLContent) { try (final FileOutputStream fos = new FileOutputStream(new File(theGeneratedFilesDir, c.getFileName()))) { c.writeTo(fos); } } } initializeWebRoot(theGeneratedFile.getParentFile()); // Invoke test in browser final WebDriver theDriver = theContainer.getWebDriver(); final URL theTestURL = getTestFileUrl(theGeneratedFile); theDriver.get(theTestURL.toString()); final long theStart = System.currentTimeMillis(); boolean theTestSuccedded = false; while (!theTestSuccedded && 10 * 1000 > System.currentTimeMillis() - theStart) { final List<LogEntry> theAll = theDriver.manage().logs().get(LogType.BROWSER).getAll(); for (final LogEntry theEntry : theAll) { final String theMessage = theEntry.getMessage(); System.out.println(theMessage); if (theMessage.contains("Test finished OK")) { theTestSuccedded = true; } } if (!theTestSuccedded) { Thread.sleep(100); } } if (!theTestSuccedded) { aRunNotifier.fireTestFailure(new Failure(theDescription, new RuntimeException("Test did not succeed!"))); } } catch (final Exception e) { aRunNotifier.fireTestFailure(new Failure(theDescription, e)); } finally { aRunNotifier.fireTestFinished(theDescription); } } } private void testLLVMWASMASTBackendFrameworkMethod(final FrameworkMethod aFrameworkMethod, final RunNotifier aRunNotifier, final TestOption aTestOption) { if ("".equals(System.getProperty("BYTECODER_DISABLE_LLVMWASMTESTS", ""))) { final TestClass testClass = getTestClass(); final Description theDescription = Description.createTestDescription(testClass.getJavaClass(), aFrameworkMethod.getName() + " " + aTestOption.toDescription()); aRunNotifier.fireTestStarted(theDescription); try { final CompileTarget theCompileTarget = new CompileTarget(testClass.getJavaClass().getClassLoader(), CompileTarget.BackendType.wasm_llvm); final BytecodeMethodSignature theSignature = theCompileTarget.toMethodSignature(aFrameworkMethod.getMethod()); final BytecodeObjectTypeRef theTypeRef = new BytecodeObjectTypeRef(testClass.getName()); final CompileOptions theOptions = new CompileOptions(LOGGER, true, KnownOptimizer.ALL, false, "bytecoder", 512, 512, aTestOption.isMinify(), aTestOption.isPreferStackifier(), Allocator.linear, additionalClassesToLink, additionalResources, LLVMOptimizationLevel.defaultValue(), aTestOption.isEscapeAnalysisEnabled()); final LLVMCompileResult theResult = (LLVMCompileResult) theCompileTarget.compile(theOptions, testClass.getJavaClass(), aFrameworkMethod.getName(), theSignature); final CompileResult.StringContent textualContent = 
(CompileResult.StringContent) theResult.getContent()[0]; final CompileResult.StringContent jsContent = (CompileResult.StringContent)theResult.getContent()[1]; final CompileResult.BinaryContent binaryContent = (CompileResult.BinaryContent)theResult.getContent()[3]; final String theFileName = LLVMWriterUtils.toMethodName(theTypeRef, aFrameworkMethod.getName(), theSignature) + "_" + aTestOption.toFilePrefix()+ ".html"; final String theWASMFileName = LLVMWriterUtils.toMethodName(theTypeRef, aFrameworkMethod.getName(), theSignature) + "_bytecoder.wasm"; final File theWorkingDirectory = new File("."); initializeTestWebServer(); final BrowserWebDriverContainer theContainer = initializeSeleniumContainer(); final File theMavenTargetDir = new File(theWorkingDirectory, "target"); final File theGeneratedFilesDir = new File(theMavenTargetDir, "bytecoderllvmwasm"); theGeneratedFilesDir.mkdirs(); final File theGeneratedFile = new File(theGeneratedFilesDir, theFileName); final PrintWriter theWriter = new PrintWriter(theGeneratedFile); theWriter.println("<html>"); theWriter.println(" <body>"); theWriter.println(" <h1>Module code</h1>"); theWriter.println(" <h1>Compilation result</h1>"); theWriter.println(" <pre id=\"compileresult\">"); theWriter.println(" </pre>"); theWriter.println(" <script>"); theWriter.println(jsContent.asString()); theWriter.println(" function compile() {"); theWriter.println(" console.log('Test started');"); theWriter.println(" try {"); theWriter.println(); theWriter.println(" var theInstantiatePromise = WebAssembly.instantiateStreaming(fetch('" + theWASMFileName + "'), bytecoder.imports);"); theWriter.println(" theInstantiatePromise.then("); theWriter.println(" function (resolved) {"); theWriter.println(" var wasmModule = resolved.module;"); theWriter.println(" bytecoder.init(resolved.instance);"); theWriter.println(" bytecoder.exports.initMemory(0);"); theWriter.println(" console.log(\"Memory initialized\")"); theWriter.println(" console.log(\"Used memory in bytes \" + bytecoder.exports.usedMem());"); theWriter.println(" console.log(\"Free memory in bytes \" + bytecoder.exports.freeMem());"); theWriter.println(" bytecoder.exports.bootstrap(0);"); theWriter.println(" bytecoder.initializeFileIO();"); theWriter.println(" console.log(\"Used memory after bootstrap in bytes \" + bytecoder.exports.usedMem());"); theWriter.println(" console.log(\"Free memory after bootstrap in bytes \" + bytecoder.exports.freeMem());"); theWriter.println(" console.log(\"Creating test instance\")"); theWriter.print(" var theClass = bytecoder.exports."); theWriter.print(LLVMWriterUtils.toClassName(theTypeRef)); theWriter.println("__init();"); theWriter.print(" var theTest = bytecoder.exports."); theWriter.print(LLVMWriterUtils.toClassName(theTypeRef)); theWriter.println("_VOID$newInstance(theClass);"); theWriter.println(" console.log(\"Bootstrapped\")"); theWriter.println(" try {"); theWriter.println(" console.log(\"Starting main method\");"); theWriter.println(" bytecoder.exports.main(theTest);"); theWriter.println(" console.log(\"Main finished\");"); theWriter.println(" console.log(\"Test finished OK\");"); theWriter.println(" } catch (e) {"); theWriter.println(" console.log(\"Test threw error\");"); theWriter.println(" throw e;"); theWriter.println(" }"); theWriter.println(" },"); theWriter.println(" function (rejected) {"); theWriter.println(" console.log(\"Error instantiating webassembly\");"); theWriter.println(" console.log(rejected);"); theWriter.println(" }"); theWriter.println(" );"); 
theWriter.println(" } catch (e) {"); theWriter.println(" document.getElementById(\"compileresult\").innerText = e.toString();"); theWriter.println(" console.log(e.toString());"); theWriter.println(" console.log(e.stack);"); theWriter.println(" if (bytecoder.runningInstance) {"); theWriter.println(" }"); theWriter.println(" }"); theWriter.println(" }"); theWriter.println(); theWriter.println(" compile();"); theWriter.println(" </script>"); theWriter.println(" </body>"); theWriter.println("</html>"); theWriter.flush(); theWriter.close(); for (final CompileResult.Content theContent : theResult.getContent()) { if (theContent instanceof CompileResult.URLContent) { try (final FileOutputStream fos = new FileOutputStream(new File(theGeneratedFilesDir, theContent.getFileName()))) { theContent.writeTo(fos); } } else { final File targetFile = new File(theGeneratedFilesDir, LLVMWriterUtils.toMethodName(theTypeRef, aFrameworkMethod.getName(), theSignature) + "_" + theContent.getFileName()); try (final FileOutputStream fos = new FileOutputStream(targetFile)) { theContent.writeTo(fos); } } } initializeWebRoot(theGeneratedFile.getParentFile()); // Invoke test in browser final WebDriver theDriver = theContainer.getWebDriver(); final URL theTestURL = getTestFileUrl(theGeneratedFile); final long theStart = System.currentTimeMillis(); boolean theTestSuccedded = false; theDriver.get(theTestURL.toString()); while (!theTestSuccedded && 10 * 1000 > System.currentTimeMillis() - theStart) { final List<LogEntry> theAll = theDriver.manage().logs().get(LogType.BROWSER).getAll(); for (final LogEntry theEntry : theAll) { final String theMessage = theEntry.getMessage(); System.out.println(theMessage); if (theMessage.contains("Test finished OK")) { theTestSuccedded = true; } } if (!theTestSuccedded) { Thread.sleep(100); } } if (!theTestSuccedded) { aRunNotifier.fireTestFailure(new Failure(theDescription, new RuntimeException("Test did not succeed!"))); } } catch (final Exception e) { aRunNotifier.fireTestFailure(new Failure(theDescription, e)); } finally { aRunNotifier.fireTestFinished(theDescription); } } } @Override protected void runChild(final FrameworkMethodWithTestOption aFrameworkMethod, final RunNotifier aRunNotifier) { // do not execute ignored tests, only report them if (aFrameworkMethod.getMethod().isAnnotationPresent(Ignore.class)) { aRunNotifier.fireTestIgnored( Description.createTestDescription( getTestClass().getJavaClass(), aFrameworkMethod.getName() ) ); return; } final TestOption o = aFrameworkMethod.getTestOption(); if (o.getBackendType() == null) { testJVMBackendFrameworkMethod(aFrameworkMethod, aRunNotifier); } else { switch (o.getBackendType()) { case js: testJSBackendFrameworkMethod(aFrameworkMethod, aRunNotifier, o); break; case wasm: testWASMASTBackendFrameworkMethod(aFrameworkMethod, aRunNotifier, o); break; case wasm_llvm: testLLVMWASMASTBackendFrameworkMethod(aFrameworkMethod, aRunNotifier, o); break; default: throw new IllegalStateException("Unsupported backend :" + o.getBackendType()); } } } }
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.layoutlib.bridge; import com.android.tools.layoutlib.annotations.LayoutlibDelegate; import com.android.tools.layoutlib.create.CreateInfo; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; import junit.framework.TestCase; /** * Tests that native delegate classes implement all the required methods. * * This looks at {@link CreateInfo#DELEGATE_CLASS_NATIVES} to get the list of classes that * have their native methods reimplemented through a delegate. * * Since the reimplemented methods are not native anymore, we look for the annotation * {@link LayoutlibDelegate}, and look for a matching method in the delegate (named the same * as the modified class with _Delegate added as a suffix). * If the original native method is not static, then we make sure the delegate method also * include the original class as first parameter (to access "this"). * */ public class TestDelegates extends TestCase { public void testNativeDelegates() { final String[] classes = CreateInfo.DELEGATE_CLASS_NATIVES; final int count = classes.length; for (int i = 0 ; i < count ; i++) { loadAndCompareClasses(classes[i], classes[i] + "_Delegate"); } } public void testMethodDelegates() { final String[] methods = CreateInfo.DELEGATE_METHODS; final int count = methods.length; for (int i = 0 ; i < count ; i++) { String methodName = methods[i]; // extract the class name String className = methodName.substring(0, methodName.indexOf('#')); String targetClassName = className.replace('$', '_') + "_Delegate"; loadAndCompareClasses(className, targetClassName); } } private void loadAndCompareClasses(String originalClassName, String delegateClassName) { // load the classes try { ClassLoader classLoader = TestDelegates.class.getClassLoader(); Class<?> originalClass = classLoader.loadClass(originalClassName); Class<?> delegateClass = classLoader.loadClass(delegateClassName); compare(originalClass, delegateClass); } catch (ClassNotFoundException e) { fail("Failed to load class: " + e.getMessage()); } catch (SecurityException e) { fail("Failed to load class: " + e.getMessage()); } } private void compare(Class<?> originalClass, Class<?> delegateClass) throws SecurityException { List<Method> checkedDelegateMethods = new ArrayList<Method>(); // loop on the methods of the original class, and for the ones that are annotated // with @LayoutlibDelegate, look for a matching method in the delegate class. // The annotation is automatically added by layoutlib_create when it replace a method // by a call to a delegate Method[] originalMethods = originalClass.getDeclaredMethods(); for (Method originalMethod : originalMethods) { // look for methods that are delegated: they have the LayoutlibDelegate annotation if (originalMethod.getAnnotation(LayoutlibDelegate.class) == null) { continue; } // get the signature. 
Class<?>[] parameters = originalMethod.getParameterTypes(); // if the method is not static, then the class is added as the first parameter // (for "this") if ((originalMethod.getModifiers() & Modifier.STATIC) == 0) { Class<?>[] newParameters = new Class<?>[parameters.length + 1]; newParameters[0] = originalClass; System.arraycopy(parameters, 0, newParameters, 1, parameters.length); parameters = newParameters; } // if the original class is an inner class that's not static, then // we add this on the enclosing class at the beginning if (originalClass.getEnclosingClass() != null && (originalClass.getModifiers() & Modifier.STATIC) == 0) { Class<?>[] newParameters = new Class<?>[parameters.length + 1]; newParameters[0] = originalClass.getEnclosingClass(); System.arraycopy(parameters, 0, newParameters, 1, parameters.length); parameters = newParameters; } try { // try to load the method with the given parameter types. Method delegateMethod = delegateClass.getDeclaredMethod(originalMethod.getName(), parameters); // check that the method has the annotation assertNotNull( String.format( "Delegate method %1$s for class %2$s does not have the @LayoutlibDelegate annotation", delegateMethod.getName(), originalClass.getName()), delegateMethod.getAnnotation(LayoutlibDelegate.class)); // check the return type of the methods match. assertTrue( String.format("Delegate method %1$s.%2$s does not match the corresponding " + "framework method which returns %3$s", delegateClass.getName(), getMethodName(delegateMethod), originalMethod.getReturnType().getName()), delegateMethod.getReturnType() == originalMethod.getReturnType()); // check that the method is static assertTrue( String.format( "Delegate method %1$s for class %2$s is not static", delegateMethod.getName(), originalClass.getName()), (delegateMethod.getModifiers() & Modifier.STATIC) == Modifier.STATIC); // add the method as checked. checkedDelegateMethods.add(delegateMethod); } catch (NoSuchMethodException e) { String name = getMethodName(originalMethod, parameters); fail(String.format("Missing %1$s.%2$s", delegateClass.getName(), name)); } } // look for dead (delegate) code. // This looks for all methods in the delegate class, and if they have the // @LayoutlibDelegate annotation, make sure they have been previously found as a // match for a method in the original class. // If not, this means the method is a delegate for a method that either doesn't exist // anymore or is not delegated anymore. Method[] delegateMethods = delegateClass.getDeclaredMethods(); for (Method delegateMethod : delegateMethods) { // look for methods that are delegates: they have the LayoutlibDelegate annotation if (delegateMethod.getAnnotation(LayoutlibDelegate.class) == null) { continue; } assertTrue( String.format( "Delegate method %1$s.%2$s is not used anymore and must be removed", delegateClass.getName(), getMethodName(delegateMethod)), checkedDelegateMethods.contains(delegateMethod)); } } private String getMethodName(Method method) { return getMethodName(method, method.getParameterTypes()); } private String getMethodName(Method method, Class<?>[] parameters) { // compute a full class name that's long but not too long. 
StringBuilder sb = new StringBuilder(method.getName() + "("); for (int j = 0; j < parameters.length; j++) { Class<?> theClass = parameters[j]; sb.append(theClass.getName()); int dimensions = 0; while (theClass.isArray()) { dimensions++; theClass = theClass.getComponentType(); } for (int i = 0; i < dimensions; i++) { sb.append("[]"); } if (j < (parameters.length - 1)) { sb.append(","); } } sb.append(")"); return sb.toString(); } }
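/*
 * A hypothetical pair of classes illustrating the convention that TestDelegates enforces:
 * a non-static method annotated with @LayoutlibDelegate in the framework class must have a
 * static counterpart in <ClassName>_Delegate whose first parameter is the framework class
 * (standing in for "this") and whose remaining parameters and return type match. The class
 * names below are made up for illustration only.
 */
import com.android.tools.layoutlib.annotations.LayoutlibDelegate;

class SampleWidget {

    // layoutlib_create rewrites this call so it goes through the delegate class.
    @LayoutlibDelegate
    /*package*/ int nativeMeasure(int width) {
        return SampleWidget_Delegate.nativeMeasure(this, width);
    }
}

class SampleWidget_Delegate {

    // Static, annotated, original instance as first parameter, same return type:
    // exactly the shape that TestDelegates.compare() looks for.
    @LayoutlibDelegate
    /*package*/ static int nativeMeasure(SampleWidget thisWidget, int width) {
        return width;
    }
}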
/********************************************************************************** * $URL: https://source.sakaiproject.org/svn/kernel/trunk/kernel-util/src/main/java/org/sakaiproject/util/serialize/Type1BaseResourcePropertiesSerializer.java $ * $Id: Type1BaseResourcePropertiesSerializer.java 101634 2011-12-12 16:44:33Z aaronz@vt.edu $ *********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.util.serialize; import java.io.DataInputStream; import java.io.DataOutputStream; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Vector; import java.util.Map.Entry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.entity.api.serialize.DataStreamEntitySerializer; import org.sakaiproject.entity.api.serialize.EntityParseException; import org.sakaiproject.entity.api.serialize.SerializableEntity; import org.sakaiproject.entity.api.serialize.SerializablePropertiesAccess; /** * @author ieb */ public class Type1BaseResourcePropertiesSerializer implements DataStreamEntitySerializer { private static final int TYPE1 = 1; private static final int BLOCK1 = 100; private static final int BLOCK2 = 101; private static final int BLOCK3 = 102; private static final Log log = LogFactory .getLog(Type1BaseResourcePropertiesSerializer.class); /** * @see org.sakaiproject.entity.api.serialize.DataStreamEntitySerializer#parse(org.sakaiproject.entity.api.serialize.SerializableEntity, * java.io.DataInputStream) */ public void parse(SerializableEntity se, DataInputStream ds) throws EntityParseException { if (!(se instanceof SerializablePropertiesAccess)) { throw new EntityParseException("Cant serialize " + se + " as it is not a SerializableProperties "); } SerializablePropertiesAccess sp = (SerializablePropertiesAccess) se; Map<String, Object> properties = new HashMap<String, Object>(); try { int type = ds.readInt(); if (type == TYPE1) { int block = ds.readInt(); if (block == BLOCK1) { int nprops = ds.readInt(); for (int i = 0; i < nprops; i++) { block = ds.readInt(); switch (block) { case BLOCK2: { String key = ds.readUTF(); String value = ds.readUTF(); properties.put(key, value); } break; case BLOCK3: { String key = ds.readUTF(); int n = ds.readInt(); List<String> l = new Vector<String>(); for (int j = 0; j < n; j++) { l.add(ds.readUTF()); } properties.put(key, l); } break; default: throw new EntityParseException( "Unrecognised block number " + block); } } sp.setSerializableProperties(properties); } else { throw new EntityParseException( "Failed to parse entity, unrecognised block " + block); } } else { throw new EntityParseException( "Cant Parse block, resource properties is not type 1 " + type); } } catch (EntityParseException ep) { throw ep; } catch 
(Exception ex) { throw new EntityParseException("Failed to parse entity ", ex); } } /** * @see org.sakaiproject.entity.api.serialize.DataStreamEntitySerializer#serialize(org.sakaiproject.entity.api.serialize.SerializableEntity, * java.io.DataOutputStream) */ public void serialize(SerializableEntity se, DataOutputStream ds) throws EntityParseException { if (!(se instanceof SerializablePropertiesAccess)) { throw new EntityParseException("Cant serialize " + se + " as it is not a SerializableProperties "); } SerializablePropertiesAccess sp = (SerializablePropertiesAccess) se; Map<String, Object> properties = sp.getSerializableProperties(); try { ds.writeInt(TYPE1); ds.writeInt(BLOCK1); int ps = properties.keySet().size(); for (Iterator<String> i = properties.keySet().iterator(); i.hasNext();) { if ( i.next() == null ) { ps--; } } ds.writeInt(ps); for (Entry<String, Object> entry : properties.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (value != null) { if (value instanceof String) { ds.writeInt(BLOCK2); ds.writeUTF(key); ds.writeUTF((String) value); } else if (value instanceof List) { ds.writeInt(BLOCK3); ds.writeUTF(key); List<?> l = (List<?>) value; int s = l.size(); for (Iterator<?> il = l.iterator(); il.hasNext();) { if (il.next() == null) { s--; } } ds.writeInt(s); for (Iterator<?> il = l.iterator(); il.hasNext();) { Object v = il.next(); if (v != null) { if (v instanceof String) { ds.writeUTF((String) v); } else { log.warn("Non String found in property list " + v); } } } } else { log.warn("Non String found in property " + value); } } } } catch (Exception ex) { throw new EntityParseException("Failed to serialize properties ", ex); } } }
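/*
 * A self-contained sketch of the Type 1 wire format handled by the serializer above, written
 * with plain java.io streams so it does not depend on the Sakai entity interfaces. It writes
 * one single-valued property and one multi-valued property using the same block markers
 * (TYPE1 = 1, BLOCK1 = 100, BLOCK2 = 101, BLOCK3 = 102) and reads them back; the property
 * names and values are illustrative only.
 */
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class Type1FormatExample {

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);

        out.writeInt(1);              // TYPE1
        out.writeInt(100);            // BLOCK1
        out.writeInt(2);              // number of properties

        out.writeInt(101);            // BLOCK2: single-valued property
        out.writeUTF("DAV:displayname");
        out.writeUTF("My Resource");

        out.writeInt(102);            // BLOCK3: multi-valued property
        out.writeUTF("CHEF:group-list");
        out.writeInt(2);              // number of values
        out.writeUTF("group-a");
        out.writeUTF("group-b");
        out.flush();

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        System.out.println("type  = " + in.readInt());   // 1
        System.out.println("block = " + in.readInt());   // 100
        int nprops = in.readInt();
        for (int i = 0; i < nprops; i++) {
            int block = in.readInt();
            if (block == 101) {
                System.out.println(in.readUTF() + " = " + in.readUTF());
            } else if (block == 102) {
                String key = in.readUTF();
                int n = in.readInt();
                StringBuilder values = new StringBuilder();
                for (int j = 0; j < n; j++) {
                    values.append(in.readUTF()).append(' ');
                }
                System.out.println(key + " = [" + values.toString().trim() + "]");
            }
        }
    }
}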
// SortedDataTable package org.javamoney.examples.ez.common.gui.table; import static org.javamoney.examples.ez.common.CommonIconKeys.SORT_COLUMN_DOWN; import static org.javamoney.examples.ez.common.CommonIconKeys.SORT_COLUMN_UP; import static java.awt.Cursor.HAND_CURSOR; import static java.awt.Cursor.getDefaultCursor; import static java.awt.Cursor.getPredefinedCursor; import static java.awt.event.MouseEvent.BUTTON1; import static javax.swing.SwingConstants.CENTER; import static javax.swing.SwingConstants.LEFT; import java.awt.Component; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionListener; import java.util.Collections; import javax.swing.JLabel; import javax.swing.JTable; import javax.swing.table.DefaultTableModel; import javax.swing.table.TableCellRenderer; /** * This class facilitates using a table and the elements that the table * displays. This class maintains its own data list and facilitates storing, * accessing, and sorting those elements. Columns that can be sorted will * respond to mouse clicks on the column's header, which will invoke a sort and * cause the table to redisplay the rows. * * @param <E> The type of elements that the table will store and display. */ public abstract class SortedDataTable<E> extends DataTable<E> { /** * Constructs a new table. * * @param columns The table columns. * @param model The table model. * @param comparator The comparator that sort. */ public SortedDataTable(String[] columns, DefaultTableModel model, DataTableComparator<E> comparator) { super(columns, model); setComparator(comparator); setSortableColumns(null); setRendererForHeaders(new SortRenderController()); // Add listeners. getTableHeader().addMouseListener(new MouseController()); getTableHeader().addMouseMotionListener(new MouseController()); } ////////////////////////////////////////////////////////////////////////////// // Start of protected methods. ////////////////////////////////////////////////////////////////////////////// /** * This method returns the object that compares the table's data. * * @return The object that compares the table's data. */ protected final DataTableComparator<E> getComparator() { return itsComparator; } /** * This method sets the columns that can be sorted. A value of null indicates * that all columns can be sorted. * * @param columns An array of columns. */ protected final void setSortableColumns(int[] columns) { itsSortableColumns = columns; } /** * This method sorts the elements. */ protected final void sort() { Collections.sort(getList(), getComparator()); } ////////////////////////////////////////////////////////////////////////////// // Start of private methods. ////////////////////////////////////////////////////////////////////////////// private boolean canSortColumn(int column) { boolean result = false; if(getSortableColumns() == null) { result = true; } else { for(int len = 0; len < getSortableColumns().length; ++len) { if(column == getSortableColumns()[len]) { result = true; break; } } } return result; } private int[] getSortableColumns() { return itsSortableColumns; } private void setComparator(DataTableComparator<E> comparator) { itsComparator = comparator; } ////////////////////////////////////////////////////////////////////////////// // Start of inner classes. 
////////////////////////////////////////////////////////////////////////////// private class MouseController extends MouseAdapter implements MouseMotionListener { public void mouseDragged(MouseEvent event) { mouseEntered(event); } @Override public void mouseEntered(MouseEvent event) { int column = getColumnIndexAtX(event.getPoint().x); if(canSortColumn(column) == true) { getTableHeader().setCursor(getPredefinedCursor(HAND_CURSOR)); } else { getTableHeader().setCursor(getDefaultCursor()); } } @Override public void mouseExited(MouseEvent event) { getTableHeader().setCursor(getDefaultCursor()); } @Override public void mouseClicked(MouseEvent event) { if(event.getButton() == BUTTON1) { int column = getColumnModel().getColumnIndexAtX(event.getX()); column = getIndexForColumn(column); if(canSortColumn(column) == true) { // Has the column already been selected? If so, then flip the invert // sort switch. if(getComparator().getColumn() == column) { getComparator().setInvertSort(!getComparator().invertSort()); } getComparator().setColumn(column); getTableHeader().repaint(); display(); } } } public void mouseMoved(MouseEvent event) { mouseEntered(event); } } private class SortRenderController implements TableCellRenderer { public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { JLabel label = (JLabel)getDefaultHeaderRenderer().getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); column = getIndexForColumn(column); label.setHorizontalAlignment(CENTER); label.setHorizontalTextPosition(LEFT); // Icon. if(getComparator().getColumn() == column) { if(getComparator().invertSort() == true) { label.setIcon(SORT_COLUMN_UP.getIcon()); } else { label.setIcon(SORT_COLUMN_DOWN.getIcon()); } label.setText("<html><b>" + label.getText() + "</b></html>"); } return label; } } ////////////////////////////////////////////////////////////////////////////// // Start of class members. ////////////////////////////////////////////////////////////////////////////// private DataTableComparator<E> itsComparator; private int[] itsSortableColumns; }
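/*
 * A minimal, self-contained sketch of the column/invert-sort pattern that SortedDataTable
 * relies on: the comparator tracks the currently selected column plus an "invert" flag that
 * is flipped when the same header is clicked twice. It does not use the real
 * DataTableComparator API; the Row type and the column indices are made up for illustration.
 */
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

class ColumnSortExample {

    static final class Row {
        final String name;
        final int amount;

        Row(String name, int amount) {
            this.name = name;
            this.amount = amount;
        }
    }

    static final class RowComparator implements Comparator<Row> {
        private int column;
        private boolean invertSort;

        void setColumn(int column) {
            // Selecting the already-selected column flips the sort direction.
            if (this.column == column) {
                invertSort = !invertSort;
            } else {
                this.column = column;
                invertSort = false;
            }
        }

        @Override
        public int compare(Row a, Row b) {
            int result = (column == 0)
                    ? a.name.compareTo(b.name)
                    : Integer.compare(a.amount, b.amount);
            return invertSort ? -result : result;
        }
    }

    public static void main(String[] args) {
        List<Row> rows = new ArrayList<>();
        rows.add(new Row("rent", 900));
        rows.add(new Row("food", 250));

        RowComparator comparator = new RowComparator();
        comparator.setColumn(1);              // sort by amount, ascending
        rows.sort(comparator);
        comparator.setColumn(1);              // same column again: descending
        rows.sort(comparator);
        System.out.println(rows.get(0).name); // prints "rent"
    }
}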
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.dht; import java.io.Serializable; import java.util.*; import org.apache.commons.lang.ObjectUtils; import org.apache.cassandra.service.StorageService; /** * A representation of the range that a node is responsible for on the DHT ring. * * A Range is responsible for the tokens between (left, right]. */ public class Range extends AbstractBounds implements Comparable<Range>, Serializable { public static final long serialVersionUID = 1L; public Range(Token left, Token right) { this(left, right, StorageService.getPartitioner()); } public Range(Token left, Token right, IPartitioner partitioner) { super(left, right, partitioner); } public static boolean contains(Token left, Token right, Token bi) { if (isWrapAround(left, right)) { /* * We are wrapping around, so the interval is (a,b] where a >= b, * then we have 3 cases which hold for any given token k: * (1) a < k -- return true * (2) k <= b -- return true * (3) b < k <= a -- return false */ if (bi.compareTo(left) > 0) return true; else return right.compareTo(bi) >= 0; } else { /* * This is the range (a, b] where a < b. */ return (bi.compareTo(left) > 0 && right.compareTo(bi) >= 0); } } public boolean contains(Range that) { if (this.left.equals(this.right)) { // full ring always contains all other ranges return true; } boolean thiswraps = isWrapAround(left, right); boolean thatwraps = isWrapAround(that.left, that.right); if (thiswraps == thatwraps) { return left.compareTo(that.left) <= 0 && that.right.compareTo(right) <= 0; } else if (thiswraps) { // wrapping might contain non-wrapping // that is contained if both its tokens are in one of our wrap segments return left.compareTo(that.left) <= 0 || that.right.compareTo(right) <= 0; } else { // (thatwraps) // non-wrapping cannot contain wrapping return false; } } /** * Helps determine if a given point on the DHT ring is contained * in the range in question. * @param bi point in question * @return true if the point contains within the range else false. */ public boolean contains(Token bi) { return contains(left, right, bi); } /** * @param that range to check for intersection * @return true if the given range intersects with this range. */ public boolean intersects(Range that) { return intersectionWith(that).size() > 0; } public static Set<Range> rangeSet(Range ... ranges) { return Collections.unmodifiableSet(new HashSet<Range>(Arrays.asList(ranges))); } /** * @param that * @return the intersection of the two Ranges. this can be two disjoint Ranges if one is wrapping and one is not. * say you have nodes G and M, with query range (D,T]; the intersection is (M-T] and (D-G]. * If there is no intersection, an empty list is returned. 
*/ public Set<Range> intersectionWith(Range that) { if (that.contains(this)) return rangeSet(this); if (this.contains(that)) return rangeSet(that); boolean thiswraps = isWrapAround(left, right); boolean thatwraps = isWrapAround(that.left, that.right); if (!thiswraps && !thatwraps) { // neither wraps. the straightforward case. if (!(left.compareTo(that.right) < 0 && that.left.compareTo(right) < 0)) return Collections.emptySet(); return rangeSet(new Range((Token)ObjectUtils.max(this.left, that.left), (Token)ObjectUtils.min(this.right, that.right))); } if (thiswraps && thatwraps) { // if the starts are the same, one contains the other, which we have already ruled out. assert !this.left.equals(that.left); // two wrapping ranges always intersect. // since we have already determined that neither this nor that contains the other, we have 2 cases, // and mirror images of those case. // (1) both of that's (1, 2] endpoints lie in this's (A, B] right segment: // ---------B--------A--1----2------> // (2) only that's start endpoint lies in this's right segment: // ---------B----1---A-------2------> // or, we have the same cases on the left segement, which we can handle by swapping this and that. return this.left.compareTo(that.left) < 0 ? intersectionBothWrapping(this, that) : intersectionBothWrapping(that, this); } if (thiswraps && !thatwraps) return intersectionOneWrapping(this, that); assert (!thiswraps && thatwraps); return intersectionOneWrapping(that, this); } private static Set<Range> intersectionBothWrapping(Range first, Range that) { Set<Range> intersection = new HashSet<Range>(2); if (that.right.compareTo(first.left) > 0) intersection.add(new Range(first.left, that.right)); intersection.add(new Range(that.left, first.right)); return Collections.unmodifiableSet(intersection); } private static Set<Range> intersectionOneWrapping(Range wrapping, Range other) { Set<Range> intersection = new HashSet<Range>(2); if (other.contains(wrapping.right)) intersection.add(new Range(other.left, wrapping.right)); // need the extra compareto here because ranges are asymmetrical; wrapping.left _is not_ contained by the wrapping range if (other.contains(wrapping.left) && wrapping.left.compareTo(other.right) < 0) intersection.add(new Range(wrapping.left, other.right)); return Collections.unmodifiableSet(intersection); } public AbstractBounds createFrom(Token token) { return new Range(left, token); } /** * Tells if the given range is a wrap around. */ public static boolean isWrapAround(Token left, Token right) { return left.compareTo(right) >= 0; } public int compareTo(Range rhs) { /* * If the range represented by the "this" pointer * is a wrap around then it is the smaller one. */ if ( isWrapAround(left, right) ) return -1; if ( isWrapAround(rhs.left, rhs.right) ) return 1; return right.compareTo(rhs.right); } public static boolean isTokenInRanges(Token token, Iterable<Range> ranges) { assert ranges != null; for (Range range : ranges) { if (range.contains(token)) { return true; } } return false; } @Override public boolean equals(Object o) { if (!(o instanceof Range)) return false; Range rhs = (Range)o; return left.equals(rhs.left) && right.equals(rhs.right); } @Override public String toString() { return "(" + left + "," + right + "]"; } public boolean isWrapAround() { return isWrapAround(left, right); } }
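/*
 * A standalone worked example of the wrap-around containment rule documented in
 * Range.contains(Token, Token, Token), using plain ints instead of Tokens. For a wrapping
 * range (a, b] with a >= b, a point k is inside when a < k (case 1) or k <= b (case 2),
 * and outside when b < k <= a (case 3). The token values below are illustrative.
 */
class WrapAroundRangeExample {

    // Mirrors Range.contains(left, right, bi) for a ring of int tokens.
    static boolean contains(int left, int right, int token) {
        if (left >= right) {
            // Wrapping range (left, right]: covers (left, +inf) and (-inf, right].
            return token > left || token <= right;
        }
        // Ordinary range (left, right].
        return token > left && token <= right;
    }

    public static void main(String[] args) {
        // Wrapping range (50, 10]:
        System.out.println(contains(50, 10, 60));  // true  (case 1: a < k)
        System.out.println(contains(50, 10, 5));   // true  (case 2: k <= b)
        System.out.println(contains(50, 10, 30));  // false (case 3: b < k <= a)

        // Non-wrapping range (10, 50]:
        System.out.println(contains(10, 50, 50));  // true  (right endpoint is inclusive)
        System.out.println(contains(10, 50, 10));  // false (left endpoint is exclusive)
    }
}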
/* * Copyright 2016 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.deprecated; import com.google.api.services.compute.model.Disk; import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects.ToStringHelper; import java.io.Serializable; import java.util.Objects; /** * Base class for Google Compute Engine disk configurations. A disk can be used as primary storage * for your virtual machine instances. Use {@link StandardDiskConfiguration} to create a standard * disk given a disk type and size. Use {@link ImageDiskConfiguration} to create a disk from a * Compute Engine disk image. Use {@link SnapshotDiskConfiguration} to create a disk from a Compute * Engine disk snapshot. * * @see <a href="https://cloud.google.com/compute/docs/disks/">Block Storage</a> */ public abstract class DiskConfiguration implements Serializable { private static final long serialVersionUID = -1783061701255428417L; private final Type type; private final Long sizeGb; private final DiskTypeId diskType; /** * Type of a Google Compute Engine disk configuration. */ public enum Type { /** * A Google Compute Engine standard disk configuration. */ STANDARD, /** * A Google Compute Engine disk configuration that creates a disk from an image. */ IMAGE, /** * A Google Compute Engine disk configuration that creates a disk from a snapshot. */ SNAPSHOT } /** * Base builder for disk configurations. * * @param <T> the disk configuration type * @param <B> the disk configuration builder */ public abstract static class Builder<T extends DiskConfiguration, B extends Builder<T, B>> { private Type type; private Long sizeGb; private DiskTypeId diskType; Builder(Type type) { this.type = type; } Builder(DiskConfiguration diskConfiguration) { this.type = diskConfiguration.type; this.sizeGb = diskConfiguration.sizeGb; this.diskType = diskConfiguration.diskType; } Builder(Type type, Disk diskPb) { this.type = type; this.sizeGb = diskPb.getSizeGb(); if (diskPb.getType() != null) { this.diskType = DiskTypeId.fromUrl(diskPb.getType()); } } @SuppressWarnings("unchecked") protected B self() { return (B) this; } B setType(Type type) { this.type = type; return self(); } /** * Sets the size of the persistent disk, in GB. */ public B setSizeGb(Long sizeGb) { this.sizeGb = sizeGb; return self(); } /** * Sets the identity of the disk type. If not set {@code pd-standard} will be used. */ public B setDiskType(DiskTypeId diskType) { this.diskType = diskType; return self(); } /** * Creates an object. */ public abstract T build(); } DiskConfiguration(Builder builder) { this.type = builder.type; this.sizeGb = builder.sizeGb; this.diskType = builder.diskType; } /** * Returns the disk configuration's type. This method returns {@link Type#STANDARD} for a standard * configuration that creates a disk given its type and size. This method returns * {@link Type#SNAPSHOT} for a configuration that creates a disk from a Google Compute Engine * snapshot. 
This method returns {@link Type#IMAGE} for a configuration that creates a disk * from a Google Compute Engine image. */ public Type getType() { return type; } /** * Returns the size of the persistent disk, in GB. */ public Long getSizeGb() { return sizeGb; } /** * Returns the identity of the disk type. */ public DiskTypeId getDiskType() { return diskType; } /** * Returns a builder for the object. */ public abstract Builder toBuilder(); ToStringHelper toStringHelper() { return MoreObjects.toStringHelper(this) .add("type", type) .add("sizeGb", sizeGb) .add("diskType", diskType); } @Override public String toString() { return toStringHelper().toString(); } final int baseHashCode() { return Objects.hash(type, sizeGb, diskType); } final boolean baseEquals(DiskConfiguration diskConfiguration) { return diskConfiguration != null && getClass().equals(diskConfiguration.getClass()) && Objects.equals(toPb(), diskConfiguration.toPb()); } abstract DiskConfiguration setProjectId(String projectId); Disk toPb() { Disk diskPb = new Disk(); diskPb.setSizeGb(sizeGb); if (diskType != null) { diskPb.setType(diskType.getSelfLink()); } return diskPb; } @SuppressWarnings("unchecked") static <T extends DiskConfiguration> T fromPb(Disk diskPb) { if (diskPb.getSourceImage() != null) { return (T) ImageDiskConfiguration.fromPb(diskPb); } else if (diskPb.getSourceSnapshot() != null) { return (T) SnapshotDiskConfiguration.fromPb(diskPb); } return (T) StandardDiskConfiguration.fromPb(diskPb); } }
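/*
 * A minimal sketch of the self-typed builder pattern used by DiskConfiguration.Builder:
 * the recursive generic parameter B lets setters declared on the base builder return the
 * concrete subclass builder, so fluent chains keep their precise type. The Shape and Circle
 * classes are hypothetical and exist only to illustrate the pattern.
 */
class SelfTypedBuilderExample {

    abstract static class Shape {
        final String label;

        Shape(Builder<?, ?> builder) {
            this.label = builder.label;
        }

        abstract static class Builder<T extends Shape, B extends Builder<T, B>> {
            private String label;

            @SuppressWarnings("unchecked")
            protected B self() {
                return (B) this;
            }

            // Returns B, so chains starting from a subclass builder stay typed as that subclass.
            public B setLabel(String label) {
                this.label = label;
                return self();
            }

            public abstract T build();
        }
    }

    static final class Circle extends Shape {
        final double radius;

        Circle(Builder builder) {
            super(builder);
            this.radius = builder.radius;
        }

        static final class Builder extends Shape.Builder<Circle, Builder> {
            private double radius;

            public Builder setRadius(double radius) {
                this.radius = radius;
                return self();
            }

            @Override
            public Circle build() {
                return new Circle(this);
            }
        }
    }

    public static void main(String[] args) {
        // setLabel() is declared on the base builder but still returns Circle.Builder here.
        Circle circle = new Circle.Builder().setLabel("wheel").setRadius(2.5).build();
        System.out.println(circle.label + " r=" + circle.radius);
    }
}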
/* * Copyright 2020 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.traits.core.factmodel; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.Serializable; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.BitSet; import java.util.Map; import java.util.Set; import org.drools.core.factmodel.BuildUtils; import org.drools.core.factmodel.ClassDefinition; import org.drools.core.factmodel.FieldDefinition; import org.drools.core.factmodel.traits.Thing; import org.drools.core.factmodel.traits.TraitType; import org.drools.core.factmodel.traits.TraitableBean; import org.drools.traits.core.factmodel.TraitBuilderUtil.MixinInfo; import org.drools.core.util.Triple; import org.drools.core.util.TripleFactory; import org.drools.core.util.TripleStore; import org.kie.api.definition.type.FactField; import org.mvel2.asm.ClassVisitor; import org.mvel2.asm.ClassWriter; import org.mvel2.asm.FieldVisitor; import org.mvel2.asm.Label; import org.mvel2.asm.MethodVisitor; import org.mvel2.asm.Type; import static org.drools.traits.core.factmodel.TraitBuilderUtil.buildMixinMethods; import static org.drools.traits.core.factmodel.TraitBuilderUtil.findMixinInfo; import static org.drools.traits.core.factmodel.TraitBuilderUtil.getMixinName; import static org.drools.core.rule.builder.dialect.asm.ClassGenerator.createClassWriter; public class TraitTripleProxyClassBuilderImpl extends AbstractProxyClassBuilderImpl implements TraitProxyClassBuilder, Serializable { public byte[] buildClass( ClassDefinition core, ClassLoader classLoader ) throws IOException, SecurityException, IllegalArgumentException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException, NoSuchFieldException { FieldVisitor fv; MethodVisitor mv; // get the method bitmask BitSet mask = traitRegistryImpl.getFieldMask(getTrait().getName(), core.getDefinedClass().getName() ); String name = TraitFactoryImpl.getPropertyWrapperName(getTrait(), core ); String masterName = TraitFactoryImpl.getProxyName(getTrait(), core ); Class<?> traitClass = getTrait().getDefinedClass(); String internalWrapper = BuildUtils.getInternalType(name); String internalProxy = BuildUtils.getInternalType(masterName); String internalCore = Type.getInternalName(core.getDefinedClass()); String descrCore = Type.getDescriptor(core.getDefinedClass()); String internalTrait = Type.getInternalName(getTrait().getDefinedClass()); MixinInfo mixinInfo = findMixinInfo( traitClass ); ClassWriter cw = createClassWriter( classLoader, ACC_PUBLIC + ACC_SUPER, internalProxy, null, Type.getInternalName( proxyBaseClass ), new String[] { internalTrait, Type.getInternalName( Externalizable.class ) } ); { fv = cw.visitField(ACC_PRIVATE + ACC_FINAL + ACC_STATIC, TraitType.traitNameField, Type.getDescriptor(String.class ), null, null ); fv.visitEnd(); } { fv = cw.visitField( ACC_PUBLIC, 
"object", descrCore, null, null ); fv.visitEnd(); } { fv = cw.visitField( ACC_PRIVATE, "store", Type.getDescriptor( TripleStore.class ), null, null ); fv.visitEnd(); } { fv = cw.visitField( ACC_PRIVATE, "storeId", Type.getDescriptor( String.class ), null, null); fv.visitEnd(); } if ( mixinInfo != null ) { for ( Class<?> mixinClass : mixinInfo.mixinClasses ) { { fv = cw.visitField( ACC_PRIVATE, getMixinName(mixinClass), Type.getDescriptor( mixinClass ), null, null ); fv.visitEnd(); } } } { mv = cw.visitMethod( ACC_PUBLIC, "<init>", "(" + descrCore + Type.getDescriptor( TripleStore.class ) + Type.getDescriptor( TripleFactory.class ) + Type.getDescriptor( BitSet.class ) + Type.getDescriptor( BitSet.class ) + Type.getDescriptor( boolean.class ) +")V", null, null ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( TripleStore.class ), "getId", "()" + Type.getDescriptor( String.class ), false ); mv.visitFieldInsn( PUTFIELD, internalProxy, "storeId", Type.getDescriptor( String.class ) ); buildConstructorCore( mv, internalProxy, internalWrapper, internalCore, descrCore, mixinInfo ); initFields( mv, internalProxy ); mv.visitInsn( RETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } { mv = cw.visitMethod(ACC_PUBLIC, "setObject", "(" + Type.getDescriptor( Object.class ) + ")V", null, null); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( CHECKCAST, internalCore ); mv.visitFieldInsn( PUTFIELD, internalProxy, "object", descrCore ); mv.visitInsn( RETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } { mv = cw.visitMethod( ACC_PUBLIC, "writeExternal", "(" + Type.getDescriptor( ObjectOutput.class )+ ")V", null, new String[] { Type.getInternalName( IOException.class ) } ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( proxyBaseClass ), "writeExternal", "(" + Type.getDescriptor( ObjectOutput.class ) + ")V", false ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn(INVOKEVIRTUAL, internalProxy, "getObject", "()" + Type.getDescriptor( TraitableBean.class ), false ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V", true ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitFieldInsn( GETFIELD, internalProxy, "storeId", Type.getDescriptor( String.class ) ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V", true ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitFieldInsn( GETFIELD, internalProxy, "store", Type.getDescriptor( TripleStore.class ) ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V", true ); mv.visitInsn( RETURN ); // mv.visitMaxs( 2, 2 ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } { mv = cw.visitMethod( ACC_PUBLIC, "readExternal", "(" + Type.getDescriptor( ObjectInput.class ) + ")V", null, new String[] { Type.getInternalName( IOException.class ), Type.getInternalName( ClassNotFoundException.class ) } ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( proxyBaseClass ), "readExternal", "(" + Type.getDescriptor( ObjectInput.class ) + ")V", false ); mv.visitVarInsn( ALOAD, 0 ); 
mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ), true ); mv.visitTypeInsn( CHECKCAST, internalCore ); mv.visitFieldInsn( PUTFIELD, internalProxy, "object", descrCore ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ), true ); mv.visitTypeInsn( CHECKCAST, Type.getInternalName( String.class ) ); mv.visitFieldInsn( PUTFIELD, internalProxy, "storeId", Type.getDescriptor( String.class ) ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ), true ); mv.visitTypeInsn( CHECKCAST, Type.getInternalName( TripleStore.class ) ); mv.visitFieldInsn( PUTFIELD, internalProxy, "store", Type.getDescriptor( TripleStore.class ) ); mv.visitInsn( RETURN ); // mv.visitMaxs( 3, 2 ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } helpBuildClass( core, cw, internalProxy, descrCore, mask ); buildProxyAccessors( mask, cw, masterName, core, mixinInfo ); boolean hasKeys = false; for ( FactField ff : getTrait().getFields() ) { if ( ff.isKey() ) { hasKeys = true; break; } } if ( ! hasKeys ) { buildEqualityMethods( cw, masterName, core.getClassName() ); } else { buildKeyedEqualityMethods( cw, getTrait(), masterName ); } buildMixinMethods( masterName, mixinInfo, cw ); buildCommonMethods( cw, masterName ); buildExtendedMethods( cw, getTrait(), core ); buildShadowMethods( cw, trait, core ); cw.visitEnd(); return cw.toByteArray(); } protected void buildShadowMethods( ClassWriter cw, ClassDefinition trait, ClassDefinition core ) { for ( Method m : trait.getDefinedClass().getMethods() ) { if ( ! 
TraitFactoryImpl.excludeFromShadowing(m, trait ) ) { for ( Method q : core.getDefinedClass().getMethods() ) { if ( TraitFactoryImpl.isCompatible(m, q ) ) { buildShadowMethod( cw, trait, core, m ); } } } } } protected void buildConstructorCore( MethodVisitor mv, String internalProxy, String internalWrapper, String internalCore, String descrCore, MixinInfo mixinInfo ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( proxyBaseClass ), "<init>", "()V", false ); if (mixinInfo != null) { for ( Class<?> mixinClass : mixinInfo.mixinClasses ) { try { // Constructor con = mixinClass.getConstructor( trait.getDefinedClass() ); Class actualArg = getPossibleConstructor( mixinClass, trait.getDefinedClass() ); mv.visitVarInsn( ALOAD, 0 ); mv.visitTypeInsn( NEW, Type.getInternalName( mixinClass ) ); mv.visitInsn( DUP ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( mixinClass ), "<init>", "(" + Type.getDescriptor( actualArg ) + ")V", false ); mv.visitFieldInsn( PUTFIELD, internalProxy, getMixinName( mixinClass ), Type.getDescriptor( mixinClass ) ); } catch (NoSuchMethodException nsme) { mv.visitVarInsn( ALOAD, 0 ); mv.visitTypeInsn( NEW, Type.getInternalName( mixinClass ) ); mv.visitInsn( DUP ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( mixinClass ), "<init>", "()V", false ); mv.visitFieldInsn( PUTFIELD, internalProxy, getMixinName( mixinClass ), Type.getDescriptor( mixinClass ) ); } } } mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitFieldInsn( PUTFIELD, internalProxy, "object", descrCore ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitFieldInsn( PUTFIELD, internalProxy, "store", Type.getDescriptor( TripleStore.class ) ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 3 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalProxy, "setTripleFactory", "(" + Type.getDescriptor( TripleFactory.class ) + ")V", false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 4 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalProxy, "setTypeCode", Type.getMethodDescriptor( Type.VOID_TYPE, Type.getType( BitSet.class ) ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitTypeInsn( NEW, internalWrapper ); mv.visitInsn( DUP ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitVarInsn( ALOAD, 3 ); mv.visitMethodInsn( INVOKESPECIAL, internalWrapper, "<init>", "(" + descrCore + Type.getDescriptor( TripleStore.class ) + Type.getDescriptor( TripleFactory.class ) + ")V", false ); mv.visitFieldInsn( PUTFIELD, internalProxy, "fields", Type.getDescriptor( Map.class ) ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_getDynamicProperties", "()" + Type.getDescriptor( Map.class ), false ); Label l0 = new Label(); mv.visitJumpInsn( IFNONNULL, l0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( NEW, Type.getInternalName( TripleBasedBean.class ) ); mv.visitInsn( DUP ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitVarInsn( ALOAD, 3 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( TripleBasedBean.class ), "<init>", "(" + Type.getDescriptor( Object.class ) + Type.getDescriptor( TripleStore.class ) + Type.getDescriptor( TripleFactory.class ) + ")V", false ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_setDynamicProperties", "(" + Type.getDescriptor( Map.class ) + ")V", false ); mv.visitLabel( l0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_getTraitMap", "()" + Type.getDescriptor( Map.class ), 
false ); Label l1 = new Label(); mv.visitJumpInsn( IFNONNULL, l1 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( NEW, Type.getInternalName( TraitTypeMapImpl.class ) ); mv.visitInsn( DUP ); mv.visitTypeInsn( NEW, Type.getInternalName( TripleBasedTypes.class ) ); mv.visitInsn( DUP ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitVarInsn( ALOAD, 3 ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( TripleBasedTypes.class ), "<init>", "(" + Type.getDescriptor( Object.class ) + Type.getDescriptor( TripleStore.class ) + Type.getDescriptor( TripleFactory.class ) + ")V", false ); mv.visitMethodInsn(INVOKESPECIAL, Type.getInternalName( TraitTypeMapImpl.class ), "<init>", "(" + Type.getDescriptor(Map.class )+ ")V", false ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_setTraitMap", "(" + Type.getDescriptor( Map.class ) + ")V", false ); mv.visitLabel( l1 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitVarInsn( ALOAD, 5 ); mv.visitMethodInsn( INVOKEVIRTUAL, internalCore, "_setBottomTypeCode", Type.getMethodDescriptor( Type.VOID_TYPE, Type.getType( BitSet.class ) ), false ); // core.addTrait mv.visitVarInsn( ALOAD, 1 ); mv.visitLdcInsn( trait.getName().endsWith(TraitFactoryImpl.SUFFIX ) ? trait.getName().replace(TraitFactoryImpl.SUFFIX , "" ) : trait.getName() ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn(INVOKEVIRTUAL, internalCore, "addTrait", Type.getMethodDescriptor( Type.VOID_TYPE, Type.getType( String.class ), Type.getType( Thing.class ) ), false ); } protected void initFields( MethodVisitor mv, String internalProxy ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ILOAD, 6 ); mv.visitMethodInsn( INVOKESPECIAL, internalProxy, "synchFields", Type.getMethodDescriptor( Type.VOID_TYPE, Type.BOOLEAN_TYPE ), false ); } protected void buildProxyAccessors( BitSet mask, ClassWriter cw, String masterName, ClassDefinition core, MixinInfo mixinInfo) { int j = 0; for ( FieldDefinition field : getTrait().getFieldsDefinitions() ) { boolean isSoftField = TraitRegistryImpl.isSoftField(field, j++, mask ); buildProxyAccessor( cw, masterName, core, mixinInfo, field, isSoftField ); } } protected void buildProxyAccessor( ClassWriter cw, String masterName, ClassDefinition core, MixinInfo mixinInfo, FieldDefinition field, boolean isSoftField ) { if ( core.isFullTraiting() ) { buildLogicalGetter( cw, field, masterName, core ); if ( ! isSoftField ) { buildHardSetter( cw, field, masterName, trait, core ); } else { buildSoftSetter( cw, field, masterName, core ); } } else { if ( isSoftField ) { if (mixinInfo == null || !mixinInfo.isMixinGetter( field )) { buildSoftGetter( cw, field, masterName ); buildSoftSetter( cw, field, masterName, core ); } } else { buildHardGetter( cw, field, masterName, trait, core ); buildHardSetter( cw, field, masterName, trait, core ); } } } protected void buildHardGetter( ClassVisitor cw, FieldDefinition field, String masterName, ClassDefinition proxy, ClassDefinition core ) { buildHardGetter( cw, field, masterName, proxy, core, BuildUtils.getterName( field.getName(), field.getTypeName() ), ACC_PUBLIC ); } protected void buildHardGetter( ClassVisitor cw, FieldDefinition field, String masterName, ClassDefinition proxy, ClassDefinition core, String getterName, int accessMode ) { Class fieldType = field.getType(); MethodVisitor mv = cw.visitMethod( accessMode, getterName, "()" + Type.getDescriptor( fieldType ), null, null); mv.visitCode(); TraitFactoryImpl.invokeExtractor(mv, masterName, core, field ); if ( ! 
BuildUtils.isPrimitive( field.getTypeName() ) ) { mv.visitTypeInsn( CHECKCAST, Type.getInternalName( fieldType ) ); } mv.visitInsn( BuildUtils.returnType ( fieldType.getName() ) ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } protected void buildHardSetter( ClassVisitor cw, FieldDefinition field, String masterName, ClassDefinition trait, ClassDefinition core ) { buildHardSetter(cw, field, masterName, trait, core, BuildUtils.setterName( field.getName()), ACC_PUBLIC ); } private void buildSoftSetter( ClassWriter cw, FieldDefinition field, String masterName, ClassDefinition core ) { buildSoftSetter(cw, field, masterName, core, BuildUtils.setterName( field.getName()), ACC_PUBLIC ); } protected void buildSoftSetter( ClassVisitor cw, FieldDefinition field, String proxy, ClassDefinition core, String setterName, int accessMode ) { String type = field.getTypeName(); MethodVisitor mv = cw.visitMethod( accessMode, setterName, "(" + Type.getDescriptor( field.getType() ) + ")V", null, null ); mv.visitCode(); if ( core.isFullTraiting() ) { logicalSetter( mv, field, proxy, core ); } mv.visitVarInsn( ALOAD, 0 ); mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( proxy ), "store", Type.getDescriptor( TripleStore.class ) ); mv.visitVarInsn( ALOAD, 0 ); mv.visitLdcInsn( field.resolveAlias() ); mv.visitVarInsn( BuildUtils.varType( type ), 1 ); if ( BuildUtils.isPrimitive( type ) ) { TraitFactoryImpl.valueOf(mv, type ); } mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( proxy ), "property", "(" + Type.getDescriptor( String.class ) + Type.getDescriptor( Object.class ) + ")" + Type.getDescriptor( Triple.class ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( TripleStore.class ), "put", "(" + Type.getDescriptor( Triple.class )+ ")Z", false ); mv.visitInsn( POP ); mv.visitInsn( RETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } private void buildSoftGetter( ClassWriter cw, FieldDefinition field, String masterName ) { buildSoftGetter( cw, field, masterName, BuildUtils.getterName( field.getName(), field.getTypeName() ), ACC_PUBLIC ); } protected void buildSoftGetter( ClassVisitor cw, FieldDefinition field, String proxy, String getterName, int accessMode ) { String type = field.getTypeName(); MethodVisitor mv = cw.visitMethod( accessMode, getterName, "()"+ Type.getDescriptor( field.getType() ), null, null ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( proxy ), "store", Type.getDescriptor( TripleStore.class ) ); mv.visitVarInsn( ALOAD, 0 ); mv.visitLdcInsn( field.resolveAlias() ); mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( proxy ), "propertyKey", "(" + Type.getDescriptor( String.class ) + ")" + Type.getDescriptor( Triple.class ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( TripleStore.class ), "get", "(" + Type.getDescriptor( Triple.class ) + ")" + Type.getDescriptor( Triple.class ), false); String actualType = BuildUtils.isPrimitive( type ) ? 
BuildUtils.box( type ) : type; mv.visitVarInsn( ASTORE, 1 ); mv.visitVarInsn( ALOAD, 1 ); Label l0 = new Label(); mv.visitJumpInsn( IFNULL, l0 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Triple.class ), "getValue", "()" + Type.getDescriptor( Object.class ), true ); mv.visitVarInsn( ASTORE, 2 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitTypeInsn( INSTANCEOF, BuildUtils.getInternalType( actualType ) ); Label l1 = new Label(); mv.visitJumpInsn( IFEQ, l1 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitTypeInsn( CHECKCAST, BuildUtils.getInternalType( actualType ) ); if ( BuildUtils.isPrimitive( type ) ) { TraitFactoryImpl.primitiveValue(mv, type ); mv.visitInsn( BuildUtils.returnType( type ) ); mv.visitLabel( l1 ); mv.visitInsn( BuildUtils.zero( type ) ); mv.visitInsn( BuildUtils.returnType( type ) ); mv.visitLabel( l0 ); mv.visitInsn( BuildUtils.zero( type ) ); mv.visitInsn( BuildUtils.returnType( type ) ); } else { mv.visitInsn( ARETURN ); mv.visitLabel( l1 ); mv.visitInsn( ACONST_NULL ); mv.visitInsn( ARETURN ); mv.visitLabel( l0 ); mv.visitInsn( ACONST_NULL ); mv.visitInsn( ARETURN ); } mv.visitMaxs( 0, 0 ); mv.visitEnd(); } protected void buildKeyedEquals( ClassVisitor cw, ClassDefinition classDef, String proxyType ) { MethodVisitor mv; mv = cw.visitMethod( ACC_PUBLIC, "equals", "(" + Type.getDescriptor( Object.class ) + ")Z", null, null ); mv.visitCode(); mv.visitVarInsn( ALOAD, 0 ); mv.visitVarInsn( ALOAD, 1 ); Label l0 = new Label(); mv.visitJumpInsn( IF_ACMPNE, l0 ); mv.visitInsn( ICONST_1 ); mv.visitInsn( IRETURN ); mv.visitLabel( l0 ); mv.visitVarInsn( ALOAD, 1 ); Label l1 = new Label(); mv.visitJumpInsn( IFNULL, l1 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( Object.class ), "getClass", "()" + Type.getDescriptor( Class.class ), false ); mv.visitVarInsn( ALOAD, 1 ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( Object.class ), "getClass", "()" + Type.getDescriptor( Class.class ), false ); Label l2 = new Label(); mv.visitJumpInsn( IF_ACMPEQ, l2 ); mv.visitLabel( l1 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l2 ); mv.visitVarInsn( ALOAD, 1 ); mv.visitTypeInsn( CHECKCAST, proxyType ); mv.visitVarInsn( ASTORE, 2 ); int x = 2; for ( FieldDefinition field : classDef.getFieldsDefinitions() ) { if ( field.isKey() ) { if ( ! 
BuildUtils.isPrimitive( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); Label l11 = new Label(); mv.visitJumpInsn( IFNULL, l11 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( field.getTypeName() ), "equals", "(" + Type.getDescriptor( Object.class ) + ")Z", false ); Label l12 = new Label(); mv.visitJumpInsn( IFNE, l12 ); Label l13 = new Label(); mv.visitJumpInsn( GOTO, l13 ); mv.visitLabel( l11 ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitJumpInsn( IFNULL, l12 ); mv.visitLabel( l13 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l12 ); } else if ( "double".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Double.class ), "compare", "(DD)I", false ); Label l5 = new Label(); mv.visitJumpInsn( IFEQ, l5 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l5 ); x = Math.max( x, 4 ); } else if ( "float".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Float.class ), "compare", "(FF)I", false ); Label l6 = new Label(); mv.visitJumpInsn( IFEQ, l6 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l6 ); } else if ( "long".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitInsn( LCMP ); Label l8 = new Label(); mv.visitJumpInsn( IFEQ, l8 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l8 ); x = Math.max( x, 4 ); } else { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + 
BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitVarInsn( ALOAD, 2 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); Label l4 = new Label(); mv.visitJumpInsn( IF_ICMPEQ, l4 ); mv.visitInsn( ICONST_0 ); mv.visitInsn( IRETURN ); mv.visitLabel( l4 ); } } } mv.visitInsn( ICONST_1 ); mv.visitInsn( IRETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } protected void buildKeyedHashCode( ClassVisitor cw, ClassDefinition classDef, String proxyType ) { MethodVisitor mv; { mv = cw.visitMethod( ACC_PUBLIC, "hashCode", "()I", null, null ); mv.visitCode(); mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ISTORE, 1 ); int x = 2; int y = 2; for ( FieldDefinition field : classDef.getFieldsDefinitions() ) { if ( field.isKey() ) { if ( ! BuildUtils.isPrimitive( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); Label l8 = new Label(); mv.visitJumpInsn( IFNULL, l8 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( field.getTypeName() ), "hashCode", "()I", false ); Label l9 = new Label(); mv.visitJumpInsn( GOTO, l9 ); mv.visitLabel( l8 ); mv.visitInsn( ICONST_0 ); mv.visitLabel( l9 ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); } else if ( "double".equals( field.getTypeName() ) ) { mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitInsn( DCONST_0 ); mv.visitInsn( DCMPL ); Label l2 = new Label(); mv.visitJumpInsn( IFEQ, l2 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Double.class ), "doubleToLongBits", "(D)J", false ); Label l3 = new Label(); mv.visitJumpInsn( GOTO, l3 ); mv.visitLabel( l2 ); mv.visitInsn( LCONST_0 ); mv.visitLabel( l3 ); mv.visitVarInsn( LSTORE, 2 ); mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( LLOAD, 2 ); mv.visitVarInsn( LLOAD, 2 ); mv.visitIntInsn( BIPUSH, 32 ); mv.visitInsn( LUSHR ); mv.visitInsn( LXOR ); mv.visitInsn( L2I ); mv.visitInsn( IADD); mv.visitVarInsn( ISTORE, 1 ); x = Math.max( 6, x ); y = Math.max( 4, y ); } else if ( "boolean".equals( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); Label l4 = new Label(); mv.visitJumpInsn( IFEQ, l4 ); mv.visitInsn( ICONST_1 ); Label l5 = new Label(); mv.visitJumpInsn( GOTO, l5 ); mv.visitLabel( l4 ); mv.visitInsn( ICONST_0 ); mv.visitLabel( l5 ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); } else if ( 
"float".equals( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitInsn( FCONST_0 ); mv.visitInsn( FCMPL ); Label l6 = new Label(); mv.visitJumpInsn( IFEQ, l6 ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitMethodInsn( INVOKESTATIC, Type.getInternalName( Float.class ), "floatToIntBits", "(F)I", false ); Label l7 = new Label(); mv.visitJumpInsn( GOTO, l7 ); mv.visitLabel( l6 ); mv.visitInsn( ICONST_0 ); mv.visitLabel( l7 ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); x = Math.max( 3, x ); } else if ( "long".equals( field.getTypeName() ) ) { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ); mv.visitIntInsn( BIPUSH, 32 ); mv.visitInsn( LUSHR ); mv.visitInsn( LXOR ); mv.visitInsn( L2I ); mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); x = Math.max( 6, x ); } else { mv.visitIntInsn( BIPUSH, 31 ); mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IMUL ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, proxyType, BuildUtils.getterName( field.getName(), field.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( field.getTypeName() ), false ) ; mv.visitInsn( IADD ); mv.visitVarInsn( ISTORE, 1 ); } } } mv.visitVarInsn( ILOAD, 1 ); mv.visitInsn( IRETURN ); mv.visitMaxs( 0, 0 ); mv.visitEnd(); } } protected void buildCommonMethods( ClassWriter cw, String proxy ) { MethodVisitor mv; { mv = cw.visitMethod( ACC_PUBLIC, "toString", "()" + Type.getDescriptor( String.class ), null, null ); mv.visitCode(); mv.visitTypeInsn( NEW, Type.getInternalName( StringBuilder.class ) ); mv.visitInsn( DUP ); mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( StringBuilder.class ), "<init>", "()V", false ); mv.visitLdcInsn( "(@" + proxy + ") : " ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( StringBuilder.class ), "append", "(" + Type.getDescriptor( String.class ) + ")" + Type.getDescriptor( StringBuilder.class ), false ); mv.visitVarInsn( ALOAD, 0 ); mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( proxy ), "getFields", "()" + Type.getDescriptor( Map.class ), false ); mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Map.class ) , "entrySet", "()" + Type.getDescriptor( Set.class ), true ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( Object.class ), "toString", "()" + Type.getDescriptor( String.class ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( StringBuilder.class ), "append", "(" + Type.getDescriptor( String.class ) + ")" + Type.getDescriptor( StringBuilder.class ), false ); mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( StringBuilder.class ), "toString", "()" + Type.getDescriptor( String.class ), false ); mv.visitInsn( ARETURN ); 
mv.visitMaxs( 0, 0 ); mv.visitEnd(); } } protected void buildExtendedMethods( ClassWriter cw, ClassDefinition trait, ClassDefinition core ) { buildSynchFields(cw, TraitFactoryImpl.getProxyName(trait, core ), trait, core ); } }
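/*
 * Illustrative sketch, not part of the original sources: the ASM sequences emitted by
 * buildKeyedEquals(...) and buildKeyedHashCode(...) above correspond, per key field, to the
 * plain-Java pattern below. The class and field names are hypothetical and the generated
 * proxy actually goes through its getters, but the null-safe equals check, the
 * Double.compare test, the LCMP test for longs, and the 31-based hash accumulation with
 * doubleToLongBits and the (bits ^ (bits >>> 32)) fold are the shapes the bytecode encodes.
 * The identity/type preamble in equals() is only there to make the sketch self-contained.
 */
class KeyedFieldsSketch {

    private String name;   // hypothetical reference-typed key field
    private double score;  // hypothetical double key field
    private long id;       // hypothetical long key field

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        KeyedFieldsSketch that = (KeyedFieldsSketch) o;
        // reference field: null-safe comparison (IFNULL / equals / IFNE in the bytecode)
        if (name != null ? !name.equals(that.name) : that.name != null) return false;
        // double field: Double.compare, as in the INVOKESTATIC Double.compare branch
        if (Double.compare(that.score, score) != 0) return false;
        // long field: LCMP in the bytecode
        if (id != that.id) return false;
        return true;
    }

    @Override
    public int hashCode() {
        int result = 31;                                              // BIPUSH 31; ISTORE 1
        result = 31 * result + (name != null ? name.hashCode() : 0);
        long bits = score != 0.0d ? Double.doubleToLongBits(score) : 0L;
        result = 31 * result + (int) (bits ^ (bits >>> 32));
        result = 31 * result + (int) (id ^ (id >>> 32));
        return result;
    }
}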
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package agent.dbgeng.manager.impl; import static ghidra.async.AsyncUtils.*; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.function.Supplier; import com.google.common.collect.RangeSet; import agent.dbgeng.dbgeng.*; import agent.dbgeng.dbgeng.DebugClient.DebugAttachFlags; import agent.dbgeng.manager.*; import agent.dbgeng.manager.DbgManager.ExecSuffix; import agent.dbgeng.manager.cmd.*; import ghidra.async.TypeSpec; import ghidra.comm.util.BitmaskSet; import ghidra.dbg.target.TargetAttachable; import ghidra.util.Msg; public class DbgProcessImpl implements DbgProcess { private final Map<DebugThreadId, DbgThreadImpl> threads = new LinkedHashMap<>(); private final Map<DebugThreadId, DbgThread> unmodifiableThreads = Collections.unmodifiableMap(threads); private final Map<String, DbgModuleImpl> modules = new LinkedHashMap<>(); private final Map<String, DbgModule> unmodifiableModules = Collections.unmodifiableMap(modules); private final NavigableMap<Long, DbgSectionImpl> mappings = new TreeMap<>(); private final NavigableMap<Long, DbgSectionImpl> unmodifiableMappings = Collections.unmodifiableNavigableMap(mappings); private DbgManagerImpl manager; private DebugProcessId id; private Long pid; private Long exitCode; /** * Construct a new inferior * * @param manager the manager creating the process * @param id the dbgeng-assigned process ID */ public DbgProcessImpl(DbgManagerImpl manager, DebugProcessId id, long pid) { this.manager = manager; this.id = id; this.pid = pid; } public DbgProcessImpl(DbgManagerImpl manager) { this.manager = manager; } @Override public String toString() { return "<DbgProcess id=" + id + ",pid=" + pid + ",exitCode=" + exitCode + ">"; } @Override public DebugProcessId getId() { return id; } public void setId(DebugProcessId id) { this.id = id; } @Override public Long getPid() { return pid; } /** * Set the exit code * * @param exitCode the exit code (status or signal) */ public void setExitCode(Long exitCode) { this.exitCode = exitCode; } @Override public Long getExitCode() { return exitCode; } /** * Add this process to the manager's list of processes, because of a given cause * * @param cause the cause of the new inferior */ public void add() { manager.processes.put(id, this); //manager.getEventListeners().fire.processAdded(this, DbgCause.Causes.UNCLAIMED); //manager.addProcess(this, cause); } /** * Remove this process from the manager's list of processes, because of a given cause * * @param cause the cause of removal */ public void remove(DbgCause cause) { manager.removeProcess(id, cause); } @Override public CompletableFuture<Void> remove() { return manager.removeProcess(this); } /** * Use {@link DbgThreadImpl#add()} instead * * @param thread the thread to add */ public void addThread(DbgThreadImpl thread) { DbgThreadImpl exists = threads.get(thread.getId()); if (exists != null) { Msg.warn(this, "Adding pre-existing 
thread " + exists); //throw new IllegalArgumentException("There is already thread " + exists); } threads.put(thread.getId(), thread); } @Override public DbgThreadImpl getThread(DebugThreadId tid) { DbgThreadImpl result = threads.get(tid); if (result == null) { throw new IllegalArgumentException("There is no thread with id " + tid); } return result; } /** * Use {@link DbgThreadImpl#remove()} instead * * @param tid the ID of the thread to remove */ public void removeThread(DebugThreadId tid) { if (threads.remove(tid) == null) { throw new IllegalArgumentException("There is no thread with id " + tid); } } /** * Use {@link DbgModuleImpl#add()} instead * * @param module the thread to add */ public void addModule(DbgModuleImpl module) { DbgModuleImpl exists = modules.get(module.getInfo().toString()); if (exists != null) { throw new IllegalArgumentException("There is already module " + exists); } modules.put(module.getInfo().toString(), module); } @Override public DbgModuleImpl getModule(String id) { DbgModuleImpl result = modules.get(id); if (result == null) { throw new IllegalArgumentException("There is no module with id " + id); } return result; } /** * Use {@link DbgModulesImpl#remove()} instead * * @param id the ID of the thread to remove */ public void removeModule(String id) { if (modules.remove(id) == null) { throw new IllegalArgumentException("There is no module with id " + id); } } @Override public Map<DebugThreadId, DbgThread> getKnownThreads() { return unmodifiableThreads; } public Map<DebugThreadId, DbgThreadImpl> getKnownThreadsImpl() { return threads; } @Override public CompletableFuture<Map<DebugThreadId, DbgThread>> listThreads() { return manager.execute(new DbgListThreadsCommand(manager, this)); } @Override public Map<String, DbgModule> getKnownModules() { return unmodifiableModules; } @Override public CompletableFuture<Map<String, DbgModule>> listModules() { return manager.execute(new DbgListModulesCommand(manager, this)); } @Override public Map<Long, DbgSectionImpl> getKnownMappings() { return unmodifiableMappings; } @Override public CompletableFuture<Map<Long, DbgSectionImpl>> listMappings() { return manager.execute(new DbgListMappingsCommand(manager, this)); } @Override public CompletableFuture<Void> setActive() { return manager.setActiveProcess(this); } @Override public CompletableFuture<Void> fileExecAndSymbols(String file) { return sequence(TypeSpec.VOID).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute(new DbgFileExecAndSymbolsCommand(manager, file)).handle(seq::exit); }).finish(); } @Override public CompletableFuture<DbgThread> run() { return sequence(TypeSpec.cls(DbgThread.class)).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute(new DbgRunCommand(manager)).handle(seq::exit); }).finish(); } @Override public CompletableFuture<Set<DbgThread>> attach(long toPid) { return sequence(TypeSpec.cls(DbgThread.class).set()).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { pid = toPid; // TODO: Wait for successful completion? 
manager.execute( new DbgAttachCommand(manager, this, BitmaskSet.of(DebugAttachFlags.DEFAULT))) .handle(seq::exit); }).finish(); } @Override public CompletableFuture<Set<DbgThread>> reattach(TargetAttachable attachable) { return sequence(TypeSpec.cls(DbgThread.class).set()).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute( new DbgAttachCommand(manager, this, BitmaskSet.of(DebugAttachFlags.EXISTING))) .handle(seq::exit); }).finish(); } @Override public CompletableFuture<Void> detach() { return sequence(TypeSpec.VOID).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute(new DbgDetachCommand(manager, this)).handle(seq::exit); }).finish(); } @Override public CompletableFuture<Void> kill() { return sequence(TypeSpec.VOID).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute(new DbgKillCommand(manager)).handle(seq::exit); }).finish(); } @Override public CompletableFuture<Void> cont() { return sequence(TypeSpec.VOID).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute(new DbgContinueCommand(manager)).handle(seq::exit); }).finish(); } @Override public CompletableFuture<Void> step(ExecSuffix suffix) { return sequence(TypeSpec.VOID).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute(new DbgStepCommand(manager, null, suffix)).handle(seq::exit); }).finish(); } @Override public CompletableFuture<Void> step(Map<String, ?> args) { return sequence(TypeSpec.VOID).then((seq) -> { setActive().handle(seq::next); }).then((seq) -> { manager.execute(new DbgStepCommand(manager, null, args)).handle(seq::exit); }).finish(); } protected <T> CompletableFuture<T> preferThread( Function<DbgThreadImpl, CompletableFuture<T>> viaThread, Supplier<CompletableFuture<T>> viaThis) { Optional<DbgThreadImpl> first = threads.values().stream().findFirst(); if (first.isPresent()) { return viaThread.apply(first.get()); } return setActive().thenCompose(__ -> viaThis.get()); } @Override public CompletableFuture<RangeSet<Long>> readMemory(long addr, ByteBuffer buf, int len) { // I can't imagine this working without a thread.... return preferThread(t -> t.readMemory(addr, buf, len), () -> manager.execute(new DbgReadMemoryCommand(manager, addr, buf, len))); } @Override public CompletableFuture<Void> writeMemory(long addr, ByteBuffer buf, int len) { // I can't imagine this working without a thread.... return preferThread(t -> t.writeMemory(addr, buf, len), () -> manager.execute(new DbgWriteMemoryCommand(manager, addr, buf, len))); } @Override public CompletableFuture<String> consoleCapture(String command) { // TODO Auto-generated method stub return null; } protected void moduleLoaded(DebugModuleInfo info) { if (!modules.containsKey(info.getModuleName())) { DbgModuleImpl module = new DbgModuleImpl(manager, this, info); modules.put(info.toString(), module); } } protected void moduleUnloaded(DebugModuleInfo info) { modules.remove(info.toString()); } protected void threadCreated(DbgThreadImpl thread) { threads.put(thread.getId(), thread); } public void threadExited(DebugThreadId id) { threads.remove(id); } @Override public CompletableFuture<String> evaluate(String expression) { return manager.execute(new DbgEvaluateCommand(manager, expression)); } }
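/*
 * Illustrative sketch, not from the Ghidra sources: preferThread(...) above routes an
 * operation through the first known thread when one exists and otherwise calls setActive()
 * on the process before running the fallback. The generic shape of that decision, written
 * over plain CompletableFuture, looks like this; all names here are hypothetical.
 */
class PreferFirstSketch {
    static <T, R> java.util.concurrent.CompletableFuture<R> preferFirst(
            java.util.Collection<T> candidates,
            java.util.function.Function<T, java.util.concurrent.CompletableFuture<R>> viaCandidate,
            java.util.function.Supplier<java.util.concurrent.CompletableFuture<R>> fallback) {
        // use the first candidate if present, otherwise defer to the fallback supplier
        return candidates.stream().findFirst()
                .map(viaCandidate)
                .orElseGet(fallback);
    }
}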
/* Copyright 2012 David Hadka
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
package org.moeaframework.problem.tsplib;

import java.io.BufferedReader;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

/**
 * Stores the edges in a graph.
 */
public class EdgeData extends DistanceTable {

    /**
     * The number of nodes represented in this graph.
     */
    private final int size;

    /**
     * The format of the edge data section.
     */
    private final EdgeDataFormat format;

    /**
     * The edges.
     */
    private final List<Edge> edges;

    /**
     * Constructs a new, empty graph with no edges.
     *
     * @param size the number of nodes represented in this graph
     * @param format the format of the edge data section
     */
    public EdgeData(int size, EdgeDataFormat format) {
        super();
        this.size = size;
        this.format = format;
        edges = new ArrayList<Edge>();
    }

    /**
     * Reads the next line of adjacent edges, adding the parsed values to the
     * queue.
     *
     * @param reader the reader containing the adjacent edge data
     * @param entries the queue of identifiers read by this method
     * @throws IOException if an I/O error occurred while reading the adjacent
     *         edge data
     */
    private void readNextLine(BufferedReader reader, Queue<Integer> entries) throws IOException {
        String line = reader.readLine();

        if (line == null) {
            throw new EOFException("unexpectedly reached EOF");
        }

        String[] tokens = line.trim().split("\\s+");

        for (int i = 0; i < tokens.length; i++) {
            entries.offer(Integer.parseInt(tokens[i]));
        }
    }

    @Override
    public void load(BufferedReader reader) throws IOException {
        String line = null;

        switch (format) {
        case EDGE_LIST:
            while ((line = reader.readLine()) != null) {
                line = line.trim();

                if (line.equals("-1")) {
                    break;
                } else {
                    String[] tokens = line.split("\\s+");
                    int id1 = Integer.parseInt(tokens[0]);
                    int id2 = Integer.parseInt(tokens[1]);

                    addEdge(id1, id2);
                }
            }
            break;
        case ADJ_LIST:
            int currentId = -1;
            Queue<Integer> values = new LinkedList<Integer>();

            readNextLine(reader, values);

            // keep consuming while a node is still open or the next entry is not the -1
            // terminator; with && the loop would exit immediately since currentId starts at -1
            while ((currentId != -1) || (values.peek() != -1)) {
                if (currentId == -1) {
                    currentId = values.poll();
                } else {
                    int id = values.poll();

                    if (id == -1) {
                        currentId = -1;
                    } else {
                        addEdge(currentId, id);
                    }
                }

                if (values.isEmpty()) {
                    readNextLine(reader, values);
                }
            }
            break;
        default:
            throw new IllegalArgumentException("edge format not supported");
        }
    }

    /**
     * Adds an edge to this graph.
     *
     * @param id1 the identifier of the first node
     * @param id2 the identifier of the second node
     * @throws IllegalArgumentException if a node with the specified identifier
     *         does not exist
     */
    private void addEdge(int id1, int id2) {
        if ((id1 < 1) || (id1 > size)) {
            throw new IllegalArgumentException("no node with identifier " + id1);
        }

        if ((id2 < 1) || (id2 > size)) {
            throw new IllegalArgumentException("no node with identifier " + id2);
        }

        edges.add(new Edge(id1, id2));
    }

    /**
     * Returns the edges contained in this graph. Changes to the returned
     * list will be reflected in this graph.
     *
     * @return the edges contained in this graph
     */
    public List<Edge> getEdges() {
        return edges;
    }

    @Override
    public int[] listNodes() {
        int[] nodes = new int[size];

        for (int i = 1; i <= size; i++) {
            nodes[i-1] = i;
        }

        return nodes;
    }

    @Override
    public int[] getNeighborsOf(int id) {
        if ((id < 1) || (id > size)) {
            throw new IllegalArgumentException("no node with identifier " + id);
        }

        List<Integer> neighbors = new ArrayList<Integer>();

        for (Edge edge : edges) {
            if (edge.hasEndpoint(id)) {
                neighbors.add(edge.getOppositeEndpoint(id));
            }
        }

        // copy neighbors to an array
        int[] result = new int[neighbors.size()];

        for (int i = 0; i < neighbors.size(); i++) {
            result[i] = neighbors.get(i);
        }

        return result;
    }

    /**
     * {@inheritDoc}
     *
     * The distance between two nodes is {@code 1} when an edge exists, or
     * {@code Double.POSITIVE_INFINITY} when no such edge exists.
     */
    @Override
    public double getDistanceBetween(int id1, int id2) {
        if (isNeighbor(id1, id2)) {
            return 1.0;
        } else {
            return Double.POSITIVE_INFINITY;
        }
    }

}
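/*
 * Minimal usage sketch, not part of the original sources: it feeds a small EDGE_LIST
 * section (terminated by the -1 sentinel that load() expects) into an EdgeData instance
 * for a three-node graph and queries the resulting adjacency. EdgeDataFormat.EDGE_LIST is
 * the enum constant handled by the switch in load() above; the input text is made up.
 */
class EdgeDataUsageSketch {
    public static void main(String[] args) throws java.io.IOException {
        String section = "1 2\n2 3\n-1\n";
        EdgeData data = new EdgeData(3, EdgeDataFormat.EDGE_LIST);
        data.load(new java.io.BufferedReader(new java.io.StringReader(section)));
        // node 2 is adjacent to 1 and 3; unconnected pairs report an infinite distance
        System.out.println(java.util.Arrays.toString(data.getNeighborsOf(2)));
        System.out.println(data.getDistanceBetween(1, 3));
    }
}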
/* * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jitsi.impl.neomedia.codec.audio.g729; import java.awt.*; import java.util.*; import javax.media.*; import javax.media.format.*; import net.sf.fmj.media.*; import org.jitsi.impl.neomedia.codec.*; import org.jitsi.service.neomedia.control.*; import org.jitsi.util.*; /** * * @author Lubomir Marinov */ public class JavaEncoder extends AbstractCodec2 implements AdvancedAttributesAwareCodec { private static final short BIT_1 = Ld8k.BIT_1; private static final int L_FRAME = Ld8k.L_FRAME; private static final int SERIAL_SIZE = Ld8k.SERIAL_SIZE; private static final int INPUT_FRAME_SIZE_IN_BYTES = 2 * L_FRAME; private static final int OUTPUT_FRAME_SIZE_IN_BYTES = L_FRAME / 8; /** * The count of the output frames to packetize. By default we packetize * 2 audio frames in one G729 packet. */ private int OUTPUT_FRAMES_COUNT = 2; private Coder coder; private int outFrameCount; /** * The previous input if it was less than the input frame size and which is * to be prepended to the next input in order to form a complete input * frame. */ private byte[] prevIn; /** * The length of the previous input if it was less than the input frame size * and which is to be prepended to the next input in order to form a * complete input frame. */ private int prevInLength; private short[] serial; private short[] sp16; /** * The duration an output <tt>Buffer</tt> produced by this <tt>Codec</tt> * in nanosecond. We packetize 2 audio frames in one G729 packet by default. */ private int duration = OUTPUT_FRAME_SIZE_IN_BYTES * OUTPUT_FRAMES_COUNT * 1000000; /** * Initializes a new <code>JavaEncoder</code> instance. */ public JavaEncoder() { super( "G.729 Encoder", AudioFormat.class, new AudioFormat[] { new AudioFormat( AudioFormat.G729_RTP, 8000, AudioFormat.NOT_SPECIFIED, 1) }); inputFormats = new AudioFormat[] { new AudioFormat( AudioFormat.LINEAR, 8000, 16, 1, AudioFormat.LITTLE_ENDIAN, AudioFormat.SIGNED) }; addControl(this); } /** * Get the output format. * * @return output format * @see net.sf.fmj.media.AbstractCodec#getOutputFormat() */ @Override public Format getOutputFormat() { Format f = super.getOutputFormat(); if ((f != null) && (f.getClass() == AudioFormat.class)) { AudioFormat af = (AudioFormat) f; f = setOutputFormat( new AudioFormat( af.getEncoding(), af.getSampleRate(), af.getSampleSizeInBits(), af.getChannels(), af.getEndian(), af.getSigned(), af.getFrameSizeInBits(), af.getFrameRate(), af.getDataType()) { private static final long serialVersionUID = 0L; @Override public long computeDuration(long length) { return JavaEncoder.this.duration; } }); } return f; } @Override protected void discardOutputBuffer(Buffer outputBuffer) { super.discardOutputBuffer(outputBuffer); outFrameCount = 0; } /* * Implements AbstractCodec2#doClose(). 
*/ @Override protected void doClose() { prevIn = null; prevInLength = 0; sp16 = null; serial = null; coder = null; } /** * Opens this <tt>Codec</tt> and acquires the resources that it needs to * operate. A call to {@link PlugIn#open()} on this instance will result in * a call to <tt>doOpen</tt> only if {@link AbstractCodec#opened} is * <tt>false</tt>. All required input and/or output formats are assumed to * have been set on this <tt>Codec</tt> before <tt>doOpen</tt> is called. * * @throws ResourceUnavailableException if any of the resources that this * <tt>Codec</tt> needs to operate cannot be acquired * @see AbstractCodec2#doOpen() */ @Override protected void doOpen() throws ResourceUnavailableException { prevIn = new byte[INPUT_FRAME_SIZE_IN_BYTES]; prevInLength = 0; sp16 = new short[L_FRAME]; serial = new short[SERIAL_SIZE]; coder = new Coder(); outFrameCount = 0; } /* * Implements AbstractCodec2#doProcess(Buffer, Buffer). */ @Override protected int doProcess(Buffer inBuffer, Buffer outBuffer) { byte[] in = (byte[]) inBuffer.getData(); int inLength = inBuffer.getLength(); int inOffset = inBuffer.getOffset(); if ((prevInLength + inLength) < INPUT_FRAME_SIZE_IN_BYTES) { System.arraycopy( in, inOffset, prevIn, prevInLength, inLength); prevInLength += inLength; return BUFFER_PROCESSED_OK | OUTPUT_BUFFER_NOT_FILLED; } int readShorts = 0; if (prevInLength > 0) { readShorts += readShorts(prevIn, 0, sp16, 0, prevInLength / 2); prevInLength = 0; } readShorts = readShorts( in, inOffset, sp16, readShorts, sp16.length - readShorts); int readBytes = 2 * readShorts; inLength -= readBytes; inBuffer.setLength(inLength); inOffset += readBytes; inBuffer.setOffset(inOffset); coder.process(sp16, serial); byte[] output = validateByteArraySize( outBuffer, outBuffer.getOffset() + OUTPUT_FRAMES_COUNT * OUTPUT_FRAME_SIZE_IN_BYTES, true); packetize( serial, output, outBuffer.getOffset() + OUTPUT_FRAME_SIZE_IN_BYTES * outFrameCount); outBuffer.setLength(outBuffer.getLength() + OUTPUT_FRAME_SIZE_IN_BYTES); outBuffer.setFormat(outputFormat); int ret = BUFFER_PROCESSED_OK; if (outFrameCount == (OUTPUT_FRAMES_COUNT - 1)) outFrameCount = 0; else { outFrameCount++; ret |= OUTPUT_BUFFER_NOT_FILLED; } if (inLength > 0) ret |= INPUT_BUFFER_NOT_CONSUMED; if(ret == BUFFER_PROCESSED_OK) { updateOutput( outBuffer, getOutputFormat(), outBuffer.getLength(), outBuffer.getOffset()); outBuffer.setDuration(duration); } return ret; } private void packetize(short[] serial, byte[] outFrame, int outFrameOffset) { Arrays.fill( outFrame, outFrameOffset, outFrameOffset + L_FRAME / 8, (byte) 0); for (int s = 0; s < L_FRAME; s++) { if (BIT_1 == serial[2 + s]) { int o = outFrameOffset + s / 8; int out = outFrame[o]; out |= 1 << (7 - (s % 8)); outFrame[o] = (byte) (out & 0xFF); } } } private static int readShorts( byte[] in, int inOffset, short[] out, int outOffset, int outLength) { for (int o=outOffset, i=inOffset; o<outLength; o++, i+=2) out[o] = ArrayIOUtils.readShort(in, i); return outLength; } /** * Sets the additional attributes to <tt>attributes</tt> * * @param attributes The additional attributes to set */ @Override public void setAdvancedAttributes(Map<String, String> attributes) { try { String s = attributes.get("ptime"); if ((s != null) && (s.length() != 0)) { int ptime = Integer.parseInt(s); OUTPUT_FRAMES_COUNT = ptime / OUTPUT_FRAME_SIZE_IN_BYTES; duration = OUTPUT_FRAME_SIZE_IN_BYTES * OUTPUT_FRAMES_COUNT * 1000000; } } catch (Exception e) { // Ignore } } /** * Not used. * @return null as it is not used. 
*/ @Override public Component getControlComponent() { return null; } }
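/*
 * Standalone sketch, an assumption rather than part of the codec: it restates the bit
 * packing performed by packetize(...) above. Each payload bit s of a G.729 frame is
 * written MSB-first, going to bit (7 - s % 8) of output byte (s / 8), which is how 80
 * serial bits become the 10-byte frame described by OUTPUT_FRAME_SIZE_IN_BYTES.
 */
class BitPackingSketch {
    static byte[] pack(boolean[] bits) {
        byte[] out = new byte[(bits.length + 7) / 8];
        for (int s = 0; s < bits.length; s++) {
            if (bits[s]) {
                // set the s-th bit, most significant bit of each byte first
                out[s / 8] |= (byte) (1 << (7 - (s % 8)));
            }
        }
        return out;
    }
}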
package com.alsiry.alquran; import com.alsiry.alquran.GozaTap.BookMarks; import com.alsiry.alquran.GozaTap.GozaName; import com.alsiry.alquran.GozaTap.MakeMadany; import com.alsiry.alquran.GozaTap.Pading; import com.alsiry.alquran.TextActor.TextAlign; import com.alsiry.alquran.book.Stages; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.graphics.g2d.Sprite; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.InputEvent; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.scenes.scene2d.utils.ActorGestureListener; public class PageTap extends Table{ int surah_page[]={ 1,2,50,77, 106,128,151,177, 187,208,221,235, 249,255,262,267, 282,293,305,312, 322,332,342,350, 359,367,377,385, 396,404,411,415, 418,428,434,440, 446,453,458,467, 477,483,489,496, 499,502,507,511, 515,518,520,523, 526,528,531,534, 537,542,545,549, 551,553,554,556, 558,560,562,564, 566,568,570,572, 574,575,577,578, 580,582,583,585, 586,587,587,589, 590,591,591,592, 593,594,595,595, 596,596,597,597, 598,598,599,599, 600,600,601,601, 601,602,602,602, 603,603,603,604, 604,604,605 //for exeption }; public static final float tab_width = Gdx.graphics.getWidth(); public static final float tab_height = Gdx.graphics.getHeight()/12; final float tab_no_width = .13f*tab_width; final float sorah_page_no_width = .13f*tab_width ; final float sorah_name_width =.30f* tab_width; final float sorah_tanzel_width = .1f*tab_width ; final float ayat_widht = .13f*tab_width; final float sorah_tab_book_mark_width = .07f*tab_width ; final float tab_no_text_scale = tab_height /250 ; final float tab_page_text_scale = tab_height /250 ; final float ayat_no_text_scale = tab_height/300 ; public static final float tab_pading_height = tab_height/100; boolean suar_state[]={ true,false,false,false , false,true,true,false , false,true,true,true, false,true,true,true, true,true,true,true, true,false,true,false, true,true,true,true, true,true,true,true, false,true,true,true, true,true,true,true, true,true,true,true, true,true,false,false, false,true,true,true, true,true,false,true, false,false,false,false , false,false,false,false , false,false,true,true, true,true,true,true, true,true,true,false, true,true,true,true, true,true,true,true, true,true,true,true, true,true,true,true, true,true,true,true, true,false,false,true, true,true,true,true, true,true,true,true, true,false,true,true, true,true }; //88 class MakeMadany extends Actor{ float width , height , x , y ; TextureRegion goza_name_texture; Sprite Goza_name_sprite ; int no ; TextActor surah_page_no ; float text_y_deferance ; public MakeMadany(int page_no , boolean state ){ // TODO Auto-generated constructor stub goza_name_texture = book.get_region(state?"makah":"madinah"); Goza_name_sprite = new Sprite(goza_name_texture); height = tab_height ; width = height; Goza_name_sprite.setSize(width, height); no = page_no ; x = sorah_page_no_width+(sorah_tanzel_width-width)/2 ; y = book.select_page_pane.getScrollY()+Gdx.graphics.getHeight()-((tab_height*no+tab_pading_height*(no-1))); text_y_deferance=2*tab_height/3; surah_page_no = new TextActor(""+no, null, sorah_page_no_width, TextAlign.align_cinter, tab_no_text_scale,0,text_y_deferance+y); } @Override public void draw(Batch batch, float parentAlpha) { // TODO Auto-generated method stub x = sorah_page_no_width+(sorah_tanzel_width-width)/2 ; y = 
book.select_page_pane.getScrollY()+Gdx.graphics.getHeight()-((tab_height*no+tab_pading_height*(no-1))); Goza_name_sprite.setPosition(x, y); Goza_name_sprite.setOriginCenter(); Goza_name_sprite.setScale(.7f); Goza_name_sprite.draw(batch); surah_page_no.set_position(0,text_y_deferance+ y); surah_page_no.draw(batch, parentAlpha); } } class BookMarks extends Actor{ TextureRegion bookmark_texture ; Sprite bookmark_sprite ; int bookmark_type , no ; boolean bookmark_state = false ; float x , y ; public BookMarks(int sorah_no , int type_from1_to3) { // TODO Auto-generated constructor stub bookmark_type = type_from1_to3 ; bookmark_texture = book.images_textures_atlas.findRegion("bookmark"+bookmark_type); bookmark_sprite = new Sprite(bookmark_texture) ; bookmark_sprite.setSize(sorah_tab_book_mark_width, tab_height); no = sorah_no ; } @Override public void draw(Batch batch, float parentAlpha) { // TODO Auto-generated method stub int checker = 0 ; float r,g,b ; r=g=b=0 ; super.draw(batch, parentAlpha); switch (bookmark_type) { case 1: checker = book.bookmark_a_page; r =1 ; g =.105f ; b =0.694f; break; case 2: checker = book.bookmark_b_page; r =.105f ; g =1 ; b =0.843f; break; case 3: checker = book.bookmark_c_page; r =1 ; g =.862f; b =.105f ; break; default: break; } if ((checker == no )) { bookmark_state = true; } else bookmark_state = false; if(bookmark_state){ x = (sorah_page_no_width+sorah_tanzel_width)+(sorah_tab_book_mark_width)*(bookmark_type-1) ; y = book.select_page_pane.getScrollY()+Gdx.graphics.getHeight()-((tab_height*no+tab_pading_height*(no-1))); bookmark_sprite.setColor(r, g, b, .8f); bookmark_sprite.setPosition(x, y); bookmark_sprite.setOriginCenter(); bookmark_sprite.setScale(.5f, .95f); bookmark_sprite.draw(batch); } } } class GozaName extends Actor{ float width , height , x , y ; TextureRegion goza_name_texture; Sprite Goza_name_sprite ; int no ; TextActor no_of_surah ; float text_y_deferance ; public GozaName(int goza_no , int page_no) { // TODO Auto-generated constructor stub goza_name_texture = book.images_textures_atlas.findRegion("s"+goza_no); Goza_name_sprite = new Sprite(goza_name_texture); width = sorah_name_width ; height = tab_height ; Goza_name_sprite.setSize(width, height); no = page_no ; text_y_deferance=2*tab_height/3; x = (sorah_page_no_width+sorah_tanzel_width+ayat_widht)+sorah_tab_book_mark_width*3 ; y = book.select_page_pane.getScrollY()+Gdx.graphics.getHeight()-((tab_height*no+tab_pading_height*(no-1))); no_of_surah = new TextActor(page_no+"", null, tab_no_width, TextAlign.align_cinter,tab_no_text_scale,x+sorah_name_width, text_y_deferance+y); } @Override public void draw(Batch batch, float parentAlpha) { // TODO Auto-generated method stub x = (sorah_page_no_width+sorah_tanzel_width+ayat_widht)+sorah_tab_book_mark_width*3 ; y = book.select_page_pane.getScrollY()+Gdx.graphics.getHeight()-((tab_height*no+tab_pading_height*(no-1))); Goza_name_sprite.setPosition(x, y); Goza_name_sprite.setOriginCenter(); Goza_name_sprite.setScale(.9f,.7f); Goza_name_sprite.draw(batch); no_of_surah.set_position(x+sorah_name_width,text_y_deferance+ y); no_of_surah.draw(batch, parentAlpha); } } class Pading extends Actor{ TextureRegion bar ; Sprite pading_sprite; float x , y , width ,height ; int no ; public Pading(int no_of_me) { // TODO Auto-generated constructor stub bar = book.loading_texture_atlas.findRegion("loadingbar"); pading_sprite = new Sprite(bar); pading_sprite.setColor(0,0,0,1); width = tab_width ; height = tab_pading_height ; pading_sprite.setSize(width, height); no = 
no_of_me ; } @Override public void draw(Batch batch, float parentAlpha) { // TODO Auto-generated method stub super.draw(batch, parentAlpha); x =0 ; y = book.select_page_pane.getScrollY()+Gdx.graphics.getHeight()-((tab_height*no+tab_pading_height*(no))); pading_sprite.setPosition(x, y); pading_sprite.draw(batch); } } public PageTap(final int page_no) { int surah_no =1 ; boolean compleat = true ; for (int i = 0; i < surah_page.length-1; i++) { if(compleat){ if (page_no == surah_page[i]){ surah_no = i ; compleat=false; }else if (page_no>=surah_page[i]&&page_no<surah_page[i+1]){ surah_no = i ; compleat=false; }} } boolean maky_or_madany = suar_state[surah_no] ; this.add(new GozaName(surah_no+1 , page_no)).width(/*gozaname_width+gozatab_book_mark_width*3+tab_no_width*/tab_width).height(tab_height); this.add(new MakeMadany( page_no,maky_or_madany)); for (int i=1 ;i<=3 ;i++ ) { this.add(new BookMarks( page_no , i)); } this.addListener(new ActorGestureListener(){ @Override public void tap(InputEvent event, float x, float y, int count, int button) { // TODO Auto-generated method stub super.tap(event, x, y, count, button); book.stage_detector=Stages.pages ; book.config_input_prossesor(Stages.pages); book.current_page = page_no ; book.pages_stage.draw(); book.pages_draw_stage.draw(); book.pages_stage.act(Gdx.graphics.getDeltaTime()); book.pages_scroll_pane.setScrollY((book.current_page-1)*book.screen_height ); book.snab_to =((book.current_page-1)*book.screen_height ) ; book.update_saved_page_no(book.current_page) ; } }); this.row(); if ( page_no!=book.pages_no) { this.add(new Pading( page_no)).width(tab_width).height(tab_pading_height); }else{ this.add(new Table()).width(tab_width).height(tab_pading_height);} } }
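/*
 * Hypothetical illustration, not in the original app code: the loop at the top of the
 * PageTap constructor above maps a page number to a surah index by finding the entry of
 * surah_page[] whose range [surah_page[i], surah_page[i+1]) contains the page. The helper
 * name below is made up.
 */
class SurahLookupSketch {
    static int surahIndexFor(int pageNo, int[] surahStartPages) {
        for (int i = 0; i < surahStartPages.length - 1; i++) {
            if (pageNo >= surahStartPages[i] && pageNo < surahStartPages[i + 1]) {
                return i;
            }
        }
        return 0; // defensive default for pages outside the table
    }
}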
package slacknotifications; import com.google.gson.Gson; import jetbrains.buildServer.util.StringUtil; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.NameValuePair; import org.apache.http.auth.AuthScope; import org.apache.http.auth.Credentials; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import org.apache.http.HttpHost; import org.springframework.util.StringUtils; import slacknotifications.teamcity.BuildState; import slacknotifications.teamcity.Loggers; import slacknotifications.teamcity.payload.content.Commit; import slacknotifications.teamcity.payload.content.PostMessageResponse; import slacknotifications.teamcity.payload.content.SlackNotificationPayloadContent; import sun.reflect.generics.reflectiveObjects.NotImplementedException; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URLEncoder; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; public class SlackNotificationImpl implements SlackNotification { private String proxyHost; private Integer proxyPort = 0; private String proxyUsername; private String proxyPassword; private String channel; private String teamName; private String token; private String iconUrl; private String content; private SlackNotificationPayloadContent payload; private Integer resultCode; private HttpClient client; private String filename = ""; private Boolean enabled = false; private Boolean errored = false; private String errorReason = ""; private List<NameValuePair> params = new ArrayList<NameValuePair>(); private BuildState states; private String botName; private final static String CONTENT_TYPE = "application/x-www-form-urlencoded"; private PostMessageResponse response; private Boolean showBuildAgent; private Boolean showElapsedBuildTime; private boolean showCommits; private boolean showCommitters; private int maxCommitsToDisplay; private boolean mentionChannelEnabled; private boolean mentionSlackUserEnabled; private boolean showFailureReason; /* This is a bit mask of states that should trigger a SlackNotification. * All ones (11111111) means that all states will trigger the slacknotifications * We'll set that as the default, and then override if we get a more specific bit mask. 
*/ //private Integer EventListBitMask = BuildState.ALL_ENABLED; //private Integer EventListBitMask = Integer.parseInt("0",2); public SlackNotificationImpl() { this.client = HttpClients.createDefault(); this.params = new ArrayList<NameValuePair>(); } public SlackNotificationImpl(String channel) { this.channel = channel; this.client = HttpClients.createDefault(); this.params = new ArrayList<NameValuePair>(); } public SlackNotificationImpl(String channel, String proxyHost, String proxyPort) { this.channel = channel; this.client = HttpClients.createDefault(); this.params = new ArrayList<NameValuePair>(); if (proxyPort.length() != 0) { try { this.proxyPort = Integer.parseInt(proxyPort); } catch (NumberFormatException ex) { ex.printStackTrace(); } } this.setProxy(proxyHost, this.proxyPort, null); } public SlackNotificationImpl(String channel, String proxyHost, Integer proxyPort) { this.channel = channel; this.client = HttpClients.createDefault(); this.params = new ArrayList<NameValuePair>(); this.setProxy(proxyHost, proxyPort, null); } public SlackNotificationImpl(String channel, SlackNotificationProxyConfig proxyConfig) { this.channel = channel; this.client = HttpClients.createDefault(); this.params = new ArrayList<NameValuePair>(); setProxy(proxyConfig); } public SlackNotificationImpl(HttpClient httpClient, String channel) { this.channel = channel; this.client = httpClient; } public void setProxy(SlackNotificationProxyConfig proxyConfig) { if ((proxyConfig != null) && (proxyConfig.getProxyHost() != null) && (proxyConfig.getProxyPort() != null)) { this.setProxy(proxyConfig.getProxyHost(), proxyConfig.getProxyPort(), proxyConfig.getCreds()); } } public void setProxy(String proxyHost, Integer proxyPort, Credentials credentials) { this.proxyHost = proxyHost; this.proxyPort = proxyPort; if (this.proxyHost.length() > 0 && !this.proxyPort.equals(0)) { HttpClientBuilder clientBuilder = HttpClients.custom() .useSystemProperties() .setProxy(new HttpHost(proxyHost, proxyPort, "http")); if (credentials != null) { CredentialsProvider credsProvider = new BasicCredentialsProvider(); credsProvider.setCredentials(new AuthScope(proxyHost, proxyPort), credentials); clientBuilder.setDefaultCredentialsProvider(credsProvider); Loggers.SERVER.debug("SlackNotification ::using proxy credentials " + credentials.getUserPrincipal().getName()); } this.client = clientBuilder.build(); } } public void post() throws IOException { if(getIsApiToken()){ postViaApi(); } else{ postViaWebHook(); } } private void postViaApi() throws IOException { if ((this.enabled) && (!this.errored)) { if (this.teamName == null) { this.teamName = ""; } String url = String.format("https://slack.com/api/chat.postMessage?token=%s&username=%s&icon_url=%s&channel=%s&text=%s&pretty=1", this.token, this.botName == null ? "" : URLEncoder.encode(this.botName, "UTF-8"), this.iconUrl == null ? "" : URLEncoder.encode(this.iconUrl, "UTF-8"), this.channel == null ? "" : URLEncoder.encode(this.channel, "UTF-8"), this.payload == null ? 
"" : URLEncoder.encode(payload.getBuildDescriptionWithLinkSyntax(), "UTF-8"), ""); HttpPost httppost = new HttpPost(url); Loggers.SERVER.info("SlackNotificationListener :: Preparing message for URL " + url + " using proxy " + this.proxyHost + ":" + this.proxyPort); if (this.filename.length() > 0) { File file = new File(this.filename); throw new NotImplementedException(); } if (this.payload != null) { List<Attachment> attachments = getAttachments(); String attachmentsParam = String.format("attachments=%s", URLEncoder.encode(convertAttachmentsToJson(attachments), "UTF-8")); Loggers.SERVER.info("SlackNotificationListener :: Body message will be " + attachmentsParam); httppost.setEntity(new StringEntity(attachmentsParam)); httppost.setHeader("Content-Type", CONTENT_TYPE); } try { HttpResponse response = client.execute(httppost); this.resultCode = response.getStatusLine().getStatusCode(); if (this.resultCode == HttpStatus.SC_OK) { this.response = PostMessageResponse.fromJson(EntityUtils.toString(response.getEntity())); } if (response.getEntity().getContentLength() > 0) { this.content = EntityUtils.toString(response.getEntity()); } } finally { httppost.releaseConnection(); } } } private void postViaWebHook() throws IOException { if ((this.enabled) && (!this.errored)) { if (this.teamName == null) { this.teamName = ""; } String url = ""; if(this.token != null && this.token.startsWith("http")){ url = this.token; } else { url = String.format("https://%s.slack.com/services/hooks/incoming-webhook?token=%s", this.teamName.toLowerCase(), this.token); } Loggers.SERVER.info("SlackNotificationListener :: Preparing message for URL " + url); WebHookPayload requestBody = new WebHookPayload(); requestBody.setChannel(this.getChannel()); requestBody.setUsername(this.getBotName()); requestBody.setIcon_url(this.getIconUrl()); HttpPost httppost = new HttpPost(url); if (this.payload != null) { requestBody.setText(payload.getBuildDescriptionWithLinkSyntax()); requestBody.setAttachments(getAttachments()); } String bodyParam = String.format("payload=%s", URLEncoder.encode(requestBody.toJson(), "UTF-8")); Loggers.SERVER.info("SlackNotificationListener :: Body message will be " + bodyParam); httppost.setEntity(new StringEntity(bodyParam)); httppost.setHeader("Content-Type", CONTENT_TYPE); try { HttpResponse response = client.execute(httppost); this.resultCode = response.getStatusLine().getStatusCode(); PostMessageResponse resp = new PostMessageResponse(); if (this.resultCode != HttpStatus.SC_OK) { String error = EntityUtils.toString(response.getEntity()); resp.setOk(error == "ok"); resp.setError(error); } else{ resp.setOk(true); this.response = resp; } this.content = EntityUtils.toString(response.getEntity()); } finally { httppost.releaseConnection(); } } } private List<Attachment> getAttachments() { List<Attachment> attachments = new ArrayList<Attachment>(); Attachment attachment = new Attachment(this.payload.getBuildName(), null, null, this.payload.getColor()); List<String> firstDetailLines = new ArrayList<String>(); if(showBuildAgent == null || showBuildAgent){ firstDetailLines.add("Agent: " + this.payload.getAgentName()); } if(this.payload.getIsComplete() && (showElapsedBuildTime == null || showElapsedBuildTime)){ firstDetailLines.add("Elapsed: " + formatTime(this.payload.getElapsedTime())); } attachment.addField(this.payload.getBuildName(), StringUtil.join(firstDetailLines, "\n"), false); if(showFailureReason && this.payload.getBuildResult() == SlackNotificationPayloadContent.BUILD_STATUS_FAILURE){ 
if(this.payload.getFailedBuildMessages().size() > 0) { attachment.addField("Reason", StringUtil.join(", ", payload.getFailedBuildMessages()), false); } if(this.payload.getFailedTestNames().size() > 0){ ArrayList<String> failedTestNames = payload.getFailedTestNames(); String truncated = ""; if(failedTestNames.size() > 10){ failedTestNames = new ArrayList<String>( failedTestNames.subList(0, 9)); truncated = " (+ " + Integer.toString(payload.getFailedBuildMessages().size() - 10) + " more)"; } payload.getFailedTestNames().size(); attachment.addField("Failed Tests", StringUtil.join(", ", failedTestNames) + truncated, false); } } StringBuilder sbCommits = new StringBuilder(); List<Commit> commits = this.payload.getCommits(); List<Commit> commitsToDisplay = new ArrayList<Commit>(commits); if(showCommits) { boolean truncated = false; int totalCommits = commitsToDisplay.size(); if (commitsToDisplay.size() > maxCommitsToDisplay) { commitsToDisplay = commitsToDisplay.subList(0, maxCommitsToDisplay > commitsToDisplay.size() ? commitsToDisplay.size() : 5); truncated = true; } for (Commit commit : commitsToDisplay) { String revision = commit.getRevision(); revision = revision == null ? "" : revision; sbCommits.append(String.format("%s :: %s :: %s\n", revision.substring(0, Math.min(revision.length(), 10)), commit.getUserName(), commit.getDescription())); } if (truncated) { sbCommits.append(String.format("(+ %d more)\n", totalCommits - 5)); } if (!commitsToDisplay.isEmpty()) { attachment.addField("Commits", sbCommits.toString(), false); } } List<String> slackUsers = new ArrayList<String>(); for(Commit commit : commits){ if(commit.hasSlackUsername()){ slackUsers.add("@" + commit.getSlackUserName()); } } HashSet<String> tempHash = new HashSet<String>(slackUsers); slackUsers = new ArrayList<String>(tempHash); if(showCommitters) { Set<String> committers = new HashSet<String>(); for (Commit commit : commits) { committers.add(commit.getUserName()); } String committersString = StringUtil.join(", ", committers); if (!commits.isEmpty()) { attachment.addField("Changes By", committersString, false); } } // Mention the channel and/or the Slack Username of any committers if known if(payload.getIsFirstFailedBuild() && (mentionChannelEnabled || (mentionSlackUserEnabled && !slackUsers.isEmpty()))){ String mentionContent = ":arrow_up: \"" + this.payload.getBuildName() + "\" Failed "; if(mentionChannelEnabled){ mentionContent += "<!channel> "; } if(mentionSlackUserEnabled && !slackUsers.isEmpty() && !this.payload.isMergeBranch()) { mentionContent += StringUtil.join(" ", slackUsers); } attachment.addField("", mentionContent, true); } attachments.add(attachment); return attachments; } private class WebHookPayload { private String channel; private String username; private String text; private String icon_url; private List<Attachment> attachments; public String getChannel() { return channel; } public void setChannel(String channel) { this.channel = channel; } public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getText() { return text; } public void setText(String text) { this.text = text; } public String getIcon_url() { return icon_url; } public void setIcon_url(String icon_url) { this.icon_url = icon_url; } public List<Attachment> getAttachments() { return attachments; } public void setAttachments(List<Attachment> attachments) { this.attachments = attachments; } public String toJson() { Gson gson = new Gson(); return gson.toJson(this); } } 
public static String convertAttachmentsToJson(List<Attachment> attachments) { Gson gson = new Gson(); return gson.toJson(attachments); // XStream xstream = new XStream(new JsonHierarchicalStreamDriver()); // xstream.setMode(XStream.NO_REFERENCES); // xstream.alias("build", Attachment.class); // /* For some reason, the items are coming back as "@name" and "@value" // * so strip those out with a regex. // */ // return xstream.toXML(attachments).replaceAll("\"@(fallback|text|pretext|color|fields|title|value|short)\": \"(.*)\"", "\"$1\": \"$2\""); } public Integer getStatus() { return this.resultCode; } public String getProxyHost() { return proxyHost; } public int getProxyPort() { return proxyPort; } public String getTeamName() { return teamName; } public void setTeamName(String teamName) { this.teamName = teamName; } public String getToken() { return token; } public void setToken(String token) { this.token = token; } public String getIconUrl() { return this.iconUrl; } public void setIconUrl(String iconUrl) { this.iconUrl = iconUrl; } public String getBotName() { return this.botName; } public void setBotName(String botName) { this.botName = botName; } public String getChannel() { return channel; } public void setChannel(String channel) { this.channel = channel; } public String getParameterisedUrl() { //TODO: Implement different url logic return this.channel + this.parametersAsQueryString(); } public String parametersAsQueryString() { String s = ""; for (Iterator<NameValuePair> i = this.params.iterator(); i.hasNext(); ) { NameValuePair nv = i.next(); s += "&" + nv.getName() + "=" + nv.getValue(); } if (s.length() > 0) { return "?" + s.substring(1); } return s; } public void addParam(String key, String value) { this.params.add(new BasicNameValuePair(key, value)); } public void addParams(List<NameValuePair> paramsList) { for (Iterator<NameValuePair> i = paramsList.iterator(); i.hasNext(); ) { this.params.add(i.next()); } } public String getParam(String key) { for (Iterator<NameValuePair> i = this.params.iterator(); i.hasNext(); ) { NameValuePair nv = i.next(); if (nv.getName().equals(key)) { return nv.getValue(); } } return ""; } public void setFilename(String filename) { this.filename = filename; } public String getFilename() { return filename; } public String getContent() { return content; } public Boolean isEnabled() { return enabled; } public void setEnabled(Boolean enabled) { this.enabled = enabled; } public void setEnabled(String enabled) { if (enabled.toLowerCase().equals("true")) { this.enabled = true; } else { this.enabled = false; } } public Boolean isErrored() { return errored; } public void setErrored(Boolean errored) { this.errored = errored; } public String getErrorReason() { return errorReason; } public void setErrorReason(String errorReason) { this.errorReason = errorReason; } // public Integer getEventListBitMask() { // return EventListBitMask; // } // // public void setTriggerStateBitMask(Integer triggerStateBitMask) { // EventListBitMask = triggerStateBitMask; // } public String getProxyUsername() { return proxyUsername; } public void setProxyUsername(String proxyUsername) { this.proxyUsername = proxyUsername; } public String getProxyPassword() { return proxyPassword; } public void setProxyPassword(String proxyPassword) { this.proxyPassword = proxyPassword; } public SlackNotificationPayloadContent getPayload() { return payload; } public void setPayload(SlackNotificationPayloadContent payloadContent) { this.payload = payloadContent; } @Override public BuildState getBuildStates() 
{ return states; } @Override public void setBuildStates(BuildState states) { this.states = states; } public PostMessageResponse getResponse() { return response; } @Override public void setShowBuildAgent(Boolean showBuildAgent) { this.showBuildAgent = showBuildAgent; } @Override public void setShowElapsedBuildTime(Boolean showElapsedBuildTime) { this.showElapsedBuildTime = showElapsedBuildTime; } @Override public void setShowCommits(boolean showCommits) { this.showCommits = showCommits; } @Override public void setShowCommitters(boolean showCommitters) { this.showCommitters = showCommitters; } @Override public void setMaxCommitsToDisplay(int maxCommitsToDisplay) { this.maxCommitsToDisplay = maxCommitsToDisplay; } @Override public void setMentionChannelEnabled(boolean mentionChannelEnabled) { this.mentionChannelEnabled = mentionChannelEnabled; } @Override public void setMentionSlackUserEnabled(boolean mentionSlackUserEnabled) { this.mentionSlackUserEnabled = mentionSlackUserEnabled; } @Override public void setShowFailureReason(boolean showFailureReason) { this.showFailureReason = showFailureReason; } public boolean getIsApiToken() { if(this.token != null && this.token.startsWith("http")){ // We now accept a webhook url. return false; } return this.token == null || this.token.split("-").length > 1; } private String formatTime(long seconds){ if(seconds < 60){ return seconds + "s"; } return String.format("%dm:%ds", TimeUnit.SECONDS.toMinutes(seconds), TimeUnit.SECONDS.toSeconds(seconds) - TimeUnit.MINUTES.toSeconds(TimeUnit.SECONDS.toMinutes(seconds)) ); } }
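/*
 * Hypothetical usage sketch, not part of the plugin sources: configuring a notification to
 * post through an incoming-webhook URL. Because the token starts with "http",
 * getIsApiToken() returns false and post() takes the postViaWebHook() path. The channel,
 * webhook URL and bot name below are made up, and running main performs a real HTTP POST
 * carrying only the channel/username envelope, since no payload is set.
 */
class SlackNotificationUsageSketch {
    public static void main(String[] args) throws java.io.IOException {
        SlackNotificationImpl notification = new SlackNotificationImpl("#builds");
        notification.setToken("https://hooks.slack.com/services/T000/B000/XXXX"); // made-up webhook URL
        notification.setBotName("TeamCity");
        notification.setEnabled(true);
        notification.post();
    }
}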