gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.netty.handler.codec.http2;

import io.netty.handler.codec.CharSequenceValueConverter;
import io.netty.handler.codec.DefaultHeaders;
import io.netty.util.AsciiString;
import io.netty.util.ByteProcessor;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.UnstableApi;

import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;
import static io.netty.handler.codec.http2.Http2Headers.PseudoHeaderName.hasPseudoHeaderFormat;
import static io.netty.util.AsciiString.CASE_INSENSITIVE_HASHER;
import static io.netty.util.AsciiString.CASE_SENSITIVE_HASHER;
import static io.netty.util.AsciiString.isUpperCase;

/**
 * Default implementation of {@link Http2Headers}. Keeps pseudo-headers (names with the
 * {@code :} prefix, as recognized by {@code hasPseudoHeaderFormat}) ahead of regular
 * headers in iteration order, and optionally rejects header names containing
 * upper-case bytes (HTTP/2 requires lowercase header field names).
 */
@UnstableApi
public class DefaultHttp2Headers
        extends DefaultHeaders<CharSequence, CharSequence, Http2Headers> implements Http2Headers {
    // Returning false stops iteration at the first upper-case byte, so forEachByte
    // yields that byte's index (or -1 if none found).
    private static final ByteProcessor HTTP2_NAME_VALIDATOR_PROCESSOR = new ByteProcessor() {
        @Override
        public boolean process(byte value) {
            return !isUpperCase(value);
        }
    };
    // Rejects null/empty names and names with upper-case characters. Failures are
    // surfaced as connection-level PROTOCOL_ERRORs via PlatformDependent.throwException
    // (which rethrows the checked Http2Exception without declaring it).
    static final NameValidator<CharSequence> HTTP2_NAME_VALIDATOR = new NameValidator<CharSequence>() {
        @Override
        public void validateName(CharSequence name) {
            if (name == null || name.length() == 0) {
                PlatformDependent.throwException(connectionError(PROTOCOL_ERROR,
                        "empty headers are not allowed [%s]", name));
            }
            if (name instanceof AsciiString) {
                // Fast path: scan raw bytes of the AsciiString.
                final int index;
                try {
                    index = ((AsciiString) name).forEachByte(HTTP2_NAME_VALIDATOR_PROCESSOR);
                } catch (Http2Exception e) {
                    PlatformDependent.throwException(e);
                    return;
                } catch (Throwable t) {
                    PlatformDependent.throwException(connectionError(PROTOCOL_ERROR, t,
                            "unexpected error. invalid header name [%s]", name));
                    return;
                }
                if (index != -1) {
                    PlatformDependent.throwException(connectionError(PROTOCOL_ERROR,
                            "invalid header name [%s]", name));
                }
            } else {
                // Slow path: per-char scan for arbitrary CharSequences.
                for (int i = 0; i < name.length(); ++i) {
                    if (isUpperCase(name.charAt(i))) {
                        PlatformDependent.throwException(connectionError(PROTOCOL_ERROR,
                                "invalid header name [%s]", name));
                    }
                }
            }
        }
    };
    // First non-pseudo entry in iteration order; pseudo-headers are inserted before it.
    // Equals `head` while the map contains no regular headers.
    private HeaderEntry<CharSequence, CharSequence> firstNonPseudo = head;

    /**
     * Create a new instance.
     * <p>
     * Header names will be validated according to
     * <a href="https://tools.ietf.org/html/rfc7540">rfc7540</a>.
     */
    public DefaultHttp2Headers() {
        this(true);
    }

    /**
     * Create a new instance.
     * @param validate {@code true} to validate header names according to
     * <a href="https://tools.ietf.org/html/rfc7540">rfc7540</a>. {@code false} to not validate header names.
     */
    @SuppressWarnings("unchecked")
    public DefaultHttp2Headers(boolean validate) {
        // Case sensitive compare is used because it is cheaper, and header validation can be used to catch invalid
        // headers.
        super(CASE_SENSITIVE_HASHER, CharSequenceValueConverter.INSTANCE,
                validate ? HTTP2_NAME_VALIDATOR : NameValidator.NOT_NULL);
    }

    /**
     * Create a new instance.
     * @param validate {@code true} to validate header names according to
     * <a href="https://tools.ietf.org/html/rfc7540">rfc7540</a>. {@code false} to not validate header names.
     * @param arraySizeHint A hint as to how large the hash data structure should be.
     * The next positive power of two will be used. An upper bound may be enforced.
     */
    @SuppressWarnings("unchecked")
    public DefaultHttp2Headers(boolean validate, int arraySizeHint) {
        // Case sensitive compare is used because it is cheaper, and header validation can be used to catch invalid
        // headers.
        super(CASE_SENSITIVE_HASHER, CharSequenceValueConverter.INSTANCE,
                validate ? HTTP2_NAME_VALIDATOR : NameValidator.NOT_NULL, arraySizeHint);
    }

    @Override
    public Http2Headers clear() {
        // Reset the pseudo-header insertion point before clearing the entries.
        this.firstNonPseudo = head;
        return super.clear();
    }

    @Override
    public boolean equals(Object o) {
        return o instanceof Http2Headers && equals((Http2Headers) o, CASE_SENSITIVE_HASHER);
    }

    @Override
    public int hashCode() {
        return hashCode(CASE_SENSITIVE_HASHER);
    }

    @Override
    public Http2Headers method(CharSequence value) {
        set(PseudoHeaderName.METHOD.value(), value);
        return this;
    }

    @Override
    public Http2Headers scheme(CharSequence value) {
        set(PseudoHeaderName.SCHEME.value(), value);
        return this;
    }

    @Override
    public Http2Headers authority(CharSequence value) {
        set(PseudoHeaderName.AUTHORITY.value(), value);
        return this;
    }

    @Override
    public Http2Headers path(CharSequence value) {
        set(PseudoHeaderName.PATH.value(), value);
        return this;
    }

    @Override
    public Http2Headers status(CharSequence value) {
        set(PseudoHeaderName.STATUS.value(), value);
        return this;
    }

    @Override
    public CharSequence method() {
        return get(PseudoHeaderName.METHOD.value());
    }

    @Override
    public CharSequence scheme() {
        return get(PseudoHeaderName.SCHEME.value());
    }

    @Override
    public CharSequence authority() {
        return get(PseudoHeaderName.AUTHORITY.value());
    }

    @Override
    public CharSequence path() {
        return get(PseudoHeaderName.PATH.value());
    }

    @Override
    public CharSequence status() {
        return get(PseudoHeaderName.STATUS.value());
    }

    @Override
    public boolean contains(CharSequence name, CharSequence value) {
        return contains(name, value, false);
    }

    @Override
    public boolean contains(CharSequence name, CharSequence value, boolean caseInsensitive) {
        // Value comparison is case-insensitive only when explicitly requested.
        return contains(name, value, caseInsensitive ? CASE_INSENSITIVE_HASHER : CASE_SENSITIVE_HASHER);
    }

    @Override
    protected final HeaderEntry<CharSequence, CharSequence> newHeaderEntry(
            int h, CharSequence name, CharSequence value, HeaderEntry<CharSequence, CharSequence> next) {
        return new Http2HeaderEntry(h, name, value, next);
    }

    /**
     * Header entry that maintains the invariant that pseudo-headers come first in
     * iteration order, by linking new pseudo-header entries just before
     * {@code firstNonPseudo} and regular entries at the tail.
     */
    private final class Http2HeaderEntry extends HeaderEntry<CharSequence, CharSequence> {
        protected Http2HeaderEntry(int hash, CharSequence key, CharSequence value,
                                   HeaderEntry<CharSequence, CharSequence> next) {
            super(hash, key);
            this.value = value;
            this.next = next;

            // Make sure the pseudo headers fields are first in iteration order
            if (hasPseudoHeaderFormat(key)) {
                after = firstNonPseudo;
                before = firstNonPseudo.before();
            } else {
                after = head;
                before = head.before();
                if (firstNonPseudo == head) {
                    // This is the first regular header; future pseudo-headers link before it.
                    firstNonPseudo = this;
                }
            }
            pointNeighborsToThis();
        }

        @Override
        protected void remove() {
            if (this == firstNonPseudo) {
                // Advance the boundary marker so pseudo-header insertion stays correct.
                firstNonPseudo = firstNonPseudo.after();
            }
            super.remove();
        }
    }
}
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.gallery3d.app;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.DisplayMetrics;
import android.view.MotionEvent;
import android.view.View;

import com.groupme.android.videokit.R;

import java.util.Locale;

/**
 * The time bar view, which includes the current and total time, the progress
 * bar, and the scrubber.
 */
public class TimeBar extends View {

    /** Callbacks fired while the user drags the scrubber. */
    public interface Listener {
        void onScrubbingStart();

        void onScrubbingMove(int time);

        void onScrubbingEnd(int time, int start, int end);
    }

    // Padding around the scrubber to increase its touch target
    private static final int SCRUBBER_PADDING_IN_DP = 10;
    // The total padding, top plus bottom
    private static final int V_PADDING_IN_DP = 30;
    private static final int TEXT_SIZE_IN_DP = 14;

    protected Listener mListener;
    // the bars we use for displaying the progress
    protected final Rect mProgressBar;
    protected final Rect mPlayedBar;
    protected final Paint mProgressPaint;
    protected final Paint mPlayedPaint;
    protected final Paint mTimeTextPaint;
    protected final Bitmap mScrubber;
    protected int mScrubberPadding; // adds some touch tolerance around the
                                    // scrubber
    protected int mScrubberLeft;
    protected int mScrubberTop;
    protected int mScrubberCorrection;
    protected boolean mScrubbing;
    protected boolean mShowTimes;
    protected boolean mShowScrubber;
    protected int mTotalTime;
    protected int mCurrentTime;
    protected final Rect mTimeBounds;
    protected int mVPaddingInPx;

    TimeBar(Context context) {
        super(context);
        mShowTimes = true;
        mShowScrubber = true;
        mProgressBar = new Rect();
        mPlayedBar = new Rect();
        mProgressPaint = new Paint();
        mProgressPaint.setColor(0xFF808080);
        mPlayedPaint = new Paint();
        mPlayedPaint.setColor(0xFF33B5E5);
        DisplayMetrics metrics = context.getResources().getDisplayMetrics();
        float textSizeInPx = metrics.density * TEXT_SIZE_IN_DP;
        mTimeTextPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mTimeTextPaint.setColor(0xFFCECECE);
        mTimeTextPaint.setTextSize(textSizeInPx);
        mTimeTextPaint.setTextAlign(Paint.Align.CENTER);
        mTimeBounds = new Rect();
        // Measure a representative worst-case time string for layout sizing.
        mTimeTextPaint.getTextBounds("0:00:00", 0, 7, mTimeBounds);
        mScrubber = BitmapFactory.decodeResource(getResources(), R.drawable.play_scrubber);
        mScrubberPadding = (int) (metrics.density * SCRUBBER_PADDING_IN_DP);
        mVPaddingInPx = (int) (metrics.density * V_PADDING_IN_DP);
    }

    public TimeBar(Context context, Listener listener) {
        this(context);
        mListener = checkNotNull(listener);
    }

    // Recomputes the played portion of the bar and (when not scrubbing) the
    // scrubber position, then requests a redraw.
    private void update() {
        mPlayedBar.set(mProgressBar);
        if (mTotalTime > 0) {
            // long cast guards the width * time product against int overflow.
            mPlayedBar.right =
                    mPlayedBar.left + (int) ((mProgressBar.width() * (long) mCurrentTime) / mTotalTime);
        } else {
            mPlayedBar.right = mProgressBar.left;
        }
        if (!mScrubbing) {
            mScrubberLeft = mPlayedBar.right - mScrubber.getWidth() / 2;
        }
        invalidate();
    }

    /**
     * @return the preferred height of this view, including invisible padding
     */
    public int getPreferredHeight() {
        return mTimeBounds.height() + mVPaddingInPx + mScrubberPadding;
    }

    /**
     * @return the height of the time bar, excluding invisible padding
     */
    public int getBarHeight() {
        return mTimeBounds.height() + mVPaddingInPx;
    }

    // NOTE(review): trimStartTime/trimEndTime are accepted but never used here;
    // only current/total time affect this view. Confirm against subclasses/callers.
    public void setTime(int currentTime, int totalTime, int trimStartTime, int trimEndTime) {
        if (mCurrentTime == currentTime && mTotalTime == totalTime) {
            return;
        }
        mCurrentTime = currentTime;
        mTotalTime = totalTime;
        update();
    }

    // Hit test against the scrubber bitmap expanded by mScrubberPadding on all sides.
    private boolean inScrubber(float x, float y) {
        int scrubberRight = mScrubberLeft + mScrubber.getWidth();
        int scrubberBottom = mScrubberTop + mScrubber.getHeight();
        return mScrubberLeft - mScrubberPadding < x && x < scrubberRight + mScrubberPadding
                && mScrubberTop - mScrubberPadding < y && y < scrubberBottom + mScrubberPadding;
    }

    // Keeps the scrubber center within the horizontal extent of the progress bar.
    private void clampScrubber() {
        int half = mScrubber.getWidth() / 2;
        int max = mProgressBar.right - half;
        int min = mProgressBar.left - half;
        mScrubberLeft = Math.min(max, Math.max(min, mScrubberLeft));
    }

    // Converts the scrubber's center x-position into a time value.
    private int getScrubberTime() {
        return (int) ((long) (mScrubberLeft + mScrubber.getWidth() / 2 - mProgressBar.left)
                * mTotalTime / mProgressBar.width());
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        int w = r - l;
        int h = b - t;
        if (!mShowTimes && !mShowScrubber) {
            mProgressBar.set(0, 0, w, h);
        } else {
            int margin = mScrubber.getWidth() / 3;
            if (mShowTimes) {
                margin += mTimeBounds.width();
            }
            int progressY = (h + mScrubberPadding) / 2;
            mScrubberTop = progressY - mScrubber.getHeight() / 2 + 1;
            // The bar itself is drawn 4px tall, inset by padding + margin on each side.
            mProgressBar.set(
                    getPaddingLeft() + margin, progressY,
                    w - getPaddingRight() - margin, progressY + 4);
        }
        update();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // draw progress bars
        canvas.drawRect(mProgressBar, mProgressPaint);
        canvas.drawRect(mPlayedBar, mPlayedPaint);
        // draw scrubber and timers
        if (mShowScrubber) {
            canvas.drawBitmap(mScrubber, mScrubberLeft, mScrubberTop, null);
        }
        if (mShowTimes) {
            // Current time on the left, total time on the right (text is center-aligned).
            canvas.drawText(
                    stringForTime(mCurrentTime),
                    mTimeBounds.width() / 2f + getPaddingLeft(),
                    mTimeBounds.height() + mVPaddingInPx / 2f + mScrubberPadding + 1,
                    mTimeTextPaint);
            canvas.drawText(
                    stringForTime(mTotalTime),
                    // NOTE(review): integer division here vs. / 2f on the left label —
                    // differs by at most half a pixel; presumably intentional rounding.
                    getWidth() - getPaddingRight() - mTimeBounds.width() / 2,
                    mTimeBounds.height() + mVPaddingInPx / 2f + mScrubberPadding + 1,
                    mTimeTextPaint);
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (mShowScrubber) {
            int x = (int) event.getX();
            int y = (int) event.getY();
            switch (event.getAction()) {
                case MotionEvent.ACTION_DOWN: {
                    // If the touch started on the scrubber, keep the grab offset;
                    // otherwise jump the scrubber's center to the touch point.
                    mScrubberCorrection = inScrubber(x, y)
                            ? x - mScrubberLeft
                            : mScrubber.getWidth() / 2;
                    mScrubbing = true;
                    mListener.onScrubbingStart();
                }
                // fall-through
                case MotionEvent.ACTION_MOVE: {
                    mScrubberLeft = x - mScrubberCorrection;
                    clampScrubber();
                    mCurrentTime = getScrubberTime();
                    mListener.onScrubbingMove(mCurrentTime);
                    invalidate();
                    return true;
                }
                case MotionEvent.ACTION_CANCEL:
                case MotionEvent.ACTION_UP: {
                    mListener.onScrubbingEnd(getScrubberTime(), 0, 0);
                    mScrubbing = false;
                    return true;
                }
            }
        }
        return false;
    }

    // Formats milliseconds as H:MM:SS, or MM:SS when under an hour.
    protected String stringForTime(long millis) {
        int totalSeconds = (int) millis / 1000;
        int seconds = totalSeconds % 60;
        int minutes = (totalSeconds / 60) % 60;
        int hours = totalSeconds / 3600;
        if (hours > 0) {
            return String.format(Locale.US, "%d:%02d:%02d", hours, minutes, seconds);
        } else {
            return String.format(Locale.US, "%02d:%02d", minutes, seconds);
        }
    }

    public void setSeekable(boolean canSeek) {
        mShowScrubber = canSeek;
    }

    // Throws NullPointerException if the input is null.
    public static <T> T checkNotNull(T object) {
        if (object == null)
            throw new NullPointerException();
        return object;
    }
}
/*
 * Copyright 2014 Real Logic Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package uk.co.real_logic.aeron.samples;

import uk.co.real_logic.aeron.Subscription;
import uk.co.real_logic.aeron.common.BackoffIdleStrategy;
import uk.co.real_logic.aeron.common.IdleStrategy;
import uk.co.real_logic.aeron.common.RateReporter;
import uk.co.real_logic.aeron.common.concurrent.logbuffer.DataHandler;
import uk.co.real_logic.aeron.common.protocol.HeaderFlyweight;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;

import static uk.co.real_logic.aeron.common.CommonContext.ADMIN_DIR_PROP_NAME;
import static uk.co.real_logic.aeron.common.CommonContext.COUNTERS_DIR_PROP_NAME;
import static uk.co.real_logic.aeron.common.CommonContext.DATA_DIR_PROP_NAME;

/**
 * Utility functions for samples
 */
public class SamplesUtil {
    /**
     * Use shared memory on Linux to avoid contention on the page cache.
     * Only sets each directory property if it has not already been configured.
     */
    public static void useSharedMemoryOnLinux() {
        if ("Linux".equalsIgnoreCase(System.getProperty("os.name"))) {
            if (null == System.getProperty(ADMIN_DIR_PROP_NAME)) {
                System.setProperty(ADMIN_DIR_PROP_NAME, "/dev/shm/aeron/conductor");
            }
            if (null == System.getProperty(COUNTERS_DIR_PROP_NAME)) {
                System.setProperty(COUNTERS_DIR_PROP_NAME, "/dev/shm/aeron/counters");
            }
            if (null == System.getProperty(DATA_DIR_PROP_NAME)) {
                System.setProperty(DATA_DIR_PROP_NAME, "/dev/shm/aeron/data");
            }
        }
    }

    /**
     * Return a reusable, parameterized event loop that calls a default idler when no messages are received
     *
     * @param limit passed to {@link Subscription#poll(int)}
     * @param running indication for loop
     * @return loop function
     */
    public static Consumer<Subscription> subscriberLoop(final int limit, final AtomicBoolean running) {
        // Backoff from 1us up to 100us after repeated empty polls.
        final IdleStrategy idleStrategy = new BackoffIdleStrategy(
                100, 10, TimeUnit.MICROSECONDS.toNanos(1), TimeUnit.MICROSECONDS.toNanos(100));

        return subscriberLoop(limit, running, idleStrategy);
    }

    /**
     * Return a reusable, parameterized event loop that calls and idler when no messages are received
     *
     * @param limit passed to {@link Subscription#poll(int)}
     * @param running indication for loop
     * @param idleStrategy to use for loop
     * @return loop function
     */
    public static Consumer<Subscription> subscriberLoop(
            final int limit, final AtomicBoolean running, final IdleStrategy idleStrategy) {
        return (subscription) -> {
            try {
                // Poll until asked to stop; idle proportionally to how busy the poll was.
                while (running.get()) {
                    final int fragmentsRead = subscription.poll(limit);
                    idleStrategy.idle(fragmentsRead);
                }
            } catch (final Exception ex) {
                ex.printStackTrace();
            }
        };
    }

    /**
     * Return a reusable, parameterized {@link DataHandler} that prints to stdout
     *
     * @param streamId to show when printing
     * @return subscription data handler function that prints the message contents
     */
    public static DataHandler printStringMessage(final int streamId) {
        return (buffer, offset, length, header) -> {
            // Copy the fragment out of the buffer so it can be rendered as a String.
            final byte[] data = new byte[length];
            buffer.getBytes(offset, data);

            System.out.println(
                    String.format(
                            "message to stream %d from session %x (%d@%d) <<%s>>",
                            streamId, header.sessionId(), length, offset, new String(data)));
        };
    }

    /**
     * Return a reusable, parameterized {@link DataHandler} that calls into a
     * {@link RateReporter}.
     *
     * @param reporter for the rate
     * @return {@link DataHandler} that records the rate information
     */
    public static DataHandler rateReporterHandler(final RateReporter reporter) {
        return (buffer, offset, length, header) -> reporter.onMessage(1, length);
    }

    /**
     * Generic error handler that just prints message to stdout.
     *
     * @param channel for the error
     * @param streamId for the error
     * @param sessionId for the error, if source
     * @param message indicating what the error was
     * @param cause of the error
     */
    public static void printError(
            final String channel, final int streamId, final int sessionId,
            final String message, final HeaderFlyweight cause) {
        // Only the message is printed; the other parameters exist to satisfy the
        // error-handler signature.
        System.out.println(message);
    }

    /**
     * Print the rates to stdout
     *
     * @param messagesPerSec being reported
     * @param bytesPerSec being reported
     * @param totalMessages being reported
     * @param totalBytes being reported
     */
    public static void printRate(
            final double messagesPerSec, final double bytesPerSec,
            final long totalMessages, final long totalBytes) {
        System.out.println(
                String.format(
                        "%.02g msgs/sec, %.02g bytes/sec, totals %d messages %d MB",
                        messagesPerSec, bytesPerSec, totalMessages, totalBytes / (1024 * 1024)));
    }

    /**
     * Print the information for a new connection to stdout.
     *
     * @param channel for the connection
     * @param streamId for the stream
     * @param sessionId for the connection publication
     * @param sourceInformation that is transport specific
     */
    public static void printNewConnection(
            final String channel, final int streamId, final int sessionId, final String sourceInformation) {
        System.out.println(
                String.format(
                        "new connection on %s streamId %d sessionId %x from %s",
                        channel, streamId, sessionId, sourceInformation));
    }

    /**
     * Print the information for an inactive connection to stdout.
     *
     * @param channel for the connection
     * @param streamId for the stream
     * @param sessionId for the connection publication
     */
    public static void printInactiveConnection(final String channel, final int streamId, final int sessionId) {
        System.out.println(
                String.format(
                        "inactive connection on %s streamId %d sessionId %x",
                        channel, streamId, sessionId));
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.printer;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import javax.print.DocFlavor;
import javax.print.attribute.standard.MediaSizeName;
import javax.print.attribute.standard.OrientationRequested;
import javax.print.attribute.standard.Sides;

import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;

/**
 * Holds the configuration for the printer component, populated by parsing an
 * {@code lpr:} endpoint URI. String options such as flavor, media size, sides
 * and orientation are translated into their {@code javax.print} equivalents.
 */
@UriParams
public class PrinterConfiguration {
    private URI uri;

    @UriPath
    private String hostname;
    @UriPath
    private int port;
    @UriPath
    private String printername;
    @UriParam
    private String printerPrefix;
    @UriParam(defaultValue = "1")
    private int copies = 1;
    @UriParam
    private String flavor;
    @UriParam
    private DocFlavor docFlavor;
    @UriParam
    private String mimeType;
    @UriParam(defaultValue = "na-letter")
    private String mediaSize;
    @UriParam
    private MediaSizeName mediaSizeName;
    @UriParam(defaultValue = "one-sided")
    private String sides;
    @UriParam
    private Sides internalSides;
    @UriParam(defaultValue = "portrait")
    private String orientation;
    @UriParam
    private OrientationRequested internalOrientation;
    @UriParam(defaultValue = "true")
    private boolean sendToPrinter = true;
    @UriParam
    private String mediaTray;

    public PrinterConfiguration() {
    }

    public PrinterConfiguration(URI uri) throws URISyntaxException {
        this.uri = uri;
    }

    /**
     * Parses an {@code lpr:} URI into this configuration: host, port, printer
     * name (the path minus leading slashes) and all query parameters, deriving
     * the {@code javax.print} objects from the string options.
     *
     * @param uri the endpoint URI to parse
     * @throws IllegalArgumentException if the scheme is not {@code lpr}
     */
    public void parseURI(URI uri) throws Exception {
        String protocol = uri.getScheme();

        if (!protocol.equalsIgnoreCase("lpr")) {
            throw new IllegalArgumentException("Unrecognized Print protocol: " + protocol + " for uri: " + uri);
        }

        setUri(uri);
        setHostname(uri.getHost());
        setPort(uri.getPort());

        // use path as printer name, but without any leading slashes
        String path = uri.getPath();
        path = ObjectHelper.removeStartingCharacters(path, '/');
        path = ObjectHelper.removeStartingCharacters(path, '\\');
        setPrintername(path);

        Map<String, Object> printSettings = URISupport.parseParameters(uri);
        setFlavor((String) printSettings.get("flavor"));
        setMimeType((String) printSettings.get("mimeType"));
        setDocFlavor(assignDocFlavor(flavor, mimeType));

        setPrinterPrefix((String) printSettings.get("printerPrefix"));

        if (printSettings.containsKey("copies")) {
            setCopies(Integer.valueOf((String) printSettings.get("copies")));
        }
        setMediaSize((String) printSettings.get("mediaSize"));
        setSides((String) printSettings.get("sides"));
        setOrientation((String) printSettings.get("orientation"));
        setMediaSizeName(assignMediaSize(mediaSize));
        setInternalSides(assignSides(sides));
        setInternalOrientation(assignOrientation(orientation));

        if (printSettings.containsKey("sendToPrinter")) {
            // Any value other than "true" disables sending to the printer.
            if (!(Boolean.valueOf((String) printSettings.get("sendToPrinter")))) {
                setSendToPrinter(false);
            }
        }

        if (printSettings.containsKey("mediaTray")) {
            setMediaTray((String) printSettings.get("mediaTray"));
        }
    }

    // Maps the (flavor, mimeType) string pair onto a concrete DocFlavor via
    // DocFlavorAssigner. Defaults: mimeType AUTOSENSE, flavor DocFlavor.BYTE_ARRAY,
    // falling back to BYTE_ARRAY.AUTOSENSE when the mimeType is unrecognized.
    private DocFlavor assignDocFlavor(String flavor, String mimeType) throws Exception {
        // defaults
        if (mimeType == null) {
            mimeType = "AUTOSENSE";
        }
        if (flavor == null) {
            flavor = "DocFlavor.BYTE_ARRAY";
        }

        DocFlavor d = DocFlavor.BYTE_ARRAY.AUTOSENSE;
        DocFlavorAssigner docFlavorAssigner = new DocFlavorAssigner();
        if (mimeType.equalsIgnoreCase("AUTOSENSE")) {
            d = docFlavorAssigner.forMimeTypeAUTOSENSE(flavor);
        } else if (mimeType.equalsIgnoreCase("GIF")) {
            d = docFlavorAssigner.forMimeTypeGIF(flavor);
        } else if (mimeType.equalsIgnoreCase("JPEG")) {
            d = docFlavorAssigner.forMimeTypeJPEG(flavor);
        } else if (mimeType.equalsIgnoreCase("PDF")) {
            d = docFlavorAssigner.forMimeTypePDF(flavor);
        } else if (mimeType.equalsIgnoreCase("PCL")) {
            d = docFlavorAssigner.forMimeTypePCL(flavor);
        } else if (mimeType.equalsIgnoreCase("POSTSCRIPT")) {
            d = docFlavorAssigner.forMimeTypePOSTSCRIPT(flavor);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_HOST")) {
            d = docFlavorAssigner.forMimeTypeHOST(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_US_ASCII")) {
            d = docFlavorAssigner.forMimeTypeUSASCII(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16")) {
            d = docFlavorAssigner.forMimeTypeUTF16(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16LE")) {
            d = docFlavorAssigner.forMimeTypeUTF16LE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_16BE")) {
            d = docFlavorAssigner.forMimeTypeUTF16BE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML_UTF_8")) {
            d = docFlavorAssigner.forMimeTypeUTF8(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_HOST")) {
            d = docFlavorAssigner.forMimeTypeHOST(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_US_ASCII")) {
            d = docFlavorAssigner.forMimeTypeUSASCII(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_16")) {
            d = docFlavorAssigner.forMimeTypeUTF16(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_16LE")) {
            d = docFlavorAssigner.forMimeTypeUTF16LE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_16BE")) {
            d = docFlavorAssigner.forMimeTypeUTF16BE(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN_UTF_8")) {
            d = docFlavorAssigner.forMimeTypeUTF8(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_HTML")) {
            d = docFlavorAssigner.forMimeTypeBasic(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("TEXT_PLAIN")) {
            d = docFlavorAssigner.forMimeTypeBasic(flavor, mimeType);
        } else if (mimeType.equalsIgnoreCase("PAGEABLE")) {
            d = docFlavorAssigner.forMimeTypePAGEABLE(flavor);
        } else if (mimeType.equalsIgnoreCase("PRINTABLE")) {
            d = docFlavorAssigner.forMimeTypePRINTABLE(flavor);
        } else if (mimeType.equalsIgnoreCase("RENDERABLE_IMAGE")) {
            d = docFlavorAssigner.forMimeTypeRENDERABLEIMAGE(flavor);
        }

        return d;
    }

    // Maps a media-size string to a MediaSizeName by its prefix family
    // (iso / jis / na / other); defaults to NA_LETTER when unset.
    // NOTE(review): only the "iso" branch lower-cases before comparing —
    // "JIS"/"NA" in upper case would fall to the "other" branch; confirm intended.
    private MediaSizeName assignMediaSize(String size) {
        MediaSizeAssigner mediaSizeAssigner = new MediaSizeAssigner();
        MediaSizeName answer;
        if (size == null) {
            // default to NA letter if no size configured
            answer = MediaSizeName.NA_LETTER;
        } else if (size.toLowerCase().startsWith("iso")) {
            answer = mediaSizeAssigner.selectMediaSizeNameISO(size);
        } else if (size.startsWith("jis")) {
            answer = mediaSizeAssigner.selectMediaSizeNameJIS(size);
        } else if (size.startsWith("na")) {
            answer = mediaSizeAssigner.selectMediaSizeNameNA(size);
        } else {
            answer = mediaSizeAssigner.selectMediaSizeNameOther(size);
        }

        return answer;
    }

    // Maps a sides string onto javax.print Sides; unknown values and null
    // both fall back to ONE_SIDED.
    public Sides assignSides(String sidesString) {
        Sides answer;

        if (sidesString == null) {
            // default to one side if no slides configured
            answer = Sides.ONE_SIDED;
        } else if (sidesString.equalsIgnoreCase("one-sided")) {
            answer = Sides.ONE_SIDED;
        } else if (sidesString.equalsIgnoreCase("duplex")) {
            answer = Sides.DUPLEX;
        } else if (sidesString.equalsIgnoreCase("tumble")) {
            answer = Sides.TUMBLE;
        } else if (sidesString.equalsIgnoreCase("two-sided-short-edge")) {
            answer = Sides.TWO_SIDED_SHORT_EDGE;
        } else if (sidesString.equalsIgnoreCase("two-sided-long-edge")) {
            answer = Sides.TWO_SIDED_LONG_EDGE;
        } else {
            answer = Sides.ONE_SIDED;
        }

        return answer;
    }

    // Maps an orientation string onto OrientationRequested; unknown values and
    // null both fall back to PORTRAIT.
    public OrientationRequested assignOrientation(final String orientation) {
        OrientationRequested answer;

        if (orientation == null) {
            // default to portrait
            answer = OrientationRequested.PORTRAIT;
        } else if (orientation.equalsIgnoreCase("portrait")) {
            answer = OrientationRequested.PORTRAIT;
        } else if (orientation.equalsIgnoreCase("landscape")) {
            answer = OrientationRequested.LANDSCAPE;
        } else if (orientation.equalsIgnoreCase("reverse-portrait")) {
            answer = OrientationRequested.REVERSE_PORTRAIT;
        } else if (orientation.equalsIgnoreCase("reverse-landscape")) {
            answer = OrientationRequested.REVERSE_LANDSCAPE;
        } else {
            answer = OrientationRequested.PORTRAIT;
        }

        return answer;
    }

    public URI getUri() {
        return uri;
    }

    public void setUri(URI uri) {
        this.uri = uri;
    }

    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    public int getPort() {
        return port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public String getPrintername() {
        return printername;
    }

    public void setPrintername(String printername) {
        this.printername = printername;
    }

    public int getCopies() {
        return copies;
    }

    public void setCopies(int copies) {
        this.copies = copies;
    }

    public String getFlavor() {
        return flavor;
    }

    public void setFlavor(String flavor) {
        this.flavor = flavor;
    }

    public DocFlavor getDocFlavor() {
        return docFlavor;
    }

    public void setDocFlavor(DocFlavor docFlavor) {
        this.docFlavor = docFlavor;
    }

    public String getMediaSize() {
        return mediaSize;
    }

    public void setMediaSize(String mediaSize) {
        this.mediaSize = mediaSize;
    }

    public String getSides() {
        return sides;
    }

    public void setSides(String sides) {
        this.sides = sides;
    }

    public MediaSizeName getMediaSizeName() {
        return mediaSizeName;
    }

    public void setMediaSizeName(MediaSizeName mediaSizeName) {
        this.mediaSizeName = mediaSizeName;
    }

    public Sides getInternalSides() {
        return internalSides;
    }

    public void setInternalSides(Sides internalSides) {
        this.internalSides = internalSides;
    }

    public OrientationRequested getInternalOrientation() {
        return internalOrientation;
    }

    public void setInternalOrientation(OrientationRequested internalOrientation) {
        this.internalOrientation = internalOrientation;
    }

    public String getOrientation() {
        return orientation;
    }

    public void setOrientation(String orientation) {
        this.orientation = orientation;
    }

    public String getMimeType() {
        return mimeType;
    }

    public void setMimeType(String mimeType) {
        this.mimeType = mimeType;
    }

    public boolean isSendToPrinter() {
        return sendToPrinter;
    }

    public void setSendToPrinter(boolean sendToPrinter) {
        this.sendToPrinter = sendToPrinter;
    }

    public String getMediaTray() {
        return mediaTray;
    }

    public void setMediaTray(String mediaTray) {
        this.mediaTray = mediaTray;
    }

    public String getPrinterPrefix() {
        return printerPrefix;
    }

    public void setPrinterPrefix(String printerPrefix) {
        this.printerPrefix = printerPrefix;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.storm.scheduler.multitenant;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.storm.scheduler.SchedulerAssignment;
import org.apache.storm.scheduler.TopologyDetails;
import org.apache.storm.scheduler.WorkerSlot;

/**
 * A pool of machines that anyone can use, but topologies are not isolated
 */
public class DefaultPool extends NodePool {
  private static final Logger LOG = LoggerFactory.getLogger(DefaultPool.class);
  // Nodes currently owned by this pool (populated from existing assignments and
  // from nodes taken out of lesser pools during scheduling).
  private Set<Node> _nodes = new HashSet<>();
  // Topologies managed by this pool, keyed by topology id.
  private HashMap<String, TopologyDetails> _tds = new HashMap<>();

  /**
   * Registers a topology with this pool and claims any nodes that already
   * host one of its assigned worker slots.
   *
   * @param td the topology being added to the pool
   */
  @Override
  public void addTopology(TopologyDetails td) {
    String topId = td.getId();
    LOG.debug("Adding in Topology {}", topId);
    _tds.put(topId, td);
    SchedulerAssignment assignment = _cluster.getAssignmentById(topId);
    if (assignment != null) {
      // Pull every node that currently hosts a slot of this topology into the pool.
      for (WorkerSlot ws: assignment.getSlots()) {
        Node n = _nodeIdToNode.get(ws.getNodeId());
        _nodes.add(n);
      }
    }
  }

  /**
   * The default pool accepts every topology unconditionally.
   *
   * @param td the topology to check (ignored)
   * @return always {@code true}
   */
  @Override
  public boolean canAdd(TopologyDetails td) {
    return true;
  }

  /**
   * Removes up to {@code nodesNeeded} live nodes from this pool, freeing all
   * of their slots first. Nodes with the most free slots are taken first
   * (per {@code Node.FREE_NODE_COMPARATOR_DEC}).
   *
   * @param nodesNeeded maximum number of nodes to take
   * @return the nodes removed from this pool
   */
  @Override
  public Collection<Node> takeNodes(int nodesNeeded) {
    HashSet<Node> ret = new HashSet<>();
    LinkedList<Node> sortedNodes = new LinkedList<>(_nodes);
    Collections.sort(sortedNodes, Node.FREE_NODE_COMPARATOR_DEC);
    for (Node n: sortedNodes) {
      if (nodesNeeded <= ret.size()) {
        break;
      }
      if (n.isAlive()) {
        // Evict whatever was running so the node leaves the pool empty.
        n.freeAllSlots(_cluster);
        _nodes.remove(n);
        ret.add(n);
      }
    }
    return ret;
  }

  /**
   * @return the number of live nodes currently in this pool
   */
  @Override
  public int nodesAvailable() {
    int total = 0;
    for (Node n: _nodes) {
      if (n.isAlive()) total++;
    }
    return total;
  }

  /**
   * @return the total number of slots (free or used) on live nodes in this pool
   */
  @Override
  public int slotsAvailable() {
    return Node.countTotalSlotsAlive(_nodes);
  }

  /**
   * Simulates {@link #takeNodesBySlots(int)} without mutating the pool:
   * counts how many nodes, and how many slots in total, would be handed over
   * to satisfy a request for {@code slotsNeeded} slots.
   *
   * @param slotsNeeded number of slots requested
   * @return the node and slot counts that a real take would yield
   */
  @Override
  public NodeAndSlotCounts getNodeAndSlotCountIfSlotsWereTaken(int slotsNeeded) {
    int nodesFound = 0;
    int slotsFound = 0;
    LinkedList<Node> sortedNodes = new LinkedList<>(_nodes);
    Collections.sort(sortedNodes, Node.FREE_NODE_COMPARATOR_DEC);
    for (Node n: sortedNodes) {
      if (slotsNeeded <= 0) {
        break;
      }
      if (n.isAlive()) {
        nodesFound++;
        // Taking a node frees everything on it, so all of its slots count.
        int totalSlotsFree = n.totalSlots();
        slotsFound += totalSlotsFree;
        slotsNeeded -= totalSlotsFree;
      }
    }
    return new NodeAndSlotCounts(nodesFound, slotsFound);
  }

  /**
   * Removes whole live nodes from this pool until at least {@code slotsNeeded}
   * slots have been gathered (or the pool runs out of live nodes). Each taken
   * node has all of its slots freed before being handed over.
   *
   * @param slotsNeeded number of slots requested
   * @return the nodes removed from this pool
   */
  @Override
  public Collection<Node> takeNodesBySlots(int slotsNeeded) {
    HashSet<Node> ret = new HashSet<>();
    LinkedList<Node> sortedNodes = new LinkedList<>(_nodes);
    Collections.sort(sortedNodes, Node.FREE_NODE_COMPARATOR_DEC);
    for (Node n: sortedNodes) {
      if (slotsNeeded <= 0) {
        break;
      }
      if (n.isAlive()) {
        n.freeAllSlots(_cluster);
        _nodes.remove(n);
        ret.add(n);
        // After freeAllSlots every slot on the node is free, so this credits
        // the node's full slot count toward the request.
        slotsNeeded -= n.totalSlotsFree();
      }
    }
    return ret;
  }

  /**
   * Schedules every topology in this pool that still needs scheduling.
   * If the pool does not have enough free slots it pulls nodes out of the
   * given lesser pools, then assigns slots round-robin across the pool's
   * nodes and records a human-readable status for each topology.
   *
   * @param lesserPools lower-priority pools that nodes may be taken from
   */
  @Override
  public void scheduleAsNeeded(NodePool... lesserPools) {
    for (TopologyDetails td : _tds.values()) {
      String topId = td.getId();
      if (_cluster.needsScheduling(td)) {
        LOG.debug("Scheduling topology {}",topId);
        int totalTasks = td.getExecutors().size();
        int origRequest = td.getNumWorkers();
        // Never use more workers than there are executors to run in them.
        int slotsRequested = Math.min(totalTasks, origRequest);
        int slotsUsed = Node.countSlotsUsed(topId, _nodes);
        int slotsFree = Node.countFreeSlotsAlive(_nodes);
        //Check to see if we have enough slots before trying to get them
        int slotsAvailable = 0;
        if (slotsRequested > slotsFree) {
          slotsAvailable = NodePool.slotsAvailable(lesserPools);
        }
        int slotsToUse = Math.min(slotsRequested - slotsUsed, slotsFree + slotsAvailable);
        int executorsNotRunning = _cluster.getUnassignedExecutors(td).size();
        LOG.debug("Slots... requested {} used {} free {} available {} to be used {}, executors not running {}",
            slotsRequested, slotsUsed, slotsFree, slotsAvailable, slotsToUse, executorsNotRunning);
        if (slotsToUse <= 0) {
          // Nothing more can be assigned right now; just report why.
          if (executorsNotRunning > 0) {
            _cluster.setStatus(topId,"Not fully scheduled (No free slots in default pool) "+executorsNotRunning+" executors not scheduled");
          } else {
            if (slotsUsed < slotsRequested) {
              _cluster.setStatus(topId,"Running with fewer slots than requested ("+slotsUsed+"/"+origRequest+")");
            } else {
              // slotsUsed >= slotsRequested here, but slotsRequested may have been
              // capped below origRequest by the executor count.
              _cluster.setStatus(topId,"Fully Scheduled (requested "+origRequest+" slots, but could only use "+slotsUsed+")");
            }
          }
          continue;
        }

        int slotsNeeded = slotsToUse - slotsFree;
        if (slotsNeeded > 0) {
          // Grow the pool by taking nodes from the lower-priority pools.
          _nodes.addAll(NodePool.takeNodesBySlot(slotsNeeded, lesserPools));
        }

        if (executorsNotRunning <= 0) {
          //There are free slots that we can take advantage of now.
          for (Node n: _nodes) {
            n.freeTopology(topId, _cluster);
          }
          slotsFree = Node.countFreeSlotsAlive(_nodes);
          slotsToUse = Math.min(slotsRequested, slotsFree);
        }

        RoundRobinSlotScheduler slotSched =
          new RoundRobinSlotScheduler(td, slotsToUse, _cluster);

        LinkedList<Node> nodes = new LinkedList<>(_nodes);
        while (true) {
          Node n;
          // Skip over (and drop) nodes with no free slots to find a usable one.
          do {
            if (nodes.isEmpty()) {
              throw new IllegalStateException("This should not happen, we" +
                  " messed up and did not get enough slots");
            }
            n = nodes.peekFirst();
            if (n.totalSlotsFree() == 0) {
              nodes.remove();
              n = null;
            }
          } while (n == null);
          if (!slotSched.assignSlotTo(n)) {
            break;
          }
        }

        int afterSchedSlotsUsed = Node.countSlotsUsed(topId, _nodes);
        if (afterSchedSlotsUsed < slotsRequested) {
          _cluster.setStatus(topId,"Running with fewer slots than requested ("+afterSchedSlotsUsed+"/"+origRequest+")");
        } else if (afterSchedSlotsUsed < origRequest) {
          _cluster.setStatus(topId,"Fully Scheduled (requested "+origRequest+" slots, but could only use "+afterSchedSlotsUsed+")");
        } else {
          _cluster.setStatus(topId,"Fully Scheduled");
        }
      } else {
        _cluster.setStatus(topId,"Fully Scheduled");
      }
    }
  }

  /**
   * @return a short summary of the pool's node and topology counts
   */
  @Override
  public String toString() {
    return "DefaultPool " + _nodes.size() + " nodes " + _tds.size() + " topologies";
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test; import com.fasterxml.jackson.core.io.JsonStringEncoder; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentGenerator; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import 
org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.node.InternalSettingsPreparer; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.internal.SearchContext; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import java.io.Closeable; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Deque; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.stream.Stream; import static java.util.Collections.emptyList; import static java.util.stream.Collectors.toList; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static 
org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> extends ESTestCase { public static final String STRING_FIELD_NAME = "mapped_string"; protected static final String STRING_FIELD_NAME_2 = "mapped_string_2"; protected static final String INT_FIELD_NAME = "mapped_int"; protected static final String DOUBLE_FIELD_NAME = "mapped_double"; protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean"; protected static final String DATE_FIELD_NAME = "mapped_date"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME}; private static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME, }; private static final int NUMBER_OF_TESTQUERIES = 20; protected static Version indexVersionCreated; private static ServiceHolder serviceHolder; private static int queryNameId = 0; private static Settings nodeSettings; private static Index index; private static String[] currentTypes; private static String[] randomTypes; protected static Index getIndex() { return index; } protected static String[] getCurrentTypes() { return currentTypes; } protected Collection<Class<? 
extends Plugin>> getPlugins() { return Collections.emptyList(); } protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { } @BeforeClass public static void beforeClass() { nodeSettings = Settings.builder() .put("node.name", AbstractQueryTestCase.class.toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); index = new Index(randomAlphaOfLengthBetween(1, 10), "_na_"); // Set a single type in the index switch (random().nextInt(3)) { case 0: currentTypes = new String[0]; // no types break; default: currentTypes = new String[] { "doc" }; break; } randomTypes = getRandomTypes(); } protected Settings indexSettings() { // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. indexVersionCreated = randomBoolean() ? Version.CURRENT : VersionUtils.randomVersionBetween(random(), null, Version.CURRENT); return Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated) .build(); } @AfterClass public static void afterClass() throws Exception { IOUtils.close(serviceHolder); serviceHolder = null; } @Before public void beforeTest() throws IOException { if (serviceHolder == null) { serviceHolder = new ServiceHolder(nodeSettings, indexSettings(), getPlugins(), this); } serviceHolder.clientInvocationHandler.delegate = this; } private static SearchContext getSearchContext(String[] types, QueryShardContext context) { TestSearchContext testSearchContext = new TestSearchContext(context) { @Override public MapperService mapperService() { return serviceHolder.mapperService; // need to build / parse inner hits sort fields } @Override public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) { return serviceHolder.indexFieldDataService.getForField(fieldType); // need to build / parse inner hits sort fields } }; testSearchContext.getQueryShardContext().setTypes(types); return testSearchContext; } @After public void 
afterTest() { serviceHolder.clientInvocationHandler.delegate = null; } public final QB createTestQueryBuilder() { QB query = doCreateTestQueryBuilder(); //we should not set boost and query name for queries that don't parse it if (supportsBoostAndQueryName()) { if (randomBoolean()) { query.boost(2.0f / randomIntBetween(1, 20)); } if (randomBoolean()) { query.queryName(createUniqueRandomName()); } } return query; } /** * make sure query names are unique by suffixing them with increasing counter */ private static String createUniqueRandomName() { String queryName = randomAlphaOfLengthBetween(1, 10) + queryNameId; queryNameId++; return queryName; } /** * Create the query that is being tested */ protected abstract QB doCreateTestQueryBuilder(); /** * Generic test that creates new query from the test query and checks both for equality * and asserts equality on the two queries. */ public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) { QB testQuery = createTestQueryBuilder(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffledXContent = toShuffledXContent(testQuery, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean(), shuffleProtectedFields()); assertParsedQuery(createParser(xContentType.xContent(), shuffledXContent), testQuery); for (Map.Entry<String, QB> alternateVersion : getAlternateVersions().entrySet()) { String queryAsString = alternateVersion.getKey(); assertParsedQuery(createParser(JsonXContent.jsonXContent, queryAsString), alternateVersion.getValue()); } } } /** * Subclasses can override this method and return an array of fieldnames which should be protected from * recursive random shuffling in the {@link #testFromXContent()} test case */ protected String[] shuffleProtectedFields() { return Strings.EMPTY_ARRAY; } /** * Test that unknown field trigger ParsingException. 
* To find the right position in the root query, we add a marker as `queryName` which * all query builders support. The added bogus field after that should trigger the exception. * Queries that allow arbitrary field names at this level need to override this test. */ public void testUnknownField() { String marker = "#marker#"; QB testQuery; do { testQuery = createTestQueryBuilder(); } while (testQuery.toString().contains(marker)); testQuery.queryName(marker); // to find root query to add additional bogus field there String queryAsString = testQuery.toString().replace("\"" + marker + "\"", "\"" + marker + "\", \"bogusField\" : \"someValue\""); ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryAsString)); // we'd like to see the offending field name here assertThat(e.getMessage(), containsString("bogusField")); } /** * Test that adding an additional object within each object of the otherwise correct query always triggers some kind of * parse exception. Some specific objects do not cause any exception as they can hold arbitrary content; they can be * declared by overriding {@link #getObjectsHoldingArbitraryContent()}. 
*/ public final void testUnknownObjectException() throws IOException { Set<String> candidates = new HashSet<>(); // Adds the valid query to the list of queries to modify and test candidates.add(createTestQueryBuilder().toString()); // Adds the alternates versions of the query too candidates.addAll(getAlternateVersions().keySet()); List<Tuple<String, Boolean>> testQueries = alterateQueries(candidates, getObjectsHoldingArbitraryContent()); for (Tuple<String, Boolean> testQuery : testQueries) { boolean expectedException = testQuery.v2(); try { parseQuery(testQuery.v1()); if (expectedException) { fail("some parsing exception expected for query: " + testQuery); } } catch (ParsingException | ElasticsearchParseException e) { // different kinds of exception wordings depending on location // of mutation, so no simple asserts possible here if (expectedException == false) { throw new AssertionError("unexpected exception when parsing query:\n" + testQuery, e); } } catch (IllegalArgumentException e) { if (expectedException == false) { throw new AssertionError("unexpected exception when parsing query:\n" + testQuery, e); } assertThat(e.getMessage(), containsString("unknown field [newField], parser not found")); } } } /** * Traverses the json tree of the valid query provided as argument and mutates it one or more times by adding one object within each * object encountered. * * For instance given the following valid term query: * { * "term" : { * "field" : { * "value" : "foo" * } * } * } * * The following two mutations will be generated, and an exception is expected when trying to parse them: * { * "term" : { * "newField" : { * "field" : { * "value" : "foo" * } * } * } * } * * { * "term" : { * "field" : { * "newField" : { * "value" : "foo" * } * } * } * } * * Every mutation is then added to the list of results with a boolean flag indicating if a parsing exception is expected or not * for the mutation. 
Some specific objects do not cause any exception as they can hold arbitrary content; they are passed using the * arbitraryMarkers parameter. */ static List<Tuple<String, Boolean>> alterateQueries(Set<String> queries, Set<String> arbitraryMarkers) throws IOException { List<Tuple<String, Boolean>> results = new ArrayList<>(); // Indicate if a part of the query can hold any arbitrary content boolean hasArbitraryContent = (arbitraryMarkers != null && arbitraryMarkers.isEmpty() == false); for (String query : queries) { // Track the number of query mutations int mutation = 0; while (true) { boolean expectException = true; BytesStreamOutput out = new BytesStreamOutput(); try ( XContentGenerator generator = XContentType.JSON.xContent().createGenerator(out); XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, query); ) { int objectIndex = -1; Deque<String> levels = new LinkedList<>(); // Parse the valid query and inserts a new object level called "newField" XContentParser.Token token; while ((token = parser.nextToken()) != null) { if (token == XContentParser.Token.START_ARRAY) { levels.addLast(parser.currentName()); } else if (token == XContentParser.Token.START_OBJECT) { objectIndex++; levels.addLast(parser.currentName()); if (objectIndex == mutation) { // We reached the place in the object tree where we want to insert a new object level generator.writeStartObject(); generator.writeFieldName("newField"); XContentHelper.copyCurrentStructure(generator, parser); generator.writeEndObject(); if (hasArbitraryContent) { // The query has one or more fields that hold arbitrary content. If the current // field is one (or a child) of those, no exception is expected when parsing the mutated query. 
for (String marker : arbitraryMarkers) { if (levels.contains(marker)) { expectException = false; break; } } } // Jump to next token continue; } } else if (token == XContentParser.Token.END_OBJECT || token == XContentParser.Token.END_ARRAY) { levels.removeLast(); } // We are walking through the object tree, so we can safely copy the current node XContentHelper.copyCurrentEvent(generator, parser); } if (objectIndex < mutation) { // We did not reach the insertion point, there's no more mutations to try break; } else { // We reached the expected insertion point, so next time we'll try one step further mutation++; } } results.add(new Tuple<>(out.bytes().utf8ToString(), expectException)); } } return results; } /** * Returns a set of object names that won't trigger any exception (uncluding their children) when testing that unknown * objects cause parse exceptions through {@link #testUnknownObjectException()}. Default is an empty set. Can be overridden * by subclasses that test queries which contain objects that get parsed on the data nodes (e.g. score functions) or objects * that can contain arbitrary content (e.g. documents for percolate or more like this query, params for scripts). In such * cases no exception would get thrown. */ protected Set<String> getObjectsHoldingArbitraryContent() { return Collections.emptySet(); } /** * Test that wraps the randomly generated query into an array as follows: { "query_name" : [{}]} * This causes unexpected situations in parser code that may not be handled properly. 
*/ public final void testQueryWrappedInArray() { QB queryBuilder = createTestQueryBuilder(); String queryName = queryBuilder.getName(); String validQuery = queryBuilder.toString(); queryWrappedInArrayTest(queryName, validQuery); for (String query : getAlternateVersions().keySet()) { queryWrappedInArrayTest(queryName, query); } } private void queryWrappedInArrayTest(String queryName, String validQuery) { int i = validQuery.indexOf("\"" + queryName + "\""); assertThat(i, greaterThan(0)); int insertionPosition; for (insertionPosition = i; insertionPosition < validQuery.length(); insertionPosition++) { if (validQuery.charAt(insertionPosition) == ':') { break; } } insertionPosition++; int endArrayPosition; for (endArrayPosition = validQuery.length() - 1; endArrayPosition >= 0; endArrayPosition--) { if (validQuery.charAt(endArrayPosition) == '}') { break; } } String testQuery = validQuery.substring(0, insertionPosition) + "[" + validQuery.substring(insertionPosition, endArrayPosition) + "]" + validQuery.substring(endArrayPosition, validQuery.length()); ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(testQuery)); assertEquals("[" + queryName + "] query malformed, no start_object after query name", e.getMessage()); } /** * Returns alternate string representation of the query that need to be tested as they are never used as output * of {@link QueryBuilder#toXContent(XContentBuilder, ToXContent.Params)}. By default there are no alternate versions. 
*/ protected Map<String, QB> getAlternateVersions() { return Collections.emptyMap(); } /** * Parses the query provided as string argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ protected void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException { QueryBuilder newQuery = parseQuery(queryAsString); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); assertEquals(expectedQuery.hashCode(), newQuery.hashCode()); } /** * Parses the query provided as bytes argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ private static void assertParsedQuery(XContentParser parser, QueryBuilder expectedQuery) throws IOException { QueryBuilder newQuery = parseQuery(parser); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); assertEquals(expectedQuery.hashCode(), newQuery.hashCode()); } protected QueryBuilder parseQuery(AbstractQueryBuilder<?> builder) throws IOException { BytesReference bytes = XContentHelper.toXContent(builder, XContentType.JSON, false); return parseQuery(createParser(JsonXContent.jsonXContent, bytes)); } protected QueryBuilder parseQuery(String queryAsString) throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, queryAsString); return parseQuery(parser); } protected static QueryBuilder parseQuery(XContentParser parser) throws IOException { QueryBuilder parseInnerQueryBuilder = parseInnerQueryBuilder(parser); assertNull(parser.nextToken()); return parseInnerQueryBuilder; } /** * Whether the queries produced by this builder are expected to be cacheable. */ protected boolean builderGeneratesCacheableQueries() { return true; } /** * Test creates the {@link Query} from the {@link QueryBuilder} under test and delegates the * assertions being made on the result to the implementing subclass. 
*/ public void testToQuery() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) { QueryShardContext context = createShardContext(); assert context.isCachable(); context.setAllowUnmappedFields(true); QB firstQuery = createTestQueryBuilder(); QB controlQuery = copyQuery(firstQuery); SearchContext searchContext = getSearchContext(randomTypes, context); /* we use a private rewrite context here since we want the most realistic way of asserting that we are cacheable or not. * We do it this way in SearchService where * we first rewrite the query with a private context, then reset the context and then build the actual lucene query*/ QueryBuilder rewritten = rewriteQuery(firstQuery, new QueryShardContext(context)); Query firstLuceneQuery = rewritten.toQuery(context); if (isCachable(firstQuery)) { assertTrue("query was marked as not cacheable in the context but this test indicates it should be cacheable: " + firstQuery.toString(), context.isCachable()); } else { assertFalse("query was marked as cacheable in the context but this test indicates it should not be cacheable: " + firstQuery.toString(), context.isCachable()); } assertNotNull("toQuery should not return null", firstLuceneQuery); assertLuceneQuery(firstQuery, firstLuceneQuery, searchContext); //remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well assertTrue( "query is not equal to its copy after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, firstQuery.equals(controlQuery)); assertTrue("equals is not symmetric after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, controlQuery.equals(firstQuery)); assertThat("query copy's hashcode is different from original hashcode after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, controlQuery.hashCode(), equalTo(firstQuery.hashCode())); QB secondQuery = copyQuery(firstQuery); // query _name never should affect the 
result of toQuery, we randomly set it to make sure if (randomBoolean()) { secondQuery.queryName(secondQuery.queryName() == null ? randomAlphaOfLengthBetween(1, 30) : secondQuery.queryName() + randomAlphaOfLengthBetween(1, 10)); } searchContext = getSearchContext(randomTypes, context); Query secondLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); assertNotNull("toQuery should not return null", secondLuceneQuery); assertLuceneQuery(secondQuery, secondLuceneQuery, searchContext); if (builderGeneratesCacheableQueries()) { assertEquals("two equivalent query builders lead to different lucene queries", rewrite(secondLuceneQuery), rewrite(firstLuceneQuery)); } if (supportsBoostAndQueryName()) { secondQuery.boost(firstQuery.boost() + 1f + randomFloat()); Query thirdLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); assertNotEquals("modifying the boost doesn't affect the corresponding lucene query", rewrite(firstLuceneQuery), rewrite(thirdLuceneQuery)); } // check that context#isFilter is not changed by invoking toQuery/rewrite boolean filterFlag = randomBoolean(); context.setIsFilter(filterFlag); rewriteQuery(firstQuery, context).toQuery(context); assertEquals("isFilter should be unchanged", filterFlag, context.isFilter()); } } private QueryBuilder rewriteQuery(QB queryBuilder, QueryRewriteContext rewriteContext) throws IOException { QueryBuilder rewritten = rewriteAndFetch(queryBuilder, rewriteContext); // extra safety to fail fast - serialize the rewritten version to ensure it's serializable. assertSerialization(rewritten); return rewritten; } protected boolean isCachable(QB queryBuilder) { return true; } /** * Few queries allow you to set the boost and queryName on the java api, although the corresponding parser * doesn't parse them as they are not supported. This method allows to disable boost and queryName related tests for those queries. 
     * Those queries are easy to identify: their parsers don't parse `boost` and `_name` as they don't apply to the specific query:
     * wrapper query and match_none
     */
    protected boolean supportsBoostAndQueryName() {
        return true;
    }

    /**
     * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder}
     * and {@link QueryShardContext}. Verifies that named queries and boost are properly handled and delegates to
     * {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, SearchContext)} for query specific checks.
     */
    private void assertLuceneQuery(QB queryBuilder, Query query, SearchContext context) throws IOException {
        if (queryBuilder.queryName() != null) {
            // a named query must be registered in the shard context under its name and map back to the built query
            Query namedQuery = context.getQueryShardContext().copyNamedQueries().get(queryBuilder.queryName());
            assertThat(namedQuery, equalTo(query));
        }
        if (query != null) {
            if (queryBuilder.boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
                // a non-default boost must be materialized as a (Span)BoostQuery wrapper; unwrap it
                // before delegating so subclasses assert on the inner query
                assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(SpanBoostQuery.class)));
                if (query instanceof SpanBoostQuery) {
                    SpanBoostQuery spanBoostQuery = (SpanBoostQuery) query;
                    assertThat(spanBoostQuery.getBoost(), equalTo(queryBuilder.boost()));
                    query = spanBoostQuery.getQuery();
                } else {
                    BoostQuery boostQuery = (BoostQuery) query;
                    assertThat(boostQuery.getBoost(), equalTo(queryBuilder.boost()));
                    query = boostQuery.getQuery();
                }
            }
        }
        doAssertLuceneQuery(queryBuilder, query, context);
    }

    /**
     * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder}
     * and {@link QueryShardContext}. Contains the query specific checks to be implemented by subclasses.
     */
    protected abstract void doAssertLuceneQuery(QB queryBuilder, Query query, SearchContext context) throws IOException;

    /**
     * Asserts that {@code query} is a {@link TermQuery} on {@code field}/{@code value}, unwrapping a
     * {@link BoostQuery} first when {@code fieldBoost} is not the default boost.
     */
    protected static void assertTermOrBoostQuery(Query query, String field, String value, float fieldBoost) {
        if (fieldBoost != AbstractQueryBuilder.DEFAULT_BOOST) {
            assertThat(query, instanceOf(BoostQuery.class));
            BoostQuery boostQuery = (BoostQuery) query;
            assertThat(boostQuery.getBoost(), equalTo(fieldBoost));
            query = boostQuery.getQuery();
        }
        assertTermQuery(query, field, value);
    }

    /**
     * Asserts that {@code query} is a {@link TermQuery} on the given field whose term text matches
     * {@code value} case-insensitively (both sides lower-cased with {@link Locale#ROOT}).
     */
    protected static void assertTermQuery(Query query, String field, String value) {
        assertThat(query, instanceOf(TermQuery.class));
        TermQuery termQuery = (TermQuery) query;
        assertThat(termQuery.getTerm().field(), equalTo(field));
        assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(value.toLowerCase(Locale.ROOT)));
    }

    /**
     * Test serialization and deserialization of the test query.
     */
    public void testSerialization() throws IOException {
        for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
            QB testQuery = createTestQueryBuilder();
            assertSerialization(testQuery);
        }
    }

    /** Serialization round-trip against the current wire version. */
    protected static QueryBuilder assertSerialization(QueryBuilder testQuery) throws IOException {
        return assertSerialization(testQuery, Version.CURRENT);
    }

    /**
     * Serializes the given query builder at the given wire {@code version} and asserts that the
     * deserialized copy is equal (and has the same hash code) but is a distinct instance.
     */
    protected static QueryBuilder assertSerialization(QueryBuilder testQuery, Version version) throws IOException {
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            output.setVersion(version);
            output.writeNamedWriteable(testQuery);
            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(),
                    serviceHolder.namedWriteableRegistry)) {
                in.setVersion(version);
                QueryBuilder deserializedQuery = in.readNamedWriteable(QueryBuilder.class);
                assertEquals(testQuery, deserializedQuery);
                assertEquals(testQuery.hashCode(), deserializedQuery.hashCode());
                assertNotSame(testQuery, deserializedQuery);
                return deserializedQuery;
            }
        }
    }

    public void testEqualsAndHashcode() {
        for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
            // TODO we only change name and boost, we should extend by any sub-test supplying a "mutate" method that randomly changes one
            // aspect of the object under test
            checkEqualsAndHashCode(createTestQueryBuilder(), this::copyQuery, this::changeNameOrBoost);
        }
    }

    /**
     * Generic test that checks that the <code>Strings.toString()</code> method
     * renders the XContent correctly.
     */
    public void testValidOutput() throws IOException {
        for (int runs = 0; runs < NUMBER_OF_TESTQUERIES; runs++) {
            QB testQuery = createTestQueryBuilder();
            XContentType xContentType = XContentType.JSON;
            // round-trip via toString() and via the raw XContent bytes; both must parse back to an equal query
            String toString = Strings.toString(testQuery);
            assertParsedQuery(createParser(xContentType.xContent(), toString), testQuery);
            BytesReference bytes = XContentHelper.toXContent(testQuery, xContentType, false);
            assertParsedQuery(createParser(xContentType.xContent(), bytes), testQuery);
        }
    }

    /**
     * Returns a copy of {@code original} with either a mutated query name or a mutated boost,
     * used as the "mutate" function of the equals/hashCode check above.
     */
    private QB changeNameOrBoost(QB original) throws IOException {
        QB secondQuery = copyQuery(original);
        if (randomBoolean()) {
            secondQuery.queryName(secondQuery.queryName() == null ? randomAlphaOfLengthBetween(1, 30)
                    : secondQuery.queryName() + randomAlphaOfLengthBetween(1, 10));
        } else {
            secondQuery.boost(original.boost() + 1f + randomFloat());
        }
        return secondQuery;
    }

    //we use the streaming infra to create a copy of the query provided as argument
    @SuppressWarnings("unchecked")
    private QB copyQuery(QB query) throws IOException {
        Reader<QB> reader = (Reader<QB>) serviceHolder.namedWriteableRegistry.getReader(QueryBuilder.class,
                query.getWriteableName());
        return copyWriteable(query, serviceHolder.namedWriteableRegistry, reader);
    }

    /**
     * @return a new {@link QueryShardContext} based on the base test index and queryParserService
     */
    protected static QueryShardContext createShardContext() {
        return serviceHolder.createShardContext();
    }

    /**
     * create a random value for either {@link AbstractQueryTestCase#BOOLEAN_FIELD_NAME}, {@link AbstractQueryTestCase#INT_FIELD_NAME},
     * {@link AbstractQueryTestCase#DOUBLE_FIELD_NAME}, {@link AbstractQueryTestCase#STRING_FIELD_NAME} or
     * {@link AbstractQueryTestCase#DATE_FIELD_NAME}, or a String value by default
     */
    protected static Object getRandomValueForFieldName(String fieldName) {
        Object value;
        switch (fieldName) {
            case STRING_FIELD_NAME:
                if (rarely()) {
                    // unicode in 10% cases
                    JsonStringEncoder encoder = JsonStringEncoder.getInstance();
                    value = new String(encoder.quoteAsString(randomUnicodeOfLength(10)));
                } else {
                    value = randomAlphaOfLengthBetween(1, 10);
                }
                break;
            case INT_FIELD_NAME:
                value = randomIntBetween(0, 10);
                break;
            case DOUBLE_FIELD_NAME:
                value = 1 + randomDouble() * 9;
                break;
            case BOOLEAN_FIELD_NAME:
                value = randomBoolean();
                break;
            case DATE_FIELD_NAME:
                value = new DateTime(System.currentTimeMillis(), DateTimeZone.UTC).toString();
                break;
            default:
                value = randomAlphaOfLengthBetween(1, 10);
        }
        return value;
    }

    /** Returns 0-3 random space-separated alphabetic terms (may be the empty string). */
    protected static String getRandomQueryText() {
        int terms = randomIntBetween(0, 3);
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < terms; i++) {
            builder.append(randomAlphaOfLengthBetween(1, 10)).append(" ");
        }
        return builder.toString().trim();
    }

    /**
     * Helper method to return a mapped or a random field
     */
    protected static String getRandomFieldName() {
        // if no type is set then return a random field name
        if (currentTypes.length == 0 || randomBoolean()) {
            return randomAlphaOfLengthBetween(1, 10);
        }
        return randomFrom(MAPPED_LEAF_FIELD_NAMES);
    }

    /**
     * Helper method to return a random rewrite method
     */
    protected static String getRandomRewriteMethod() {
        String rewrite;
        if (randomBoolean()) {
            rewrite = randomFrom(QueryParsers.CONSTANT_SCORE, QueryParsers.SCORING_BOOLEAN,
                    QueryParsers.CONSTANT_SCORE_BOOLEAN).getPreferredName();
        } else {
            // the top_terms* methods take a size suffix; "1" is appended here
            rewrite = randomFrom(QueryParsers.TOP_TERMS, QueryParsers.TOP_TERMS_BOOST,
                    QueryParsers.TOP_TERMS_BLENDED_FREQS).getPreferredName() + "1";
        }
        return rewrite;
    }

    /**
     * Returns a random subset of the mapped types, or either {@code _all} or no types at all
     * when no types are configured (or at random).
     */
    private static String[] getRandomTypes() {
        String[] types;
        if (currentTypes.length > 0 && randomBoolean()) {
            int numberOfQueryTypes = randomIntBetween(1, currentTypes.length);
            types = new String[numberOfQueryTypes];
            for (int i = 0; i < numberOfQueryTypes; i++) {
                types[i] = randomFrom(currentTypes);
            }
        } else {
            if (randomBoolean()) {
                types = new String[]{MetaData.ALL};
            } else {
                types = new String[0];
            }
        }
        return types;
    }

    /** Returns a {@link Fuzziness} appropriate for the mapped type of {@code fieldName}. */
    protected static Fuzziness randomFuzziness(String fieldName) {
        switch (fieldName) {
            case INT_FIELD_NAME:
                return Fuzziness.build(randomIntBetween(3, 100));
            case DOUBLE_FIELD_NAME:
                return Fuzziness.build(1 + randomFloat() * 10);
            case DATE_FIELD_NAME:
                return Fuzziness.build(randomTimeValue());
            default:
                if (randomBoolean()) {
                    return Fuzziness.fromEdits(randomIntBetween(0, 2));
                }
                return Fuzziness.AUTO;
        }
    }

    protected static String randomAnalyzer() {
        return randomFrom("simple", "standard", "keyword", "whitespace");
    }

    protected static String randomMinimumShouldMatch() {
        return randomFrom("1", "-1", "75%", "-25%", "2<75%", "2<-25%");
    }

    /**
     * Dynamic-proxy handler backing the mock {@link Client}: routes the few client calls that
     * parsers/builders may issue (get, multiTermVectors, toString) to the enclosing test case and
     * rejects anything else.
     */
    private static class ClientInvocationHandler implements InvocationHandler {
        // set by the test harness before the proxy is used; receives the routed calls
        AbstractQueryTestCase<?> delegate;

        @Override
        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            if (method.equals(Client.class.getMethod("get", GetRequest.class, ActionListener.class))) {
                GetResponse getResponse = delegate.executeGet((GetRequest) args[0]);
                ActionListener<GetResponse> listener = (ActionListener<GetResponse>) args[1];
                // randomly respond on the calling thread or asynchronously to exercise both paths
                if (randomBoolean()) {
                    listener.onResponse(getResponse);
                } else {
                    new Thread(() -> listener.onResponse(getResponse)).start();
                }
                return null;
            } else if (method.equals(Client.class.getMethod("multiTermVectors", MultiTermVectorsRequest.class))) {
                return new PlainActionFuture<MultiTermVectorsResponse>() {
                    @Override
                    public MultiTermVectorsResponse get() throws InterruptedException, ExecutionException {
                        return delegate.executeMultiTermVectors((MultiTermVectorsRequest) args[0]);
                    }
                };
            } else if (method.equals(Object.class.getMethod("toString"))) {
                return "MockClient";
            }
            throw new UnsupportedOperationException("this test can't handle calls to: " + method);
        }
    }

    /**
     * Override this to handle {@link Client#get(GetRequest)} calls from parsers / builders
     */
    protected GetResponse executeGet(GetRequest getRequest) {
        throw new UnsupportedOperationException("this test can't handle GET requests");
    }

    /**
     * Override this to handle {@link Client#get(GetRequest)} calls from parsers / builders
     */
    protected MultiTermVectorsResponse executeMultiTermVectors(MultiTermVectorsRequest mtvRequest) {
        throw new UnsupportedOperationException("this test can't handle MultiTermVector requests");
    }

    /**
     * Call this method to check a valid json string representing the query under test against
     * its generated json.
     *
     * Note: By the time of this writing (Nov 2015) all queries are taken from the query dsl
     * reference docs mirroring examples there. Here's how the queries were generated:
     *
     * <ul>
     * <li> Take a reference documentation example.
     * <li> Stick it into the createParseableQueryJson method of the respective query test.
     * <li> Manually check that what the QueryBuilder generates equals the input json ignoring default options.
     * <li> Put the manual checks into the assertQueryParsedFromJson method.
     * <li> Now copy the generated json including default options into createParseableQueryJson
     * <li> By now the roundtrip check for the json should be happy.
     * </ul>
     **/
    public static void checkGeneratedJson(String expected, QueryBuilder source) throws IOException {
        // now assert that we actually generate the same JSON
        XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
        source.toXContent(builder, ToXContent.EMPTY_PARAMS);
        // compare whitespace-insensitively; msg() produces a character-level diff on failure
        assertEquals(
                msg(expected, builder.string()),
                expected.replaceAll("\\s+", ""),
                builder.string().replaceAll("\\s+", ""));
    }

    /**
     * Builds a human-readable character-level diff of {@code left} vs {@code right}: echoes the
     * common prefix, then reports the first differing offset and characters (also as code points).
     * Returns the empty string when the inputs are identical.
     */
    private static String msg(String left, String right) {
        int size = Math.min(left.length(), right.length());
        StringBuilder builder = new StringBuilder("size: " + left.length() + " vs. " + right.length());
        builder.append(" content: <<");
        for (int i = 0; i < size; i++) {
            if (left.charAt(i) == right.charAt(i)) {
                builder.append(left.charAt(i));
            } else {
                builder.append(">> ").append("until offset: ").append(i)
                        .append(" [").append(left.charAt(i)).append(" vs.").append(right.charAt(i))
                        .append("] [").append((int) left.charAt(i)).append(" vs.").append((int) right.charAt(i))
                        .append(']');
                return builder.toString();
            }
        }
        if (left.length() != right.length()) {
            // common prefix exhausted: report the trailing characters of the longer string
            int leftEnd = Math.max(size, left.length()) - 1;
            int rightEnd = Math.max(size, right.length()) - 1;
            builder.append(">> ").append("until offset: ").append(size)
                    .append(" [").append(left.charAt(leftEnd)).append(" vs.").append(right.charAt(rightEnd))
                    .append("] [").append((int) left.charAt(leftEnd)).append(" vs.").append((int) right.charAt(rightEnd))
                    .append(']');
            return builder.toString();
        }
        return "";
    }

    /**
     * This test ensures that queries that need to be rewritten have dedicated tests.
     * These queries must override this method accordingly.
     */
    public void testMustRewrite() throws IOException {
        QueryShardContext context = createShardContext();
        context.setAllowUnmappedFields(true);
        QB queryBuilder = createTestQueryBuilder();
        queryBuilder.toQuery(context);
    }

    /** Hook for subclasses to post-process the expected query before comparison; identity by default. */
    protected Query rewrite(Query query) throws IOException {
        return query;
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return serviceHolder.xContentRegistry;
    }

    /**
     * Bootstraps and holds the per-test-class services (mapper, script, similarity, field data,
     * registries and a proxied mock {@link Client}) needed to build {@link QueryShardContext}s.
     */
    private static class ServiceHolder implements Closeable {
        private final IndexFieldDataService indexFieldDataService;
        private final SearchModule searchModule;
        private final NamedWriteableRegistry namedWriteableRegistry;
        private final NamedXContentRegistry xContentRegistry;
        private final ClientInvocationHandler clientInvocationHandler = new ClientInvocationHandler();
        private final IndexSettings idxSettings;
        private final SimilarityService similarityService;
        private final MapperService mapperService;
        private final BitsetFilterCache bitsetFilterCache;
        private final ScriptService scriptService;
        private final Client client;
        // fixed "now" so date-math in queries is deterministic within a test run
        private final long nowInMillis = randomNonNegativeLong();

        ServiceHolder(Settings nodeSettings, Settings indexSettings,
                Collection<Class<? extends Plugin>> plugins, AbstractQueryTestCase<?> testCase) throws IOException {
            Environment env = InternalSettingsPreparer.prepareEnvironment(nodeSettings);
            PluginsService pluginsService;
            pluginsService = new PluginsService(nodeSettings, null, env.modulesFile(), env.pluginsFile(), plugins);

            // the Client is a dynamic proxy routed through ClientInvocationHandler
            client = (Client) Proxy.newProxyInstance(Client.class.getClassLoader(), new Class[]{Client.class},
                    clientInvocationHandler);
            ScriptModule scriptModule = createScriptModule(pluginsService.filterPlugins(ScriptPlugin.class));
            List<Setting<?>> additionalSettings = pluginsService.getPluginSettings();
            additionalSettings.add(InternalSettingsPlugin.VERSION_CREATED);
            SettingsModule settingsModule = new SettingsModule(nodeSettings, additionalSettings,
                    pluginsService.getPluginSettingsFilter());
            searchModule = new SearchModule(nodeSettings, false, pluginsService.filterPlugins(SearchPlugin.class));
            IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class));
            List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
            entries.addAll(indicesModule.getNamedWriteables());
            entries.addAll(searchModule.getNamedWriteables());
            namedWriteableRegistry = new NamedWriteableRegistry(entries);
            xContentRegistry = new NamedXContentRegistry(Stream.of(
                    searchModule.getNamedXContents().stream()
            ).flatMap(Function.identity()).collect(toList()));
            IndexScopedSettings indexScopedSettings = settingsModule.getIndexScopedSettings();
            idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings);
            AnalysisModule analysisModule = new AnalysisModule(TestEnvironment.newEnvironment(nodeSettings), emptyList());
            IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings);
            scriptService = scriptModule.getScriptService();
            similarityService = new SimilarityService(idxSettings, null, Collections.emptyMap());
            MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
            mapperService = new MapperService(idxSettings, indexAnalyzers, xContentRegistry, similarityService,
                    mapperRegistry, this::createShardContext);
            IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(nodeSettings,
                    new IndexFieldDataCache.Listener() {
            });
            indexFieldDataService = new IndexFieldDataService(idxSettings, indicesFieldDataCache,
                    new NoneCircuitBreakerService(), mapperService);
            bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() {
                @Override
                public void onCache(ShardId shardId, Accountable accountable) {
                }

                @Override
                public void onRemoval(ShardId shardId, Accountable accountable) {
                }
            });

            // register the standard set of test field mappings for every configured type
            for (String type : currentTypes) {
                mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type,
                        STRING_FIELD_NAME, "type=text",
                        STRING_FIELD_NAME_2, "type=keyword",
                        INT_FIELD_NAME, "type=integer",
                        DOUBLE_FIELD_NAME, "type=double",
                        BOOLEAN_FIELD_NAME, "type=boolean",
                        DATE_FIELD_NAME, "type=date",
                        OBJECT_FIELD_NAME, "type=object",
                        GEO_POINT_FIELD_NAME, "type=geo_point",
                        GEO_SHAPE_FIELD_NAME, "type=geo_shape"
                ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
                // also add mappings for two inner field in the object field
                mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\","
                        + "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" + INT_FIELD_NAME
                        + "\":{\"type\":\"integer\"}}}}}"), MapperService.MergeReason.MAPPING_UPDATE, false);
            }
            testCase.initializeAdditionalMappings(mapperService);
        }

        @Override
        public void close() throws IOException {
        }

        /** Builds a fresh {@link QueryShardContext} wired to the services held by this instance. */
        QueryShardContext createShardContext() {
            return new QueryShardContext(0, idxSettings, bitsetFilterCache, indexFieldDataService::getForField,
                    mapperService, similarityService, scriptService, xContentRegistry, namedWriteableRegistry,
                    this.client, null, () -> nowInMillis, null);
        }

        /** Uses the default test script module unless plugins contribute their own script engines. */
        ScriptModule createScriptModule(List<ScriptPlugin> scriptPlugins) {
            if (scriptPlugins == null || scriptPlugins.isEmpty()) {
                return newTestScriptModule();
            }
            return new ScriptModule(Settings.EMPTY, scriptPlugins);
        }
    }

    /** Rewrites {@code builder} against {@code context}, blocking until any async fetches complete. */
    protected QueryBuilder rewriteAndFetch(QueryBuilder builder, QueryRewriteContext context) throws IOException {
        PlainActionFuture<QueryBuilder> future = new PlainActionFuture<>();
        Rewriteable.rewriteAndFetch(builder, context, future);
        return future.actionGet();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.backend.hadoop.executionengine.mapReduceLayer; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pig.PigException; import org.apache.pig.backend.executionengine.ExecException; import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROpPlanVisitor; import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.PODemux; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLocalRearrange; import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POMultiQueryPackage; import 
org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POPackage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSplit;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.io.PigNullableWritable;
import org.apache.pig.impl.plan.NodeIdGenerator;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.PlanException;
import org.apache.pig.impl.plan.ReverseDependencyOrderWalker;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.impl.plan.optimizer.OptimizerException;
import org.apache.pig.impl.util.Pair;

/**
 * An optimizer that merges all or part of the splittee MapReduceOpers into
 * the splitter MapReduceOper.
 * <p>
 * The merge can produce a MROperPlan that has
 * fewer MapReduceOpers than MapReduceOpers in the original MROperPlan.
 * <p>
 * The MRCompiler generates multiple MapReduceOpers whenever it encounters
 * a split operator and connects the single splitter MapReduceOper to
 * one or more splittee MapReduceOpers using store/load operators:
 * <p>
 *     ---- POStore (in splitter) -... ----
 *     |        |    ...    |
 *     |        |    ...    |
 *    POLoad  POLoad ... POLoad (in splittees)
 *      |        |         |
 * <p>
 * This optimizer merges those MapReduceOpers by replacing POLoad/POStore
 * combination with POSplit operator.
 */
class MultiQueryOptimizer extends MROpPlanVisitor {

    private Log log = LogFactory.getLog(getClass());

    // NOTE(review): nig and scope are not referenced in the portion of the class visible here;
    // presumably used by helper methods (getMROper/getStore/getSplit) defined later — confirm.
    private NodeIdGenerator nig;

    private String scope;

    MultiQueryOptimizer(MROperPlan plan) {
        // reverse dependency order: visit successors (splittees) before their splitter
        super(plan, new ReverseDependencyOrderWalker<MapReduceOper, MROperPlan>(plan));
        nig = NodeIdGenerator.getGenerator();
        List<MapReduceOper> roots = plan.getRoots();
        scope = roots.get(0).getOperatorKey().getScope();
        log.info("MR plan size before optimization: " + plan.size());
    }

    @Override
    public void visit() throws VisitorException {
        super.visit();
        log.info("MR plan size after optimization: " + mPlan.size());
    }

    /**
     * Visits each splitter MR operator and tries to merge its splittees into it,
     * handling six distinct cases (see inline comments).
     */
    @Override
    public void visitMROp(MapReduceOper mr) throws VisitorException {
        if (!mr.isSplitter()) {
            return;
        }

        // first classify all the splittees
        List<MapReduceOper> mappers = new ArrayList<MapReduceOper>();
        List<MapReduceOper> multiLoadMROpers = new ArrayList<MapReduceOper>();
        List<MapReduceOper> mapReducers = new ArrayList<MapReduceOper>();

        List<MapReduceOper> successors = getPlan().getSuccessors(mr);
        for (MapReduceOper successor : successors) {
            if (successor.getUseSecondaryKey()) {
                log.debug("Splittee " + successor.getOperatorKey().getId()
                        + " uses secondary key, do not merge it");
                continue;
            }
            if (isMapOnly(successor)) {
                if (isSingleLoadMapperPlan(successor.mapPlan)
                        && isSinglePredecessor(successor)) {
                    mappers.add(successor);
                } else {
                    multiLoadMROpers.add(successor);
                }
            } else {
                if (isSingleLoadMapperPlan(successor.mapPlan)
                        && isSinglePredecessor(successor)) {
                    mapReducers.add(successor);
                } else {
                    multiLoadMROpers.add(successor);
                }
            }
        }

        int numSplittees = successors.size();

        // case 1: exactly one splittee and it's map-only
        if (mappers.size() == 1 && numSplittees == 1) {
            mergeOnlyMapperSplittee(mappers.get(0), mr);
            log.info("Merged the only map-only splittee.");
            return;
        }

        // case 2: exactly one splittee and it has reducer
        if (isMapOnly(mr) && mapReducers.size() == 1 && numSplittees == 1) {
            mergeOnlyMapReduceSplittee(mapReducers.get(0), mr);
            log.info("Merged the only map-reduce splittee.");
            return;
        }

        int numMerges = 0;

        PhysicalPlan splitterPl = isMapOnly(mr) ? mr.mapPlan : mr.reducePlan;
        POStore storeOp = (POStore) splitterPl.getLeaves().get(0);

        POSplit splitOp = null;

        // case 3: multiple splittees and at least one of them is map-only
        if (mappers.size() > 0) {
            splitOp = getSplit();
            int n = mergeAllMapOnlySplittees(mappers, mr, splitOp);
            log.info("Merged " + n + " map-only splittees.");
            numMerges += n;
        }

        if (mapReducers.size() > 0) {
            boolean isMapOnly = isMapOnly(mr);
            int merged = 0;

            // case 4: multiple splittees and at least one of them has reducer
            //         and the splitter is map-only
            if (isMapOnly) {
                // reuse the POSplit installed by case 3 if it replaced the store already
                PhysicalOperator leaf = splitterPl.getLeaves().get(0);
                splitOp = (leaf instanceof POStore) ? getSplit() : (POSplit) leaf;
                merged = mergeMapReduceSplittees(mapReducers, mr, splitOp);
            }
            // case 5: multiple splittees and at least one of them has reducer
            //         and splitter has reducer
            else {
                merged = mergeMapReduceSplittees(mapReducers, mr);
            }
            log.info("Merged " + merged + " map-reduce splittees.");
            numMerges += merged;
        }

        // Finally, add original store to the split operator
        // if there is splittee that hasn't been merged into the splitter
        if (splitOp != null && (numMerges < numSplittees)) {
            PhysicalPlan storePlan = new PhysicalPlan();
            try {
                storePlan.addAsLeaf(storeOp);
                splitOp.addPlan(storePlan);
            } catch (PlanException e) {
                int errCode = 2129;
                String msg = "Internal Error. Unable to add store to the split plan for optimization.";
                throw new OptimizerException(msg, errCode, PigException.BUG, e);
            }
        }

        // case 6: special diamond case with trivial MR operator at the head
        if (numMerges == 0 && isDiamondMROper(mr)) {
            int merged = mergeDiamondMROper(mr, getPlan().getSuccessors(mr));
            log.info("Merged " + merged + " diamond splitter.");
            numMerges += merged;
        }

        log.info("Merged " + numMerges + " out of total "
                + (numSplittees + 1) + " MR operators.");
    }

    /**
     * True when this map-only splitter is trivial enough (load→store, or
     * load→foreach→store) to be folded into each successor.
     */
    private boolean isDiamondMROper(MapReduceOper mr) {
        // We'll remove this mr as part of diamond query optimization
        // only if this mr is a trivial one, that is, it's plan
        // has either two operators (load followed by store) or three operators
        // (the operator between the load and store must be a foreach,
        // introduced by casting operation).
        //
        // We won't optimize in other cases where there're more operators
        // in the plan. Otherwise those operators would run multiple times
        // in the successor MR operators which may not give better
        // performance.
        boolean rtn = false;
        if (isMapOnly(mr)) {
            PhysicalPlan pl = mr.mapPlan;
            if (pl.size() == 2 || pl.size() == 3) {
                PhysicalOperator root = pl.getRoots().get(0);
                PhysicalOperator leaf = pl.getLeaves().get(0);
                if (root instanceof POLoad && leaf instanceof POStore) {
                    if (pl.size() == 3) {
                        PhysicalOperator mid = pl.getSuccessors(root).get(0);
                        if (mid instanceof POForEach) {
                            rtn = true;
                        }
                    } else {
                        rtn = true;
                    }
                }
            }
        }
        return rtn;
    }

    /**
     * Folds a trivial diamond splitter into each of its successors by cloning the
     * splitter's map plan in front of each matching load, then removes the splitter.
     * Returns 1 on success, 0 if any splittee has inputs from outside the splitter.
     */
    private int mergeDiamondMROper(MapReduceOper mr, List<MapReduceOper> succs)
        throws VisitorException {

        // Only consider the cases where all inputs of the splittees are
        // from the splitter
        for (MapReduceOper succ : succs) {
            List<MapReduceOper> preds = getPlan().getPredecessors(succ);
            if (preds.size() != 1) {
                return 0;
            }
        }

        // first, remove the store operator from the splitter
        PhysicalPlan pl = mr.mapPlan;
        PhysicalOperator leaf = mr.mapPlan.getLeaves().get(0);
        pl.remove(leaf);

        POStore store = (POStore) leaf;
        String ofile = store.getSFile().getFileName();

        // then connect the remaining map plan to the successor of
        // each root (load) operator of the splittee
        for (MapReduceOper succ : succs) {
            List<PhysicalOperator> roots = succ.mapPlan.getRoots();
            ArrayList<PhysicalOperator> rootsCopy =
                new ArrayList<PhysicalOperator>(roots);
            for (PhysicalOperator op : rootsCopy) {
                POLoad load = (POLoad) op;
                String ifile = load.getLFile().getFileName();
                // only splice onto loads that read the splitter's intermediate file
                if (ofile.compareTo(ifile) != 0) {
                    continue;
                }
                PhysicalOperator opSucc = succ.mapPlan.getSuccessors(op).get(0);
                PhysicalPlan clone = null;
                try {
                    clone = pl.clone();
                } catch (CloneNotSupportedException e) {
                    int errCode = 2127;
                    String msg = "Internal Error: Cloning of plan failed for optimization.";
                    throw new OptimizerException(msg, errCode, PigException.BUG, e);
                }
                succ.mapPlan.remove(op);
                // splice the cloned splitter plan in, leaf-first, in front of opSucc
                while (!clone.isEmpty()) {
                    PhysicalOperator oper = clone.getLeaves().get(0);
                    clone.remove(oper);
                    succ.mapPlan.add(oper);
                    try {
                        succ.mapPlan.connect(oper, opSucc);
                        opSucc = oper;
                    } catch (PlanException e) {
                        int errCode = 2131;
                        String msg = "Internal Error. Unable to connect split plan for optimization.";
                        throw new OptimizerException(msg, errCode, PigException.BUG, e);
                    }
                }
            }
        }

        // finally, remove the splitter from the MR plan
        List<MapReduceOper> mrPreds = getPlan().getPredecessors(mr);
        if (mrPreds != null) {
            for (MapReduceOper pred : mrPreds) {
                for (MapReduceOper succ : succs) {
                    try {
                        getPlan().connect(pred, succ);
                    } catch (PlanException e) {
                        int errCode = 2131;
                        String msg = "Internal Error. Unable to connect split plan for optimization.";
                        throw new OptimizerException(msg, errCode, PigException.BUG, e);
                    }
                }
            }
        }

        getPlan().remove(mr);

        return 1;
    }

    /**
     * Merges the mapper's map plan into the splitter's plan: drops the mapper's load
     * and the splitter's store, then reconnects the store's former predecessors to
     * the roots of the merged plan.
     */
    private void mergeOneMapPart(MapReduceOper mapper, MapReduceOper splitter)
    throws VisitorException {
        PhysicalPlan splitterPl = isMapOnly(splitter) ?
                splitter.mapPlan : splitter.reducePlan;
        POStore storeOp = (POStore) splitterPl.getLeaves().get(0);
        List<PhysicalOperator> storePreds = splitterPl.getPredecessors(storeOp);

        PhysicalPlan pl = mapper.mapPlan;
        PhysicalOperator load = pl.getRoots().get(0);
        pl.remove(load);

        // make a copy before removing the store operator
        List<PhysicalOperator> predsCopy = new ArrayList<PhysicalOperator>(storePreds);
        splitterPl.remove(storeOp);

        try {
            splitterPl.merge(pl);
        } catch (PlanException e) {
            int errCode = 2130;
            String msg = "Internal Error. Unable to merge split plans for optimization.";
            throw new OptimizerException(msg, errCode, PigException.BUG, e);
        }

        // connect two plans
        List<PhysicalOperator> roots = pl.getRoots();
        for (PhysicalOperator pred : predsCopy) {
            for (PhysicalOperator root : roots) {
                try {
                    splitterPl.connect(pred, root);
                } catch (PlanException e) {
                    int errCode = 2131;
                    String msg = "Internal Error. Unable to connect split plan for optimization.";
                    throw new OptimizerException(msg, errCode, PigException.BUG, e);
                }
            }
        }
    }

    /** Case 1: single map-only splittee — merge its map and remove it from the plan. */
    private void mergeOnlyMapperSplittee(MapReduceOper mapper,
            MapReduceOper splitter) throws VisitorException {
        mergeOneMapPart(mapper, splitter);
        removeAndReconnect(mapper, splitter);
    }

    /** Case 2: single MR splittee — merge its map, then adopt its reduce plan. */
    private void mergeOnlyMapReduceSplittee(MapReduceOper mapReducer,
            MapReduceOper splitter) throws VisitorException {
        mergeOneMapPart(mapReducer, splitter);

        splitter.setMapDone(true);
        splitter.reducePlan = mapReducer.reducePlan;
        splitter.setReduceDone(true);

        removeAndReconnect(mapReducer, splitter);
    }

    /**
     * Case 3: moves every map-only splittee's map plan into a nested plan of
     * {@code splitOp} and installs the split operator in place of the splitter's
     * store. Returns the number of splittees merged.
     */
    private int mergeAllMapOnlySplittees(List<MapReduceOper> mappers,
            MapReduceOper splitter, POSplit splitOp) throws VisitorException {

        PhysicalPlan splitterPl = isMapOnly(splitter) ?
                splitter.mapPlan : splitter.reducePlan;
        PhysicalOperator storeOp = splitterPl.getLeaves().get(0);
        List<PhysicalOperator> storePreds = splitterPl.getPredecessors(storeOp);

        // merge splitee's map plans into nested plan of
        // the split operator
        for (MapReduceOper mapper : mappers) {
            PhysicalPlan pl = mapper.mapPlan;
            PhysicalOperator load = pl.getRoots().get(0);
            pl.remove(load);
            splitOp.addPlan(pl);
        }

        // replace store operator in the splitter with split operator
        splitOp.setInputs(storePreds);
        try {
            // NOTE(review): stray empty statement (";;") kept as-is — harmless, but could be removed
            splitterPl.replace(storeOp, splitOp);;
        } catch (PlanException e) {
            int errCode = 2132;
            String msg = "Internal Error. Unable to replace store with split operator for optimization.";
            throw new OptimizerException(msg, errCode, PigException.BUG, e);
        }

        // remove all the map-only splittees from the MROperPlan
        for (MapReduceOper mapper : mappers) {
            removeAndReconnect(mapper, splitter);
        }

        return mappers.size();
    }

    /**
     * A splittee can be merged only if it partitions/combines compatibly:
     * not a global sort or limit-after-sort, map plan ends in LR or Split,
     * and it does not need the distinct combiner.
     */
    private boolean isSplitteeMergeable(MapReduceOper splittee) {

        // cannot be global sort or limit after sort, they are
        // using a different partitioner
        if (splittee.isGlobalSort() || splittee.isLimitAfterSort()) {
            log.info("Cannot merge this splittee: "
                    + "it is global sort or limit after sort");
            return false;
        }

        // check the plan leaf: only merge local rearrange or split
        PhysicalOperator leaf = splittee.mapPlan.getLeaves().get(0);
        if (!(leaf instanceof POLocalRearrange)
                && !(leaf instanceof POSplit)) {
            log.info("Cannot merge this splittee: "
                    + "its map plan doesn't end with LR or Split operator: "
                    + leaf.getClass().getName());
            return false;
        }

        // cannot have distinct combiner, it uses a different combiner
        if (splittee.needsDistinctCombiner()) {
            log.info("Cannot merge this splittee: "
                    + "it has distinct combiner.");
            return false;
        }

        return true;
    }

    /**
     * Partitions the candidate MR splittees into three compatibility groups
     * (no-combiner, combiner, distinct-combiner) and returns the largest one;
     * splittees in different groups cannot share a merged reduce.
     */
    private List<MapReduceOper> getMergeList(MapReduceOper splitter,
            List<MapReduceOper> mapReducers) {
        List<MapReduceOper> mergeNoCmbList = new ArrayList<MapReduceOper>();
        List<MapReduceOper> mergeCmbList = new ArrayList<MapReduceOper>();
        List<MapReduceOper> mergeDistList = new ArrayList<MapReduceOper>();

        for (MapReduceOper mrOp : mapReducers) {
            if (isSplitteeMergeable(mrOp)) {
                if (mrOp.combinePlan.isEmpty()) {
                    mergeNoCmbList.add(mrOp);
                } else {
                    mergeCmbList.add(mrOp);
                }
            } else if (splitter.reducePlan.isEmpty()
                    || splitter.needsDistinctCombiner()) {
                if (mrOp.needsDistinctCombiner()) {
                    mergeDistList.add(mrOp);
                }
            }
        }
        int max = Math.max(mergeNoCmbList.size(), mergeCmbList.size());
        max = Math.max(max, mergeDistList.size());
        if (max == mergeDistList.size()) return mergeDistList;
        else if (max == mergeNoCmbList.size()) return mergeNoCmbList;
        else return mergeCmbList;
    }

    /**
     * Cases 4: merges the largest compatible group of MR splittees into the
     * (map-only) splitter via {@code splitOp}. Returns the number merged.
     */
    private int mergeMapReduceSplittees(List<MapReduceOper> mapReducers,
            MapReduceOper splitter, POSplit splitOp) throws VisitorException {

        List<MapReduceOper> mergeList = getMergeList(splitter, mapReducers);

        if (mergeList.size() <= 1) {

            // choose one to merge, prefer the one with a combiner
            MapReduceOper mapReducer = mapReducers.get(0);
            for (MapReduceOper mro : mapReducers) {
                if (!mro.combinePlan.isEmpty()) {
                    mapReducer = mro;
                    break;
                }
            }
            mergeList.clear();
            mergeList.add(mapReducer);
        }

        if (mergeList.size() == 1) {
            mergeSingleMapReduceSplittee(mergeList.get(0), splitter, splitOp);
        } else {
            mergeAllMapReduceSplittees(mergeList, splitter, splitOp);
        }

        return mergeList.size();
    }

    /**
     * Case 5: splitter has a non-empty reduce plan, so splittees cannot be merged
     * into it directly. Merges multiple splittees into a NEW intermediate MR
     * operator connected after the splitter. Returns (merged - 1), the net
     * reduction in MR operators.
     */
    private int mergeMapReduceSplittees(List<MapReduceOper> mapReducers,
            MapReduceOper splitter) throws VisitorException {

        // In this case the splitter has non-empty reducer so we can't merge
        // MR splittees into the splitter. What we'll do is to merge multiple
        // splittees (if exists) into a new MR operator and connect it to the splitter.

        List<MapReduceOper> mergeList = getMergeList(splitter, mapReducers);

        if (mergeList.size() <= 1) {
            // nothing to merge, just return
            return 0;
        }

        MapReduceOper mrOper = getMROper();

        MapReduceOper splittee = mergeList.get(0);
        PhysicalPlan pl = splittee.mapPlan;
        POLoad load = (POLoad) pl.getRoots().get(0);

        mrOper.mapPlan.add(load);

        // add a dummy store operator, it'll be replaced by the split operator later.
        try {
            mrOper.mapPlan.addAsLeaf(getStore());
        } catch (PlanException e) {
            int errCode = 2137;
            String msg = "Internal Error. Unable to add store to the plan as leaf for optimization.";
            throw new OptimizerException(msg, errCode, PigException.BUG, e);
        }

        // connect the new MR operator to the splitter
        try {
            getPlan().add(mrOper);
            getPlan().connect(splitter, mrOper);
        } catch (PlanException e) {
            int errCode = 2133;
            String msg = "Internal Error. Unable to connect splitter with successors for optimization.";
            throw new OptimizerException(msg, errCode, PigException.BUG, e);
        }

        // merger the splittees into the new MR operator
        mergeAllMapReduceSplittees(mergeList, mrOper, getSplit());

        return (mergeList.size() - 1);
    }

    /** True when every pair of splittees shares the same map key type. */
    private boolean hasSameMapKeyType(List<MapReduceOper> splittees) {
        boolean sameKeyType = true;
        for (MapReduceOper outer : splittees) {
            for (MapReduceOper inner : splittees) {
                if (inner.mapKeyType != outer.mapKeyType) {
                    sameKeyType = false;
                    break;
                }
            }
            if (!sameKeyType) break;
        }

        return sameKeyType;
    }

    /**
     * Assigns consecutive multi-query indices (starting at {@code initial}) to the
     * POLocalRearrange leaves of every nested plan of {@code splitOp}, recursing
     * into nested POSplits. Returns the next free index.
     */
    private int setIndexOnLRInSplit(int initial, POSplit splitOp, boolean sameKeyType)
        throws VisitorException {
        int index = initial;

        List<PhysicalPlan> pls = splitOp.getPlans();
        for (PhysicalPlan pl : pls) {
            PhysicalOperator leaf = pl.getLeaves().get(0);
            if (leaf instanceof POLocalRearrange) {
                POLocalRearrange lr = (POLocalRearrange) leaf;
                try {
                    lr.setMultiQueryIndex(index++);
                } catch (ExecException e) {
                    int errCode = 2136;
                    String msg = "Internal Error. Unable to set multi-query index for optimization.";
                    throw new OptimizerException(msg, errCode, PigException.BUG, e);
                }

                // change the map key type to tuple when
                // multiple splittees have different map key types
                if (!sameKeyType) {
                    lr.setKeyType(DataType.TUPLE);
                }
            } else if (leaf instanceof POSplit) {
                POSplit spl = (POSplit) leaf;
                index = setIndexOnLRInSplit(index, spl, sameKeyType);
            }
        }

        return index;
    }

    /**
     * Strips the load from one splittee map plan, stamps its LR leaf (or nested
     * split) with the next multi-query index, and adds the plan to {@code splitOp}.
     * Returns the next free index.
     */
    private int mergeOneMapPlanWithIndex(PhysicalPlan pl, POSplit splitOp,
            int index, boolean sameKeyType) throws VisitorException {
        PhysicalOperator load = pl.getRoots().get(0);
        // mapper plan after removing loads
        pl.remove(load);

        int curIndex = index;

        PhysicalOperator leaf = pl.getLeaves().get(0);
        if (leaf instanceof POLocalRearrange) {
            POLocalRearrange lr = (POLocalRearrange) leaf;
            try {
                lr.setMultiQueryIndex(curIndex++);
            } catch (ExecException e) {
                int errCode = 2136;
                String msg = "Internal Error. Unable to set multi-query index for optimization.";
                throw new OptimizerException(msg, errCode, PigException.BUG, e);
            }

            // change the map key type to tuple when
            // multiple splittees have different map key types
            if (!sameKeyType) {
                lr.setKeyType(DataType.TUPLE);
            }
        } else if (leaf instanceof POSplit) {
            // if the map plan that we are trying to merge
            // has a split, we need to update the indices of
            // the POLocalRearrange operators in the inner plans
            // of the split to be a continuation of the index
            // number sequence we are currently at.
            // So for example, if we we are in the MapRedOper
            // we are currently processing, if the index is currently
            // at 1 (meaning index 0 was used for a map plan
            // merged earlier), then we want the POLocalRearrange
            // operators in the split to have indices 1, 2 ...
            // essentially we are flattening the index numbers
            // across all POLocalRearranges in all merged map plans
            // including nested ones in POSplit
            POSplit spl = (POSplit) leaf;
            curIndex = setIndexOnLRInSplit(index, spl, sameKeyType);
        }

        splitOp.addPlan(pl);

        // return the updated index after setting index
        // on all POLocalRearranges including ones
        // in inner plans of any POSplit operators
        return curIndex;
    }

    private void mergeOneReducePlanWithIndex(PhysicalPlan from,
            PhysicalPlan to, int initial, int current, byte mapKeyType) throws VisitorException {
        POPackage pk = (POPackage) from.getRoots().get(0);
        from.remove(pk);

        if (!(pk instanceof POMultiQueryPackage)) {
            // XXX the index of the original keyInfo map is always 0,
            // we need to shift the index so that the lookups works
            // with the new indexed key
            addShiftedKeyInfoIndex(initial, pk);
        }

        int total = current - initial;

        POMultiQueryPackage pkg = (POMultiQueryPackage) to.getRoots().get(0);
        int pkCount = 0;
        if (pk instanceof POMultiQueryPackage) {
            List<POPackage> pkgs = ((POMultiQueryPackage) pk).getPackages();
            for (POPackage p : pkgs) {
                pkg.addPackage(p);
                pkCount++;
            }
            pkg.addIsKeyWrappedList(((POMultiQueryPackage) pk).getIsKeyWrappedList());
addShiftedKeyInfoIndex(initial, current, (POMultiQueryPackage)pk); } else { pkg.addPackage(pk, mapKeyType); pkCount = 1; } if (pkCount != total) { int errCode = 2146; String msg = "Internal Error. Inconsistency in key index found during optimization."; throw new OptimizerException(msg, errCode, PigException.BUG); } PODemux demux = (PODemux)to.getLeaves().get(0); int plCount = 0; PhysicalOperator root = from.getRoots().get(0); if (root instanceof PODemux) { // flattening the inner plans of the demux operator. // This is based on the fact that if a plan has a demux // operator, then it's the only operator in the plan. List<PhysicalPlan> pls = ((PODemux)root).getPlans(); for (PhysicalPlan pl : pls) { demux.addPlan(pl); plCount++; } } else { demux.addPlan(from); plCount = 1; } if (plCount != total) { int errCode = 2146; String msg = "Internal Error. Inconsistency in key index found during optimization."; throw new OptimizerException(msg, errCode, PigException.BUG); } if (pkg.isSameMapKeyType()) { pkg.setKeyType(pk.getKeyType()); } else { pkg.setKeyType(DataType.TUPLE); } } private void addShiftedKeyInfoIndex(int index, POPackage pkg) throws OptimizerException { /** * we only do multi query optimization for single input MROpers * Hence originally the keyInfo would have had only index 0. As * we merge MROpers into parent MROpers we add entries for the * multiquery based index (ORed with multi query bit mask). These additions * would mean we have many entries in the keyInfo while really it should * only have one since there is only one input that the package would * be processing and hence only one index. So each time we add an entry * for a new shifted index, we should clean up keyInfo so that it has only one entry * - the valid entry at that point. The "value" in the keyInfo map for the new * addition should be the same as the "value" in the existing Entry. 
After * addition, we should remove the older entry */ Map<Integer, Pair<Boolean, Map<Integer, Integer>>> keyInfo = pkg.getKeyInfo(); byte newIndex = (byte)(index | PigNullableWritable.mqFlag); Set<Integer> existingIndices = keyInfo.keySet(); if(existingIndices.size() != 1) { // we always maintain one entry in the keyinfo // which is the valid entry at the given stage of // multi query optimization int errCode = 2146; String msg = "Internal Error. Inconsistency in key index found during optimization."; throw new OptimizerException(msg, errCode, PigException.BUG); } int existingIndex = existingIndices.iterator().next(); keyInfo.put(Integer.valueOf(newIndex), keyInfo.get(existingIndex)); // clean up the old entry so we only keep // the valid entry around - if we did something wrong while // setting this up, we will fail at runtime which is better // than doing something wrong and giving incorrect results! if(newIndex != existingIndex) { keyInfo.remove(existingIndex); } } /** * @param initialIndex * @param onePastEndIndex * @param mpkg * @throws OptimizerException */ private int addShiftedKeyInfoIndex(int initialIndex, int onePastEndIndex, POMultiQueryPackage mpkg) throws OptimizerException { List<POPackage> pkgs = mpkg.getPackages(); // if we have lesser pkgs than (onePastEndIndex - initialIndex) // its because one or more of the pkgs is a POMultiQueryPackage which // internally has packages. int numIndices = (onePastEndIndex - initialIndex); int end = numIndices; if(numIndices > pkgs.size()) { end = pkgs.size(); } else if (numIndices < pkgs.size()) { int errCode = 2146; String msg = "Internal Error. 
Inconsistency in key index found during optimization."; throw new OptimizerException(msg, errCode, PigException.BUG); } int i = 0; int curIndex = initialIndex; while (i < end) { POPackage pkg = pkgs.get(i); addShiftedKeyInfoIndex(curIndex, pkg); curIndex++; i++; } return curIndex; // could be used in a caller who recursively called this function } private void mergeOneCombinePlanWithIndex(PhysicalPlan from, PhysicalPlan to, int initial, int current, byte mapKeyType) throws VisitorException { POPackage cpk = (POPackage)from.getRoots().get(0); from.remove(cpk); PODemux demux = (PODemux)to.getLeaves().get(0); POMultiQueryPackage pkg = (POMultiQueryPackage)to.getRoots().get(0); boolean isSameKeyType = pkg.isSameMapKeyType(); // if current > initial + 1, it means we had // a split in the map of the MROper we are trying to // merge. In that case we would have changed the indices // of the POLocalRearranges in the split to be in the // range initial to current. To handle key, value pairs // coming out of those POLocalRearranges, we add // the Packages in the 'from' POMultiQueryPackage (in this case, // it has to be a POMultiQueryPackage since we had // a POSplit in the map) to the 'to' POMultiQueryPackage. // These Packages would have correct positions in the package // list and would be able to handle the outputs from the different // POLocalRearranges. int total = current - initial; int pkCount = 0; if (cpk instanceof POMultiQueryPackage) { List<POPackage> pkgs = ((POMultiQueryPackage)cpk).getPackages(); for (POPackage p : pkgs) { pkg.addPackage(p); if (!isSameKeyType) { p.setKeyType(DataType.TUPLE); } pkCount++; } } else { pkg.addPackage(cpk); pkCount = 1; } pkg.setSameMapKeyType(isSameKeyType); if (pkCount != total) { int errCode = 2146; String msg = "Internal Error. 
Inconsistency in key index found during optimization."; throw new OptimizerException(msg, errCode, PigException.BUG); } // all packages should have the same key type if (!isSameKeyType) { cpk.setKeyType(DataType.TUPLE); } pkg.setKeyType(cpk.getKeyType()); // See comment above for why we flatten the Packages // in the from plan - for the same reason, we flatten // the inner plans of Demux operator now. int plCount = 0; PhysicalOperator leaf = from.getLeaves().get(0); if (leaf instanceof PODemux) { List<PhysicalPlan> pls = ((PODemux)leaf).getPlans(); for (PhysicalPlan pl : pls) { demux.addPlan(pl); POLocalRearrange lr = (POLocalRearrange)pl.getLeaves().get(0); try { lr.setMultiQueryIndex(initial + plCount++); } catch (ExecException e) { int errCode = 2136; String msg = "Internal Error. Unable to set multi-query index for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG, e); } // change the map key type to tuple when // multiple splittees have different map key types if (!isSameKeyType) { lr.setKeyType(DataType.TUPLE); } } } else { demux.addPlan(from); POLocalRearrange lr = (POLocalRearrange)from.getLeaves().get(0); try { lr.setMultiQueryIndex(initial + plCount++); } catch (ExecException e) { int errCode = 2136; String msg = "Internal Error. Unable to set multi-query index for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG, e); } // change the map key type to tuple when // multiple splittees have different map key types if (!isSameKeyType) { lr.setKeyType(DataType.TUPLE); } } if (plCount != total) { int errCode = 2146; String msg = "Internal Error. 
Inconsistency in key index found during optimization."; throw new OptimizerException(msg, errCode, PigException.BUG); } } private boolean needCombiner(List<MapReduceOper> mapReducers) { boolean needCombiner = false; for (MapReduceOper mrOp : mapReducers) { if (!mrOp.combinePlan.isEmpty()) { needCombiner = true; break; } } return needCombiner; } private PhysicalPlan createDemuxPlan(boolean sameKeyType, boolean isCombiner) throws VisitorException { PODemux demux = getDemux(isCombiner); POMultiQueryPackage pkg= getMultiQueryPackage(sameKeyType, isCombiner); PhysicalPlan pl = new PhysicalPlan(); pl.add(pkg); try { pl.addAsLeaf(demux); } catch (PlanException e) { int errCode = 2137; String msg = "Internal Error. Unable to add demux to the plan as leaf for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG, e); } return pl; } private void mergeAllMapReduceSplittees(List<MapReduceOper> mergeList, MapReduceOper splitter, POSplit splitOp) throws VisitorException { boolean sameKeyType = hasSameMapKeyType(mergeList); log.debug("Splittees have the same key type: " + sameKeyType); // create a new reduce plan that will be the container // for the multiple reducer plans of the MROpers in the mergeList PhysicalPlan redPl = createDemuxPlan(sameKeyType, false); // create a new combine plan that will be the container // for the multiple combiner plans of the MROpers in the mergeList PhysicalPlan comPl = needCombiner(mergeList) ? createDemuxPlan(sameKeyType, true) : null; log.debug("Splittees have combiner: " + (comPl != null)); int index = 0; for (MapReduceOper mrOp : mergeList) { // merge the map plan - this will recursively // set index on all POLocalRearranges encountered // including ones in inner plans of any POSplit // operators. 
Hence the index returned could be // > index + 1 int incIndex = mergeOneMapPlanWithIndex( mrOp.mapPlan, splitOp, index, sameKeyType); // merge the combiner plan if (comPl != null) { if (!mrOp.combinePlan.isEmpty()) { mergeOneCombinePlanWithIndex( mrOp.combinePlan, comPl, index, incIndex, mrOp.mapKeyType); } else { int errCode = 2141; String msg = "Internal Error. Cannot merge non-combiner with combiners for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG); } } // merge the reducer plan mergeOneReducePlanWithIndex( mrOp.reducePlan, redPl, index, incIndex, mrOp.mapKeyType); index = incIndex; log.info("Merged MR job " + mrOp.getOperatorKey().getId() + " into MR job " + splitter.getOperatorKey().getId()); } PhysicalPlan splitterPl = splitter.mapPlan; PhysicalOperator leaf = splitterPl.getLeaves().get(0); PhysicalOperator storeOp = splitterPl.getLeaves().get(0); List<PhysicalOperator> storePreds = splitterPl.getPredecessors(storeOp); // replace store operator in the splitter with split operator if (leaf instanceof POStore) { splitOp.setInputs(storePreds); try { splitterPl.replace(storeOp, splitOp);; } catch (PlanException e) { int errCode = 2132; String msg = "Internal Error. Unable to replace store with split operator for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG, e); } } splitter.setMapDone(true); splitter.reducePlan = redPl; splitter.setReduceDone(true); if (comPl != null) { splitter.combinePlan = comPl; } for (MapReduceOper mrOp : mergeList) { removeAndReconnect(mrOp, splitter); } splitter.mapKeyType = sameKeyType ? 
mergeList.get(0).mapKeyType : DataType.TUPLE; log.info("Requested parallelism of splitter: " + splitter.getRequestedParallelism()); } private void mergeSingleMapReduceSplittee(MapReduceOper mapReduce, MapReduceOper splitter, POSplit splitOp) throws VisitorException { PhysicalPlan splitterPl = splitter.mapPlan; PhysicalOperator leaf = splitterPl.getLeaves().get(0); PhysicalOperator storeOp = splitterPl.getLeaves().get(0); List<PhysicalOperator> storePreds = splitterPl.getPredecessors(storeOp); PhysicalPlan pl = mapReduce.mapPlan; PhysicalOperator load = pl.getRoots().get(0); pl.remove(load); splitOp.addPlan(pl); splitter.setMapDone(true); splitter.reducePlan = mapReduce.reducePlan; splitter.setReduceDone(true); splitter.combinePlan = mapReduce.combinePlan; // replace store operator in the splitter with split operator if (leaf instanceof POStore) { splitOp.setInputs(storePreds); try { splitterPl.replace(storeOp, splitOp);; } catch (PlanException e) { int errCode = 2132; String msg = "Internal Error. Unable to replace store with split operator for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG, e); } } removeAndReconnect(mapReduce, splitter); } /** * Removes the specified MR operator from the plan after the merge. 
* Connects its predecessors and successors to the merged MR operator * * @param mr the MR operator to remove * @param newMR the MR operator to be connected to the predecessors and * the successors of the removed operator * @throws VisitorException if connect operation fails */ private void removeAndReconnect(MapReduceOper mr, MapReduceOper newMR) throws VisitorException { List<MapReduceOper> mapperSuccs = getPlan().getSuccessors(mr); List<MapReduceOper> mapperPreds = getPlan().getPredecessors(mr); // make a copy before removing operator ArrayList<MapReduceOper> succsCopy = null; ArrayList<MapReduceOper> predsCopy = null; if (mapperSuccs != null) { succsCopy = new ArrayList<MapReduceOper>(mapperSuccs); } if (mapperPreds != null) { predsCopy = new ArrayList<MapReduceOper>(mapperPreds); } getPlan().remove(mr); // reconnect the mapper's successors if (succsCopy != null) { for (MapReduceOper succ : succsCopy) { try { getPlan().connect(newMR, succ); } catch (PlanException e) { int errCode = 2133; String msg = "Internal Error. Unable to connect map plan with successors for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG, e); } } } // reconnect the mapper's predecessors if (predsCopy != null) { for (MapReduceOper pred : predsCopy) { if (newMR.getOperatorKey().equals(pred.getOperatorKey())) { continue; } try { getPlan().connect(pred, newMR); } catch (PlanException e) { int errCode = 2134; String msg = "Internal Error. 
Unable to connect map plan with predecessors for optimization."; throw new OptimizerException(msg, errCode, PigException.BUG, e); } } } mergeMROperProperties(mr, newMR); } private void mergeMROperProperties(MapReduceOper from, MapReduceOper to) { if (from.isEndOfAllInputSetInMap()) { to.setEndOfAllInputInMap(true); } if (from.isEndOfAllInputSetInReduce()) { to.setEndOfAllInputInReduce(true); } if (from.getRequestedParallelism() > to.getRequestedParallelism()) { to.requestedParallelism = from.requestedParallelism; } if (!from.UDFs.isEmpty()) { to.UDFs.addAll(from.UDFs); } if (from.needsDistinctCombiner()) { to.setNeedsDistinctCombiner(true); } if (to.mapKeyType == DataType.UNKNOWN) { to.mapKeyType = from.mapKeyType; } } private boolean isMapOnly(MapReduceOper mr) { return mr.reducePlan.isEmpty(); } private boolean isSingleLoadMapperPlan(PhysicalPlan pl) { return (pl.getRoots().size() == 1); } private boolean isSinglePredecessor(MapReduceOper mr) { return (getPlan().getPredecessors(mr).size() == 1); } private POSplit getSplit(){ return new POSplit(new OperatorKey(scope, nig.getNextNodeId(scope))); } private MapReduceOper getMROper(){ return new MapReduceOper(new OperatorKey(scope, nig.getNextNodeId(scope))); } private POStore getStore(){ return new POStore(new OperatorKey(scope, nig.getNextNodeId(scope))); } private PODemux getDemux(boolean inCombiner){ PODemux demux = new PODemux(new OperatorKey(scope, nig.getNextNodeId(scope))); demux.setInCombiner(inCombiner); return demux; } private POMultiQueryPackage getMultiQueryPackage(boolean sameMapKeyType, boolean inCombiner){ POMultiQueryPackage pkg = new POMultiQueryPackage(new OperatorKey(scope, nig.getNextNodeId(scope))); pkg.setInCombiner(inCombiner); pkg.setSameMapKeyType(sameMapKeyType); return pkg; } }
/*
 * Copyright 2017-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.jvm.java.abi;

import com.facebook.buck.jvm.java.abi.source.api.CannotInferException;
import com.google.common.base.Preconditions;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.annotation.processing.Messager;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementScanner8;
import javax.lang.model.util.Elements;
import javax.lang.model.util.SimpleAnnotationValueVisitor8;
import javax.tools.Diagnostic;
import org.objectweb.asm.AnnotationVisitor;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

/**
 * Walks a {@code javax.lang.model} {@link Element} tree and replays it onto an
 * ASM {@link ClassVisitor}. Private members are skipped, so the emitted class
 * file contains only the externally visible surface (presumably for ABI-jar
 * generation — confirm against callers).
 */
class ClassVisitorDriverFromElement {
  private final DescriptorFactory descriptorFactory;
  private final Messager messager;
  private final SignatureFactory signatureFactory;
  private final SourceVersion targetVersion;
  private final Elements elements;
  private final AccessFlags accessFlagsUtils;
  private final InnerClassesTable innerClassesTable;

  /**
   * @param targetVersion the class file version to target, expressed as the corresponding Java
   *     source version
   * @param elements element utilities used to build descriptors and resolve java.lang.Object
   * @param messager sink for reporting errors encountered while visiting annotations
   */
  ClassVisitorDriverFromElement(SourceVersion targetVersion, Elements elements, Messager messager) {
    this.targetVersion = targetVersion;
    this.elements = elements;
    descriptorFactory = new DescriptorFactory(elements);
    this.messager = messager;
    signatureFactory = new SignatureFactory(descriptorFactory);
    accessFlagsUtils = new AccessFlags(elements);
    innerClassesTable = new InnerClassesTable(descriptorFactory, accessFlagsUtils);
  }

  /** Drives the given visitor over the element tree and closes it with visitEnd. */
  public void driveVisitor(Element fullElement, ClassVisitor visitor) {
    fullElement.accept(new ElementVisitorAdapter(), visitor);
    visitor.visitEnd();
  }

  /** Gets the class file version corresponding to the given source version constant. */
  private static int sourceVersionToClassFileVersion(SourceVersion version) {
    switch (version) {
      case RELEASE_0:
        return Opcodes.V1_1; // JVMS8 4.1: 1.0 and 1.1 both support version 45.3 (Opcodes.V1_1)
      case RELEASE_1:
        return Opcodes.V1_1;
      case RELEASE_2:
        return Opcodes.V1_2;
      case RELEASE_3:
        return Opcodes.V1_3;
      case RELEASE_4:
        return Opcodes.V1_4;
      case RELEASE_5:
        return Opcodes.V1_5;
      case RELEASE_6:
        return Opcodes.V1_6;
      case RELEASE_7:
        return Opcodes.V1_7;
      case RELEASE_8:
        return Opcodes.V1_8;
      default:
        throw new IllegalArgumentException(String.format("Unexpected source version: %s", version));
    }
  }

  /** Common shape of ClassVisitor/MethodVisitor/FieldVisitor annotation hooks. */
  private interface VisitorWithAnnotations {
    AnnotationVisitor visitAnnotation(String desc, boolean visible);
  }

  /** Element visitor that translates each element kind into ASM visit calls. */
  private class ElementVisitorAdapter extends ElementScanner8<Void, ClassVisitor> {
    // set once the top-level type has been emitted; nested types are then ignored here
    boolean classVisitorStarted = false;

    @Override
    public Void visitPackage(PackageElement e, ClassVisitor classVisitor) {
      // emit a synthetic package-info class carrying the package annotations
      classVisitor.visit(
          sourceVersionToClassFileVersion(targetVersion),
          Opcodes.ACC_SYNTHETIC | Opcodes.ACC_ABSTRACT | Opcodes.ACC_INTERFACE,
          e.getQualifiedName().toString().replace('.', '/') + "/package-info",
          null,
          "java/lang/Object",
          new String[0]);
      visitAnnotations(e, classVisitor::visitAnnotation);
      innerClassesTable.reportInnerClassReferences(e, classVisitor);
      classVisitor.visitEnd();
      return null;
    }

    // TODO(jkeljo): Type annotations

    @Override
    public Void visitType(TypeElement e, ClassVisitor visitor) {
      if (classVisitorStarted) {
        // We'll get inner class references later
        return null;
      }

      TypeMirror superclass = e.getSuperclass();
      if (superclass.getKind() == TypeKind.NONE) {
        // interfaces and java.lang.Object report no superclass; default it
        superclass =
            Preconditions.checkNotNull(elements.getTypeElement("java.lang.Object")).asType();
      }

      visitor.visit(
          sourceVersionToClassFileVersion(targetVersion),
          accessFlagsUtils.getAccessFlagsForClassNode(e),
          descriptorFactory.getInternalName(e),
          signatureFactory.getSignature(e),
          descriptorFactory.getInternalName(superclass),
          e.getInterfaces()
              .stream()
              .map(descriptorFactory::getInternalName)
              .toArray(size -> new String[size]));
      classVisitorStarted = true;

      visitAnnotations(e, visitor::visitAnnotation);

      // scan enclosed members (fields, methods, nested types)
      super.visitType(e, visitor);

      innerClassesTable.reportInnerClassReferences(e, visitor);

      return null;
    }

    @Override
    public Void visitExecutable(ExecutableElement e, ClassVisitor visitor) {
      // private methods are not part of the emitted surface
      if (e.getModifiers().contains(Modifier.PRIVATE)) {
        return null;
      }

      // TODO(jkeljo): Bridge methods: Look at superclasses, then interfaces, checking whether
      // method types change in the new class

      String[] exceptions =
          e.getThrownTypes()
              .stream()
              .map(descriptorFactory::getInternalName)
              .toArray(count -> new String[count]);

      MethodVisitor methodVisitor =
          visitor.visitMethod(
              accessFlagsUtils.getAccessFlags(e),
              e.getSimpleName().toString(),
              descriptorFactory.getDescriptor(e),
              signatureFactory.getSignature(e),
              exceptions);

      visitParameters(e.getParameters(), methodVisitor, MoreElements.isInnerClassConstructor(e));
      visitDefaultValue(e, methodVisitor);
      visitAnnotations(e, methodVisitor::visitAnnotation);
      methodVisitor.visitEnd();

      return null;
    }

    /**
     * Emits parameter annotations. For inner-class constructors the implicit
     * outer-instance parameter occupies slot 0, so source parameters shift by one.
     */
    private void visitParameters(
        List<? extends VariableElement> parameters,
        MethodVisitor methodVisitor,
        boolean isInnerClassConstructor) {
      if (isInnerClassConstructor) {
        // ASM uses a fake annotation to indicate synthetic parameters
        methodVisitor.visitParameterAnnotation(0, "Ljava/lang/Synthetic;", false);
      }
      for (int i = 0; i < parameters.size(); i++) {
        VariableElement parameter = parameters.get(i);
        for (AnnotationMirror annotationMirror : parameter.getAnnotationMirrors()) {
          if (MoreElements.isSourceRetention(annotationMirror)) {
            continue;
          }
          visitAnnotationValues(
              annotationMirror,
              methodVisitor.visitParameterAnnotation(
                  isInnerClassConstructor ? i + 1 : i,
                  descriptorFactory.getDescriptor(annotationMirror.getAnnotationType()),
                  MoreElements.isRuntimeRetention(annotationMirror)));
        }
      }
    }

    /** Emits the default value of an annotation-interface method, if any. */
    private void visitDefaultValue(ExecutableElement e, MethodVisitor methodVisitor) {
      AnnotationValue defaultValue = e.getDefaultValue();
      if (defaultValue == null) {
        return;
      }

      AnnotationVisitor annotationVisitor = methodVisitor.visitAnnotationDefault();
      visitAnnotationValue(null, defaultValue, annotationVisitor);
      annotationVisitor.visitEnd();
    }

    @Override
    public Void visitVariable(VariableElement e, ClassVisitor classVisitor) {
      // private fields are not part of the emitted surface
      if (e.getModifiers().contains(Modifier.PRIVATE)) {
        return null;
      }

      FieldVisitor fieldVisitor =
          classVisitor.visitField(
              accessFlagsUtils.getAccessFlags(e),
              e.getSimpleName().toString(),
              descriptorFactory.getDescriptor(e),
              signatureFactory.getSignature(e),
              e.getConstantValue());
      visitAnnotations(e, fieldVisitor::visitAnnotation);
      fieldVisitor.visitEnd();

      return null;
    }

    /** Replays all annotation mirrors of an element onto the given visitor hook. */
    private void visitAnnotations(Element enclosingElement, VisitorWithAnnotations visitor) {
      enclosingElement
          .getAnnotationMirrors()
          .forEach(annotation -> visitAnnotation(enclosingElement, annotation, visitor));
    }

    /**
     * Emits one annotation (skipping SOURCE-retention ones). A
     * CannotInferException from the descriptor lookup is reported through the
     * Messager rather than propagated.
     */
    private void visitAnnotation(
        Element enclosingElement, AnnotationMirror annotation, VisitorWithAnnotations visitor) {
      try {
        if (MoreElements.isSourceRetention(annotation)) {
          return;
        }
        AnnotationVisitor annotationVisitor =
            visitor.visitAnnotation(
                descriptorFactory.getDescriptor(annotation.getAnnotationType()),
                MoreElements.isRuntimeRetention(annotation));
        visitAnnotationValues(annotation, annotationVisitor);
        annotationVisitor.visitEnd();
      } catch (CannotInferException e) {
        messager.printMessage(
            Diagnostic.Kind.ERROR,
            "Could not load the class file for this annotation. Consider adding required_for_source_only_abi = True to its build rule.",
            enclosingElement,
            annotation);
      }
    }

    private void visitAnnotationValues(
        AnnotationMirror annotation, AnnotationVisitor annotationVisitor) {
      visitAnnotationValues(annotation.getElementValues(), annotationVisitor);
    }

    /** Emits each name/value pair of an annotation onto the ASM AnnotationVisitor. */
    private void visitAnnotationValues(
        Map<? extends ExecutableElement, ? extends AnnotationValue> elementValues,
        AnnotationVisitor visitor) {
      elementValues
          .entrySet()
          .forEach(
              entry ->
                  visitAnnotationValue(
                      entry.getKey().getSimpleName().toString(), entry.getValue(), visitor));
    }

    private void visitAnnotationValue(
        @Nullable String name, AnnotationValue value, AnnotationVisitor visitor) {
      value.accept(new AnnotationVisitorAdapter(name, visitor), null);
    }

    /**
     * Translates a single {@link AnnotationValue} (primitive, class, enum,
     * nested annotation, or array) into the matching ASM visit call.
     */
    private class AnnotationVisitorAdapter extends SimpleAnnotationValueVisitor8<Void, Void> {
      @Nullable private final String name;
      private final AnnotationVisitor visitor;

      private AnnotationVisitorAdapter(@Nullable String name, AnnotationVisitor visitor) {
        this.name = name;
        this.visitor = visitor;
      }

      @Override
      protected Void defaultAction(Object value, Void aVoid) {
        // primitives and Strings pass through unchanged
        visitor.visit(name, value);
        return null;
      }

      @Override
      public Void visitType(TypeMirror value, Void aVoid) {
        // Class-valued member: convert to an ASM Type
        visitor.visit(name, descriptorFactory.getType(value));
        return null;
      }

      @Override
      public Void visitEnumConstant(VariableElement value, Void aVoid) {
        visitor.visitEnum(
            name,
            descriptorFactory.getDescriptor(value.getEnclosingElement().asType()),
            value.getSimpleName().toString());
        return null;
      }

      @Override
      public Void visitAnnotation(AnnotationMirror value, Void aVoid) {
        // nested annotation: recurse with a fresh visitor
        AnnotationVisitor annotationValueVisitor =
            visitor.visitAnnotation(
                name, descriptorFactory.getDescriptor(value.getAnnotationType()));
        visitAnnotationValues(value, annotationValueVisitor);
        annotationValueVisitor.visitEnd();
        return null;
      }

      @Override
      public Void visitArray(List<? extends AnnotationValue> listValue, Void aVoid) {
        AnnotationVisitor arrayMemberVisitor = visitor.visitArray(name);
        listValue.forEach(
            annotationValue -> visitAnnotationValue(null, annotationValue, arrayMemberVisitor));
        arrayMemberVisitor.visitEnd();
        return null;
      }
    }
  }
}
package at.fh.swenga.game.dependencies.jcraft;

/* -*-mode:java; c-basic-offset:2; indent-tabs-mode:nil -*- */
/* JOrbis
 * Copyright (C) 2000 ymnk, JCraft,Inc.
 *
 * Written by: 2000 ymnk<ymnk@jcraft.com>
 *
 * Many thanks to
 *   Monty <monty@xiph.org> and
 *   The XIPHOPHORUS Company http://www.xiph.org/ .
 * JOrbis has been based on their awesome works, Vorbis codec.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public License
 * as published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/**
 * Modified Discrete Cosine Transform used by the Vorbis decoder (JOrbis port
 * of libvorbis' mdct.c). Only the inverse transform ({@link #backward}) is
 * implemented; {@link #forward} and {@link #clear} are empty stubs.
 */
class Mdct{

  int n;      // transform size (must be a power of two per the log2n computation)
  int log2n;  // log2 of n

  float[] trig;   // precomputed sin/cos lookup tables (regions A, B, C)
  int[] bitrev;   // bit-reversal index pairs for the radix stages

  float scale;    // 4/n; computed but not applied in backward()

  /** Precomputes the trig tables and bit-reversal indices for size n. */
  void init(int n){
    bitrev=new int[n/4];
    trig=new float[n+n/4];

    log2n=(int)Math.rint(Math.log(n)/Math.log(2));
    this.n=n;

    // offsets of the three trig table regions (even/odd interleaved)
    int AE=0;
    int AO=1;
    int BE=AE+n/2;
    int BO=BE+1;
    int CE=BE+n/2;
    int CO=CE+1;
    // trig lookups...
    for(int i=0; i<n/4; i++){
      trig[AE+i*2]=(float)Math.cos((Math.PI/n)*(4*i));
      trig[AO+i*2]=(float)-Math.sin((Math.PI/n)*(4*i));
      trig[BE+i*2]=(float)Math.cos((Math.PI/(2*n))*(2*i+1));
      trig[BO+i*2]=(float)Math.sin((Math.PI/(2*n))*(2*i+1));
    }
    for(int i=0; i<n/8; i++){
      trig[CE+i*2]=(float)Math.cos((Math.PI/n)*(4*i+2));
      trig[CO+i*2]=(float)-Math.sin((Math.PI/n)*(4*i+2));
    }

    {
      // build bit-reversed index pairs used in steps 4-7 of the kernel
      int mask=(1<<(log2n-1))-1;
      int msb=1<<(log2n-2);
      for(int i=0; i<n/8; i++){
        int acc=0;
        for(int j=0; msb>>>j!=0; j++)
          if(((msb>>>j)&i)!=0)
            acc|=1<<j;
        bitrev[i*2]=((~acc)&mask);
        // bitrev[i*2]=((~acc)&mask)-1;
        bitrev[i*2+1]=acc;
      }
    }
    scale=4.f/n;
  }

  void clear(){
  }

  // forward MDCT is not implemented in this port
  void forward(float[] in, float[] out){
  }

  // scratch buffers reused across calls; grown on demand in backward()
  float[] _x=new float[1024];
  float[] _w=new float[1024];

  /**
   * Inverse MDCT: transforms n/2 spectral coefficients in {@code in} into n
   * time-domain samples in {@code out}. Synchronized because the scratch
   * buffers _x/_w are shared instance state.
   */
  synchronized void backward(float[] in, float[] out){
    if(_x.length<n/2){
      _x=new float[n/2];
    }
    if(_w.length<n/2){
      _w=new float[n/2];
    }
    float[] x=_x;
    float[] w=_w;
    int n2=n>>>1;
    int n4=n>>>2;
    int n8=n>>>3;

    // rotate + step 1
    {
      int inO=1;
      int xO=0;
      int A=n2;

      int i;
      for(i=0; i<n8; i++){
        A-=2;
        x[xO++]=-in[inO+2]*trig[A+1]-in[inO]*trig[A];
        x[xO++]=in[inO]*trig[A+1]-in[inO+2]*trig[A];
        inO+=4;
      }

      inO=n2-4;

      for(i=0; i<n8; i++){
        A-=2;
        x[xO++]=in[inO]*trig[A+1]+in[inO+2]*trig[A];
        x[xO++]=in[inO]*trig[A]-in[inO+2]*trig[A+1];
        inO-=4;
      }
    }

    float[] xxx=mdct_kernel(x, w, n, n2, n4, n8);

    int xx=0;

    // step 8: final rotation and mirror into the four output quadrants
    {
      int B=n2;
      int o1=n4, o2=o1-1;
      int o3=n4+n2, o4=o3-1;

      for(int i=0; i<n4; i++){
        float temp1=(xxx[xx]*trig[B+1]-xxx[xx+1]*trig[B]);
        float temp2=-(xxx[xx]*trig[B]+xxx[xx+1]*trig[B+1]);

        out[o1]=-temp1;
        out[o2]=temp1;
        out[o3]=temp2;
        out[o4]=temp2;

        o1++;
        o2--;
        o3++;
        o4--;
        xx+=2;
        B+=2;
      }
    }
  }

  /**
   * Butterfly core of the inverse transform (steps 2-7 of the libvorbis
   * algorithm). Returns whichever of the two scratch arrays holds the result
   * after the ping-pong swaps in step 3.
   */
  private float[] mdct_kernel(float[] x, float[] w, int n, int n2, int n4, int n8){
    // step 2

    int xA=n4;
    int xB=0;
    int w2=n4;
    int A=n2;

    for(int i=0; i<n4;){
      float x0=x[xA]-x[xB];
      float x1;
      w[w2+i]=x[xA++]+x[xB++];

      x1=x[xA]-x[xB];
      A-=4;

      w[i++]=x0*trig[A]+x1*trig[A+1];
      w[i]=x1*trig[A]-x0*trig[A+1];

      w[w2+i]=x[xA++]+x[xB++];
      i++;
    }

    // step 3: log2n-3 radix stages, swapping x and w each pass
    {
      for(int i=0; i<log2n-3; i++){
        int k0=n>>>(i+2);
        int k1=1<<(i+3);
        int wbase=n2-2;

        A=0;
        float[] temp;

        for(int r=0; r<(k0>>>2); r++){
          int w1=wbase;
          w2=w1-(k0>>1);
          float AEv=trig[A], wA;
          float AOv=trig[A+1], wB;
          wbase-=2;
          k0++;

          for(int s=0; s<(2<<i); s++){
            wB=w[w1]-w[w2];
            x[w1]=w[w1]+w[w2];

            wA=w[++w1]-w[++w2];
            x[w1]=w[w1]+w[w2];

            x[w2]=wA*AEv-wB*AOv;
            x[w2-1]=wB*AEv+wA*AOv;

            w1-=k0;
            w2-=k0;
          }
          k0--;
          A+=k1;
        }

        temp=w;
        w=x;
        x=temp;
      }
    }

    // step 4, 5, 6, 7
    {
      int C=n;
      int bit=0;
      int x1=0;
      int x2=n2-1;

      for(int i=0; i<n8; i++){
        int t1=bitrev[bit++];
        int t2=bitrev[bit++];

        float wA=w[t1]-w[t2+1];
        float wB=w[t1-1]+w[t2];
        float wC=w[t1]+w[t2+1];
        float wD=w[t1-1]-w[t2];

        float wACE=wA*trig[C];
        float wBCE=wB*trig[C++];
        float wACO=wA*trig[C];
        float wBCO=wB*trig[C++];

        x[x1++]=(wC+wACO+wBCE)*.5f;
        x[x2--]=(-wD+wBCO-wACE)*.5f;
        x[x1++]=(wD+wBCO-wACE)*.5f;
        x[x2--]=(wC-wACO-wBCE)*.5f;
      }
    }
    return (x);
  }
}
package ml.optimization;

import static ml.utils.InPlaceOperator.affine;
import static ml.utils.InPlaceOperator.assign;
import static ml.utils.Matlab.innerProduct;
import static ml.utils.Matlab.minus;
import static ml.utils.Matlab.norm;
import static ml.utils.Matlab.plus;
import static ml.utils.Matlab.setMatrix;
import static ml.utils.Matlab.times;
import static ml.utils.Matlab.uminus;

import java.util.ArrayList;

import la.matrix.Matrix;

/**
 * A Java implementation for the nonlinear conjugate gradient method.
 * It is a general algorithm interface, only gradient and objective
 * function value are needed to compute outside the class.
 * </p>
 * A simple example: </br></br>
 * <code>
 * double epsilon = ...; // Convergence tolerance</br>
 * Matrix W = ...; // Initial matrix (vector) you want to optimize</br>
 * Matrix G = ...; // Gradient at the initial matrix (vector) you want to optimize</br>
 * double fval = ...; // Initial objective function value</br>
 * </br>
 * boolean flags[] = null; </br>
 * while (true) { </br>
 * &nbsp flags = NonlinearConjugateGradient.run(G, fval, epsilon, W); // Update W in place</br>
 * &nbsp if (flags[0]) // flags[0] indicates if the CG iteration converges</br>
 * &nbsp &nbsp break; </br>
 * &nbsp fval = ...; // Compute the new objective function value at the updated W</br>
 * &nbsp if (flags[1]) // flags[1] indicates if gradient at the updated W is required</br>
 * &nbsp &nbsp G = ...; // Compute the gradient at the new W</br>
 * } </br>
 * </br>
 * </code>
 *
 * @version 1.0 Jan. 26th, 2014
 *
 * @author Mingjie Qian
 */
public class NonlinearConjugateGradient {

	/**
	 * Current gradient.
	 */
	private static Matrix G = null;

	/**
	 * Last gradient.
	 */
	private static Matrix G_pre = null;

	/**
	 * Current matrix variable that we want to optimize.
	 */
	private static Matrix X = null;

	/**
	 * Decreasing step.
	 */
	private static Matrix p = null;

	/**
	 * The last objective function value.
	 */
	private static double fval = 0;

	/**
	 * If gradient is required for the next step.
	 */
	private static boolean gradientRequired = false;

	/**
	 * If the algorithm converges or not.
	 */
	private static boolean converge = false;

	/**
	 * State for the automata machine.
	 * 0: Initialization
	 * 1: Before backtracking line search
	 * 2: Backtracking line search
	 * 3: After backtracking line search
	 * 4: Convergence
	 */
	private static int state = 0;

	/**
	 * Step length for backtracking line search.
	 */
	private static double t = 1;

	/**
	 * A temporary variable holding the inner product of the decreasing step p
	 * and the gradient G, it should be always non-positive.
	 */
	private static double z = 0;

	/**
	 * Iteration counter.
	 */
	private static int k = 0;

	/**
	 * Armijo sufficient-decrease constant for the backtracking line search.
	 */
	private static double alpha = 0.05;

	/**
	 * Backtracking shrink factor (step length is multiplied by rho on each
	 * rejected trial step).
	 */
	private static double rho = 0.9;

	/**
	 * Formula used to calculate beta.
	 * 1: FR (Fletcher-Reeves)
	 * 2: PR (Polak-Ribiere)
	 * 3: PR+
	 * 4: HS (Hestenes-Stiefel)
	 */
	private static int formula = 4;

	/**
	 * An array holding the sequence of objective function values.
	 */
	private static ArrayList<Double> J = new ArrayList<Double>();

	/**
	 * Main entry for the algorithm. The matrix variable to be
	 * optimized will be updated in place to a better solution
	 * point with lower objective function value.
	 *
	 * @param Grad_t gradient at original X_t, required on the
	 *               first revocation
	 *
	 * @param fval_t objective function value on original X_t
	 *
	 * @param epsilon convergence precision
	 *
	 * @param X_t current matrix variable to be optimized, will be
	 *            updated in place to a better solution point with
	 *            lower objective function value.
	 *
	 * @return a {@code boolean} array of two elements: {converge, gradientRequired}
	 */
	public static boolean[] run(Matrix Grad_t, double fval_t, double epsilon, Matrix X_t) {

		// If the algorithm has converged, we do a new job
		if (state == 4) {
			G_pre = null;
			J.clear();
			k = 0;
			state = 0;
		}

		if (state == 0) {
			X = X_t.copy();
			if (Grad_t == null) {
				System.err.println("Gradient is required on the first call!");
				System.exit(1);
			}
			G = Grad_t.copy();
			fval = fval_t;
			if (Double.isNaN(fval)) {
				System.err.println("Object function value is nan!");
				System.exit(1);
			}
			System.out.format("Initial ofv: %g\n", fval);

			// First search direction is steepest descent.
			p = uminus(G);

			state = 1;
		}

		if (state == 1) {
			double norm_Grad = norm(G);
			if (norm_Grad < epsilon) {
				converge = true;
				gradientRequired = false;
				state = 4;
				System.out.printf("CG converges with norm(Grad) %f\n", norm_Grad);
				return new boolean[] {converge, gradientRequired};
			}

			t = 1;
			// z is always less than 0
			z = innerProduct(G, p);

			state = 2;

			// Propose the full trial step X + t * p; the caller evaluates it.
			setMatrix(X_t, plus(X, times(t, p)));

			converge = false;
			gradientRequired = false;
			return new boolean[] {converge, gradientRequired};
		}

		// Backtracking line search: shrink t until the Armijo condition
		// fval_t <= fval + alpha * t * z holds.
		if (state == 2) {
			converge = false;
			if (fval_t <= fval + alpha * t * z) {
				gradientRequired = true;
				state = 3;
			} else {
				t = rho * t;
				gradientRequired = false;
				// X_t = X + t * p, computed in place.
				affine(X_t, X, t, p);
			}
			// We don't need to compute X_t again since the X_t has already
			// satisfied the Armijo condition.
			return new boolean[] {converge, gradientRequired};
		}

		if (state == 3) {
			// Remember the previous gradient before accepting the new point.
			if (G_pre == null)
				G_pre = G.copy();
			else
				assign(G_pre, G);

			fval = fval_t;
			J.add(fval);
			System.out.format("Iter %d, ofv: %g, norm(Grad): %g\n", k + 1, fval, norm(G));

			// Accept the trial point and its gradient.
			assign(X, X_t);
			assign(G, Grad_t);

			Matrix y_k = null;
			y_k = minus(G, G_pre);

			double beta = 0;
			switch (formula) {
			case 1:
				// Fletcher-Reeves: beta = <G, G> / <G_pre, G_pre>.
				// (Previously divided by innerProduct(G_pre, G), which is not
				// the FR formula and could even be negative or zero.)
				beta = innerProduct(G, G) / innerProduct(G_pre, G_pre);
				break;
			case 2:
				// Polak-Ribiere.
				beta = innerProduct(G, y_k) / innerProduct(G_pre, G_pre);
				break;
			case 3:
				// PR+: PR clipped at zero to guarantee descent.
				beta = Math.max(innerProduct(G, y_k) / innerProduct(G_pre, G_pre), 0);
				break;
			case 4:
				// Hestenes-Stiefel.
				beta = innerProduct(G, y_k) / innerProduct(y_k, p);
				break;
			case 5:
				// Dai-Yuan.
				beta = innerProduct(G, G) / innerProduct(y_k, p);
				break;
			default:
				beta = innerProduct(G, y_k) / innerProduct(y_k, p);
				break;
			}

			// p_{k+1} = -G + beta * p_{k}
			affine(p, beta, p, '-', G);

			k = k + 1;

			state = 1;
		}

		converge = false;
		gradientRequired = false;
		return new boolean[] {converge, gradientRequired};
	}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.apex.malhar.lib.state.managed;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;

import javax.validation.constraints.NotNull;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.apex.malhar.lib.state.BucketedState;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.RemoteIterator;

import com.google.common.base.Preconditions;
import com.google.common.collect.Queues;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Futures;

import com.datatorrent.api.Context;
import com.datatorrent.lib.fileaccess.FileAccess;
import com.datatorrent.netlet.util.Slice;

/**
 * In this implementation of {@link AbstractManagedStateImpl} the buckets in memory are time-buckets.
 *
 * @since 3.4.0
 */
public class ManagedTimeUnifiedStateImpl extends AbstractManagedStateImpl implements BucketedState
{
  // Time buckets purged by the assigner; drained and torn down in endWindow().
  private final transient LinkedBlockingQueue<Long> purgedTimeBuckets = Queues.newLinkedBlockingQueue();
  // Buckets awaiting teardown; a bucket is only torn down once it has no pending async reads.
  private final transient Set<Bucket> bucketsForTeardown = Sets.newHashSet();

  public ManagedTimeUnifiedStateImpl()
  {
    bucketsFileSystem = new TimeUnifiedBucketsFileSystem();
  }

  @Override
  public long getNumBuckets()
  {
    return timeBucketAssigner.getNumBuckets();
  }

  /**
   * Stores the key/value under the time bucket derived from {@code time};
   * the time bucket doubles as the bucket id.
   */
  @Override
  public void put(long time, @NotNull Slice key, @NotNull Slice value)
  {
    long timeBucket = timeBucketAssigner.getTimeBucket(time);
    putInBucket(timeBucket, timeBucket, key, value);
  }

  @Override
  public Slice getSync(long time, @NotNull Slice key)
  {
    long timeBucket = timeBucketAssigner.getTimeBucket(time);
    if (timeBucket == -1) {
      //time is expired so return expired slice.
      return BucketedState.EXPIRED;
    }
    return getValueFromBucketSync(timeBucket, timeBucket, key);
  }

  @Override
  public Future<Slice> getAsync(long time, @NotNull Slice key)
  {
    long timeBucket = timeBucketAssigner.getTimeBucket(time);
    if (timeBucket == -1) {
      //time is expired so return expired slice.
      return Futures.immediateFuture(BucketedState.EXPIRED);
    }
    return getValueFromBucketAsync(timeBucket, timeBucket, key);
  }

  @Override
  public void endWindow()
  {
    super.endWindow();
    Long purgedTimeBucket;

    //collect all the purged time buckets
    while (null != (purgedTimeBucket = purgedTimeBuckets.poll())) {
      long purgedTimeBucketIdx = getBucketIdx(purgedTimeBucket);
      if (buckets.containsKey(purgedTimeBucketIdx)
          && buckets.get(purgedTimeBucketIdx).getBucketId() == purgedTimeBucket) {
        bucketsForTeardown.add(buckets.get(purgedTimeBucketIdx));
        buckets.remove(purgedTimeBucketIdx);
      }
    }

    //tear down all the eligible time buckets
    Iterator<Bucket> bucketIterator = bucketsForTeardown.iterator();
    while (bucketIterator.hasNext()) {
      Bucket bucket = bucketIterator.next();
      if (!tasksPerBucketId.containsKey(bucket.getBucketId())) {
        //no pending asynchronous queries for this bucket id
        bucket.teardown();
        bucketIterator.remove();
      }
    }
  }

  @Override
  protected void handleBucketConflict(long bucketId, long newBucketId)
  {
    Preconditions.checkArgument(buckets.get(bucketId).getBucketId() < newBucketId, "new time bucket should have a value"
        + " greater than the old time bucket");
    //Time buckets are purged periodically so here a bucket conflict is expected and so we just ignore conflicts.
    bucketsForTeardown.add(buckets.get(bucketId));
    buckets.put(bucketId, newBucket(newBucketId));
    buckets.get(bucketId).setup(this);
  }

  @Override
  public void purgeTimeBucketsLessThanEqualTo(long timeBucket)
  {
    purgedTimeBuckets.add(timeBucket);
    super.purgeTimeBucketsLessThanEqualTo(timeBucket);
  }

  @Override
  public void setup(Context.OperatorContext context)
  {
    // set UnboundedTimeBucketAssigner to this managed state impl
    if (timeBucketAssigner == null) {
      UnboundedTimeBucketAssigner unboundedTimeBucketAssigner = new UnboundedTimeBucketAssigner();
      setTimeBucketAssigner(unboundedTimeBucketAssigner);
    }
    super.setup(context);
  }

  /**
   * This uses operator id instead of bucket id as the name of parent folder of time-buckets. This is because
   * multiple partitions may work on same time-buckets.
   */
  private static class TimeUnifiedBucketsFileSystem extends BucketsFileSystem
  {
    @Override
    protected FileAccess.FileWriter getWriter(long bucketId, String fileName) throws IOException
    {
      return managedStateContext.getFileAccess().getWriter(managedStateContext.getOperatorContext().getId(),
          fileName);
    }

    @Override
    protected FileAccess.FileReader getReader(long bucketId, String fileName) throws IOException
    {
      return managedStateContext.getFileAccess().getReader(managedStateContext.getOperatorContext().getId(),
          fileName);
    }

    @Override
    protected void rename(long bucketId, String fromName, String toName) throws IOException
    {
      managedStateContext.getFileAccess().rename(managedStateContext.getOperatorContext().getId(),
          fromName, toName);
    }

    @Override
    protected DataOutputStream getOutputStream(long bucketId, String fileName) throws IOException
    {
      return managedStateContext.getFileAccess().getOutputStream(managedStateContext.getOperatorContext().getId(),
          fileName);
    }

    @Override
    protected DataInputStream getInputStream(long bucketId, String fileName) throws IOException
    {
      return managedStateContext.getFileAccess().getInputStream(managedStateContext.getOperatorContext().getId(),
          fileName);
    }

    @Override
    protected boolean exists(long bucketId, String fileName) throws IOException
    {
      return managedStateContext.getFileAccess().exists(managedStateContext.getOperatorContext().getId(),
          fileName);
    }

    @Override
    protected RemoteIterator<LocatedFileStatus> listFiles(long bucketId) throws IOException
    {
      return managedStateContext.getFileAccess().listFiles(managedStateContext.getOperatorContext().getId());
    }

    @Override
    protected void delete(long bucketId, String fileName) throws IOException
    {
      managedStateContext.getFileAccess().delete(managedStateContext.getOperatorContext().getId(),
          fileName);
    }

    @Override
    protected void deleteBucket(long bucketId) throws IOException
    {
      managedStateContext.getFileAccess().deleteBucket(managedStateContext.getOperatorContext().getId());
    }

    @Override
    protected void addBucketName(long bucketId)
    {
      long operatorId = managedStateContext.getOperatorContext().getId();
      if (!bucketNamesOnFS.contains(operatorId)) {
        bucketNamesOnFS.add(operatorId);
      }
    }
  }

  // Made final: loggers should be immutable constants. ('transient' on a static
  // field is meaningless for serialization but is kept to match codebase style.)
  private static final transient Logger LOG = LoggerFactory.getLogger(ManagedTimeUnifiedStateImpl.class);
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.ahc.javabody; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.ahc.AhcComponent; import org.apache.camel.component.ahc.AhcConstants; import org.apache.camel.component.ahc.BaseAhcTest; import org.apache.camel.component.jetty.JettyHttpComponent; import org.apache.camel.http.common.HttpCommonComponent; import org.junit.Test; /** * */ public class AhcProduceJavaBodyTest extends BaseAhcTest { @Override public boolean isUseRouteBuilder() { return false; } @Test public void testHttpSendJavaBodyAndReceiveString() throws Exception { HttpCommonComponent jetty = context.getComponent("jetty", HttpCommonComponent.class); jetty.setAllowJavaSerializedObject(true); AhcComponent ahc = context.getComponent("ahc", AhcComponent.class); ahc.setAllowJavaSerializedObject(true); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { from(getTestServerEndpointUri()) .process(new Processor() { public void process(Exchange exchange) throws Exception { MyCoolBean cool = exchange.getIn().getBody(MyCoolBean.class); assertNotNull(cool); assertEquals(123, 
cool.getId()); assertEquals("Camel", cool.getName()); // we send back plain test exchange.getOut().setHeader(Exchange.CONTENT_TYPE, "text/plain"); exchange.getOut().setBody("OK"); } }); } }); context.start(); MyCoolBean cool = new MyCoolBean(123, "Camel"); String reply = template.requestBodyAndHeader(getAhcEndpointUri(), cool, Exchange.CONTENT_TYPE, AhcConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT, String.class); assertEquals("OK", reply); } @Test public void testHttpSendJavaBodyAndReceiveJavaBody() throws Exception { HttpCommonComponent jetty = context.getComponent("jetty", HttpCommonComponent.class); jetty.setAllowJavaSerializedObject(true); AhcComponent ahc = context.getComponent("ahc", AhcComponent.class); ahc.setAllowJavaSerializedObject(true); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { from(getTestServerEndpointUri()) .process(new Processor() { public void process(Exchange exchange) throws Exception { MyCoolBean cool = exchange.getIn().getBody(MyCoolBean.class); assertNotNull(cool); assertEquals(123, cool.getId()); assertEquals("Camel", cool.getName()); MyCoolBean reply = new MyCoolBean(456, "Camel rocks"); exchange.getOut().setBody(reply); exchange.getOut().setHeader(Exchange.CONTENT_TYPE, AhcConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT); } }); } }); context.start(); MyCoolBean cool = new MyCoolBean(123, "Camel"); MyCoolBean reply = template.requestBodyAndHeader(getAhcEndpointUri(), cool, Exchange.CONTENT_TYPE, AhcConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT, MyCoolBean.class); assertEquals(456, reply.getId()); assertEquals("Camel rocks", reply.getName()); } @Test public void testHttpSendStringAndReceiveJavaBody() throws Exception { HttpCommonComponent jetty = context.getComponent("jetty", HttpCommonComponent.class); jetty.setAllowJavaSerializedObject(true); AhcComponent ahc = context.getComponent("ahc", AhcComponent.class); ahc.setAllowJavaSerializedObject(true); context.addRoutes(new RouteBuilder() { 
@Override public void configure() throws Exception { from(getTestServerEndpointUri()) .process(new Processor() { public void process(Exchange exchange) throws Exception { String body = exchange.getIn().getBody(String.class); assertNotNull(body); assertEquals("Hello World", body); MyCoolBean reply = new MyCoolBean(456, "Camel rocks"); exchange.getOut().setBody(reply); exchange.getOut().setHeader(Exchange.CONTENT_TYPE, AhcConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT); } }); } }); context.start(); MyCoolBean reply = template.requestBody(getAhcEndpointUri(), "Hello World", MyCoolBean.class); assertEquals(456, reply.getId()); assertEquals("Camel rocks", reply.getName()); } @Test public void testNotAllowedReceive() throws Exception { HttpCommonComponent jetty = context.getComponent("jetty", HttpCommonComponent.class); jetty.setAllowJavaSerializedObject(true); AhcComponent ahc = context.getComponent("ahc", AhcComponent.class); ahc.setAllowJavaSerializedObject(false); context.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { from(getTestServerEndpointUri()) .process(new Processor() { public void process(Exchange exchange) throws Exception { String body = exchange.getIn().getBody(String.class); assertNotNull(body); assertEquals("Hello World", body); MyCoolBean reply = new MyCoolBean(456, "Camel rocks"); exchange.getOut().setBody(reply); exchange.getOut().setHeader(Exchange.CONTENT_TYPE, AhcConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT); } }); } }); context.start(); MyCoolBean reply = template.requestBody(getAhcEndpointUri(), "Hello World", MyCoolBean.class); assertNull(reply); } @Test public void testNotAllowed() throws Exception { JettyHttpComponent jetty = context.getComponent("jetty", JettyHttpComponent.class); jetty.setAllowJavaSerializedObject(false); AhcComponent ahc = context.getComponent("ahc", AhcComponent.class); ahc.setAllowJavaSerializedObject(false); context.addRoutes(new RouteBuilder() { @Override public void configure() 
throws Exception { from(getTestServerEndpointUri()) .process(new Processor() { public void process(Exchange exchange) throws Exception { String body = exchange.getIn().getBody(String.class); assertNotNull(body); assertEquals("Hello World", body); MyCoolBean reply = new MyCoolBean(456, "Camel rocks"); exchange.getOut().setBody(reply); exchange.getOut().setHeader(Exchange.CONTENT_TYPE, AhcConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT); } }); } }); context.start(); MyCoolBean cool = new MyCoolBean(123, "Camel"); try { template.requestBodyAndHeader(getAhcEndpointUri(), cool, Exchange.CONTENT_TYPE, AhcConstants.CONTENT_TYPE_JAVA_SERIALIZED_OBJECT, MyCoolBean.class); fail("Should fail"); } catch (Exception e) { assertTrue(e.getCause().getMessage().startsWith("Content-type application/x-java-serialized-object is not allowed")); } } }
package com.rrafols.packt.chart;

import android.content.Context;
import android.database.DataSetObserver;
import android.graphics.Canvas;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Rect;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;

import java.text.DecimalFormat;
import java.util.ArrayList;

/**
 * Custom view that renders up to two line graphs with a dashed grid,
 * horizontal/vertical labels, pinch-to-zoom and drag scrolling.
 * Rendering uses a fixed-timestep interpolated scroll animation.
 */
public class Chart extends View {
    private static final String TAG = Chart.class.getName();
    // Fixed logic timestep in ms (~60 fps).
    private static final int TIME_THRESHOLD = 16;
    // Scroll animation is considered finished below this delta.
    private static final float ANIM_THRESHOLD = 0.01f;

    private float scrollX;
    private float scrollY;
    private float scrollXTarget;
    private float scrollYTarget;
    private float frScrollX;
    private float frScrollY;
    private long timeStart;
    private long accTime;
    private ArrayList<Float>[] dataPoints;
    private Paint backgroundPaint;
    private Paint[] linePaint;
    private Paint[] circlePaint;
    private Path[] graphPath;
    private Path[] circlePath;
    private Path backgroundPath;
    private boolean regenerate;
    private float lastWidth;
    private float lastHeight;
    private Rect textBoundaries;
    private float minValue;
    private float maxValue;
    private float verticalDelta;
    private DecimalFormat decimalFormat;
    private String[] labels;
    private String[] verticalLabels;
    private boolean invertVerticalAxis;
    private boolean generateLabels;
    private boolean drawLegend;
    private ScaleGestureDetector scaleDetector;
    private float scale;
    private float dragX;
    private float dragY;
    private boolean zooming;
    private boolean dragged;
    private float maxLabelWidth;

    public Chart(Context context, AttributeSet attrs) {
        super(context, attrs);

        linePaint = new Paint[2];
        linePaint[0] = new Paint();
        linePaint[0].setAntiAlias(true);
        linePaint[0].setColor(0xffffffff);
        linePaint[0].setStrokeWidth(8.f);
        linePaint[0].setStyle(Paint.Style.STROKE);

        linePaint[1] = new Paint(linePaint[0]);
        linePaint[1].setColor(0xff4040ff);

        circlePaint = new Paint[2];
        circlePaint[0] = new Paint();
        circlePaint[0].setAntiAlias(true);
        circlePaint[0].setColor(0xffff2020);
        circlePaint[0].setStyle(Paint.Style.FILL);

        circlePaint[1] = new Paint(circlePaint[0]);
        circlePaint[1].setColor(0xff20ff20);

        backgroundPaint = new Paint();
        backgroundPaint.setColor(0xffFFFF80);
        backgroundPaint.setStyle(Paint.Style.STROKE);
        backgroundPaint.setPathEffect(new DashPathEffect(new float[] {5, 5}, 0));
        backgroundPaint.setTextSize(20.f);

        graphPath = new Path[2];
        graphPath[0] = new Path();
        graphPath[1] = new Path();

        circlePath = new Path[2];
        circlePath[0] = new Path();
        circlePath[1] = new Path();

        backgroundPath = new Path();

        lastWidth = -1;
        lastHeight = -1;

        textBoundaries = new Rect();
        decimalFormat = new DecimalFormat("#.##");
        verticalLabels = new String[11];
        invertVerticalAxis = false;
        drawLegend = true;
        generateLabels = true;

        // Generic array creation is not allowed in Java; this unchecked cast
        // is safe because only ArrayList<Float> is ever stored.
        dataPoints = (ArrayList<Float>[]) new ArrayList[2];

        zooming = false;
        scale = 1.f;
        maxLabelWidth = 0.f;

        scaleDetector = new ScaleGestureDetector(context,
                new ScaleGestureDetector.SimpleOnScaleGestureListener() {
            private float focusX;
            private float focusY;
            private float scrollCorrectionX = 0.f;
            private float scrollCorrectionY = 0.f;

            @Override
            public boolean onScaleBegin(ScaleGestureDetector detector) {
                zooming = true;
                focusX = detector.getFocusX();
                focusY = detector.getFocusY();
                scrollCorrectionX = focusX * scale - scrollXTarget;
                scrollCorrectionY = focusY * scale - scrollYTarget;
                return true;
            }

            @Override
            public boolean onScale(ScaleGestureDetector detector) {
                scale *= detector.getScaleFactor();
                // Clamp zoom between 1x and 2x.
                scale = Math.max(1.f, Math.min(scale, 2.f));

                // Keep the gesture focus point stationary while zooming.
                float currentX = focusX * scale - scrollXTarget;
                float currentY = focusY * scale - scrollYTarget;

                scrollXTarget += currentX - scrollCorrectionX;
                scrollYTarget += currentY - scrollCorrectionY;

                invalidate();
                return true;
            }

            @Override
            public void onScaleEnd(ScaleGestureDetector detector) {
                // NOTE(review): this re-arms `zooming` so onTouchEvent swallows
                // the next (lingering) touch event after the gesture ends —
                // confirm this is intentional rather than a typo for `false`.
                zooming = true;
            }
        });

        timeStart = SystemClock.elapsedRealtime();
    }

    public void setDataPoints(float[] originalData) {
        setDataPoints(originalData, 0);
    }

    public void setDataPoints(float[] originalData, int index) {
        ArrayList<Float> array = new ArrayList<>();
        for (float data : originalData) {
            array.add(data);
        }
        setDataPoints(array, index);
    }

    public void setDataPoints(ArrayList<Float> originalData) {
        setDataPoints(originalData, 0);
    }

    /**
     * Replaces the data series at the given index (0 or 1) with a defensive
     * copy of the supplied values and recomputes the value range.
     */
    public void setDataPoints(ArrayList<Float> originalData, int index) {
        dataPoints[index] = new ArrayList<Float>();
        dataPoints[index].addAll(originalData);

        adjustDataRange();
    }

    /**
     * Recomputes min/max over all series and schedules a path regeneration.
     */
    private void adjustDataRange() {
        minValue = Float.MAX_VALUE;
        // BUG FIX: was Float.MIN_VALUE, which is the smallest POSITIVE float
        // (~1.4e-45), not the most negative one — all-negative data would
        // never update maxValue. Seed with the true lower bound instead.
        maxValue = -Float.MAX_VALUE;

        for (int j = 0; j < dataPoints.length; j++) {
            for (int i = 0; dataPoints[j] != null && i < dataPoints[j].size(); i++) {
                if (dataPoints[j].get(i) < minValue) minValue = dataPoints[j].get(i);
                if (dataPoints[j].get(i) > maxValue) maxValue = dataPoints[j].get(i);
            }
        }

        verticalDelta = maxValue - minValue;
        regenerate = true;
        postInvalidate();
    }

    public void addValue(float data) {
        addValue(data, 0);
    }

    /**
     * Appends a value to the series at {@code index}; only rescans the range
     * when the new value falls outside the current one.
     */
    public void addValue(float data, int index) {
        dataPoints[index].add(data);
        if (data < minValue || data > maxValue) {
            adjustDataRange();
        } else {
            regenerate = true;
            postInvalidate();
        }
    }

    public void setInvertVerticalAxis(boolean invertVerticalAxis) {
        this.invertVerticalAxis = invertVerticalAxis;
        regenerate = true;
        postInvalidate();
    }

    public void setDrawLegend(boolean drawLegend) {
        this.drawLegend = drawLegend;
        regenerate = true;
        postInvalidate();
    }

    public void setLabels(String[] labels) {
        this.labels = labels;
        generateLabels = false;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        animateLogic();

        canvas.drawARGB(255, 0, 0, 0);
        canvas.save();

        canvas.translate(-frScrollX, -frScrollY);
        canvas.scale(scale, scale);

        // Pre-measure the vertical labels so the left padding can fit them.
        if (drawLegend && regenerate) {
            for (int i = 0; i <= 10; i++) {
                float step;
                if (!invertVerticalAxis) {
                    step = ((float) i / 10.f);
                } else {
                    step = ((float) (10 - i)) / 10.f;
                }
                float value = step * verticalDelta + minValue;
                verticalLabels[i] = decimalFormat.format(value);
                backgroundPaint.getTextBounds(verticalLabels[i], 0, verticalLabels[i].length(), textBoundaries);
                if (textBoundaries.width() > maxLabelWidth) {
                    maxLabelWidth = textBoundaries.width();
                }
            }
        }

        float labelLeftPadding = getPaddingLeft() + maxLabelWidth * 0.25f;
        float leftPadding = getPaddingLeft() + maxLabelWidth * 1.5f;
        float rightPadding = getPaddingRight();
        float topPadding = getPaddingTop();
        float width = canvas.getWidth() - leftPadding - rightPadding;
        float height = canvas.getHeight() - topPadding - getPaddingBottom()
                - backgroundPaint.getTextSize() + 0.5f;

        // In RTL layouts the value labels move to the right side of the graph.
        if (getLayoutDirection() == LAYOUT_DIRECTION_RTL) {
            leftPadding = getPaddingEnd();
            labelLeftPadding = leftPadding + width + maxLabelWidth * 0.25f;
        }

        if (lastWidth != width || lastHeight != height) {
            regenerate = true;
            lastWidth = width;
            lastHeight = height;
        }

        regenerateGraphs(leftPadding, topPadding, width, height);
        drawLegend(canvas, labelLeftPadding, leftPadding, topPadding, width, height);
        drawGraphs(canvas);

        canvas.restore();

        if (missingAnimations()) invalidate();
    }

    /**
     * Rebuilds the grid, label and graph paths when the data or size changed.
     */
    private void regenerateGraphs(float leftPadding, float topPadding, float width, float height) {
        if (regenerate) {
            for (int j = 0; j < 2; j++) {
                circlePath[j].reset();
                graphPath[j].reset();
            }
            backgroundPath.reset();

            if (generateLabels) {
                labels = new String[dataPoints[0].size() + 1];
                for (int i = 0; i < labels.length; i++) {
                    labels[i] = "" + i;
                }
            }

            // vertical grid lines
            for (int i = 0; i <= dataPoints[0].size(); i++) {
                float xl = width * (((float) i) / dataPoints[0].size()) + leftPadding;
                backgroundPath.moveTo(xl, topPadding);
                backgroundPath.lineTo(xl, topPadding + height);
            }

            // horizontal grid lines
            for (int i = 0; i <= 10; i++) {
                float yl = ((float) i / 10.f) * height + topPadding;
                backgroundPath.moveTo(leftPadding, yl);
                backgroundPath.lineTo(leftPadding + width, yl);
            }

            for (int j = 0; j < 2; j++) {
                if (dataPoints[j] != null) {
                    float x = leftPadding;
                    float y = height * getDataPoint(0, j) + topPadding;

                    graphPath[j].moveTo(x, y);
                    circlePath[j].addCircle(x, y, 10, Path.Direction.CW);

                    for (int i = 1; i < dataPoints[j].size(); i++) {
                        // NOTE(review): point i is placed at fraction (i+1)/size
                        // while the grid lines sit at i/size, so points 1..size-1
                        // are shifted one cell right of their label — verify
                        // whether ((float) i) / size was intended.
                        x = width * (((float) i + 1) / dataPoints[j].size()) + leftPadding;
                        y = height * getDataPoint(i, j) + topPadding;

                        graphPath[j].lineTo(x, y);
                        circlePath[j].addCircle(x, y, 10, Path.Direction.CW);
                    }
                }
            }
            regenerate = false;
        }
    }

    private void drawGraphs(Canvas canvas) {
        for (int j = 0; j < graphPath.length; j++) {
            canvas.drawPath(graphPath[j], linePaint[j]);
            canvas.drawPath(circlePath[j], circlePaint[j]);
        }
    }

    private void drawLegend(Canvas canvas, float labelLeftPadding, float leftPadding,
                            float topPadding, float width, float height) {
        if (drawLegend) {
            canvas.drawPath(backgroundPath, backgroundPaint);

            // draw bottom legend
            for (int i = 0; i <= dataPoints[0].size(); i++) {
                float xl = width * (((float) i) / dataPoints[0].size()) + leftPadding;
                backgroundPaint.getTextBounds(labels[i], 0, labels[i].length(), textBoundaries);
                canvas.drawText(labels[i], xl - (textBoundaries.width() / 2),
                        height + topPadding + backgroundPaint.getTextSize() * 1.5f, backgroundPaint);
            }

            // draw side legend
            for (int i = 0; i <= 10; i++) {
                float step = ((float) i / 10.f);
                float yl = step * height + topPadding
                        - (backgroundPaint.ascent() + backgroundPaint.descent()) * 0.5f;
                canvas.drawText(verticalLabels[i], labelLeftPadding, yl, backgroundPaint);
            }
        }
    }

    /**
     * Returns the value at position i of series {@code index}, normalized to
     * [0, 1] within the current data range (optionally inverted).
     */
    private float getDataPoint(int i, int index) {
        float data = (dataPoints[index].get(i) - minValue) / verticalDelta;
        return invertVerticalAxis ? 1.f - data : data;
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        scaleDetector.onTouchEvent(event);
        if (zooming) {
            invalidate();
            zooming = false;
            return true;
        }

        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                dragX = event.getX();
                dragY = event.getY();

                getParent().requestDisallowInterceptTouchEvent(true);
                dragged = false;
                return true;

            case MotionEvent.ACTION_UP:
                getParent().requestDisallowInterceptTouchEvent(false);
                return true;

            case MotionEvent.ACTION_MOVE:
                float newX = event.getX();
                float newY = event.getY();

                scrollScreen(dragX - newX, dragY - newY);

                dragX = newX;
                dragY = newY;
                dragged = true;
                return true;
            default:
                return false;
        }
    }

    /**
     * Check if there is any animation that has not finished.
     */
    private boolean missingAnimations() {
        if (Math.abs(scrollXTarget - scrollX) > ANIM_THRESHOLD) return true;
        if (Math.abs(scrollYTarget - scrollY) > ANIM_THRESHOLD) return true;
        return false;
    }

    /**
     * Execute logic iterations and interpolate between current and next logic iteration
     */
    private void animateLogic() {
        long currentTime = SystemClock.elapsedRealtime();
        accTime += currentTime - timeStart;
        timeStart = currentTime;

        while (accTime > TIME_THRESHOLD) {
            scrollX += (scrollXTarget - scrollX) / 4.f;
            scrollY += (scrollYTarget - scrollY) / 4.f;
            accTime -= TIME_THRESHOLD;
        }

        float factor = ((float) accTime) / TIME_THRESHOLD;
        float nextScrollX = scrollX + (scrollXTarget - scrollX) / 4.f;
        float nextScrollY = scrollY + (scrollYTarget - scrollY) / 4.f;

        frScrollX = scrollX * (1.f - factor) + nextScrollX * factor;
        frScrollY = scrollY * (1.f - factor) + nextScrollY * factor;
    }

    /**
     * scroll screen by dx, dy and trigger a redraw cycle.
     */
    private void scrollScreen(float dx, float dy) {
        scrollXTarget += dx;
        scrollYTarget += dy;

        if (scrollXTarget < 0) scrollXTarget = 0;
        if (scrollYTarget < 0) scrollYTarget = 0;

        // Clamp so the zoomed content never scrolls past its edges.
        if (scrollXTarget > getWidth() * scale - getWidth()) {
            scrollXTarget = getWidth() * scale - getWidth();
        }

        if (scrollYTarget > getHeight() * scale - getHeight()) {
            scrollYTarget = getHeight() * scale - getHeight();
        }

        invalidate();
    }
}
/*
 * Copyright (c) 2011-2021, PCJ Library, Marek Nowicki
 * All rights reserved.
 *
 * Licensed under New BSD License (3-clause license).
 *
 * See the file "LICENSE" for the full license governing this code.
 */
package org.pcj.internal.message.splitgroup;

import java.nio.channels.SocketChannel;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.pcj.Group;
import org.pcj.PcjFuture;
import org.pcj.internal.InternalCommonGroup;
import org.pcj.internal.InternalGroup;
import org.pcj.internal.InternalPCJ;
import org.pcj.internal.NodeData;
import org.pcj.internal.PcjThread;
import org.pcj.internal.message.Message;

/*
 * Per-group bookkeeping for in-flight group-split operations.  Each split is
 * identified by a "round" number; one {@code State} instance tracks a single
 * round from the first local announcement through group creation to the final
 * "go" barrier.
 *
 * @author Marek Nowicki (faramir@mat.umk.pl)
 */
public class SplitGroupStates {

    // threadId -> monotonically increasing round counter for that thread.
    private final ConcurrentMap<Integer, AtomicInteger> counterMap;
    // round -> state of the split operation running in that round.
    private final ConcurrentMap<Integer, State> stateMap;

    public SplitGroupStates() {
        counterMap = new ConcurrentHashMap<>();
        stateMap = new ConcurrentHashMap<>();
    }

    /** Returns the next (1-based) split round number for the given thread. */
    public int getNextRound(int threadId) {
        AtomicInteger roundCounter = counterMap.computeIfAbsent(threadId, key -> new AtomicInteger(0));
        return roundCounter.incrementAndGet();
    }

    /**
     * Returns the state for the given round, creating it on first access.
     * The new state is seeded with the number of local threads and the number
     * of children nodes in the group's communication tree — the notifications
     * that must arrive before this node can report upward.
     */
    public State getOrCreate(int round, InternalCommonGroup commonGroup) {
        return stateMap.computeIfAbsent(round, _round -> new State(_round, commonGroup.getLocalThreadsId().size(), commonGroup.getCommunicationTree().getChildrenNodes().size()));
    }

    /** Removes and returns the state of a finished round. */
    public State remove(int round) {
        return stateMap.remove(round);
    }

    /** State machine of one split round on one physical node. */
    public static class State {

        private final int round;
        // Outstanding (local threads, children nodes) notifications; replaced
        // atomically so the node reports upward exactly once, when both hit 0.
        private final AtomicReference<NotificationCount> notificationCount;
        // Countdown for the final barrier: children nodes + this node itself.
        private final AtomicInteger readyToGoNotificationCount;
        // threadId -> future completed when the split finishes for that thread.
        private final ConcurrentMap<Integer, SplitGroupFuture> futureMap;
        // threadId -> requested split number (absent when the thread opted out).
        private final ConcurrentMap<Integer, Integer> splitMap;
        // threadId -> ordering key used to assign new thread ids deterministically.
        private final ConcurrentMap<Integer, Integer> orderingMap;

        private State(int round, int localCount, int childrenCount) {
            this.round = round;
            futureMap = new ConcurrentHashMap<>();
            splitMap = new ConcurrentHashMap<>();
            orderingMap = new ConcurrentHashMap<>();
            notificationCount = new AtomicReference<>(new NotificationCount(localCount, childrenCount));
            readyToGoNotificationCount = new AtomicInteger(childrenCount + 1);
        }

        /** Future the calling thread blocks on until its new group is ready. */
        public PcjFuture<Group> getFuture(int threadId) {
            return futureMap.get(threadId);
        }

        /**
         * Records one local thread's split decision.  A {@code null} split
         * means the thread does not join any new group, so its future is
         * completed immediately.  When the last expected notification (local
         * or physical) arrives, the aggregated maps are forwarded upward.
         */
        public void processLocal(InternalCommonGroup group, int threadId, Integer split, int ordering) {
            futureMap.put(threadId, new SplitGroupFuture());
            if (split != null) {
                splitMap.put(threadId, split);
                orderingMap.put(threadId, ordering);
            } else {
                futureMap.get(threadId).signalDone();
            }

            NotificationCount count = notificationCount.updateAndGet(
                    old -> new NotificationCount(old.local - 1, old.physical));
            if (count.isDone()) {
                nodeProcessed(group);
            }
        }

        /** Merges the aggregated maps received from one child node. */
        protected void processPhysical(InternalCommonGroup group, Map<Integer, Integer> splitMap, Map<Integer, Integer> orderingMap) {
            this.splitMap.putAll(splitMap);
            this.orderingMap.putAll(orderingMap);

            NotificationCount count = notificationCount.updateAndGet(
                    old -> new NotificationCount(old.local, old.physical - 1));
            if (count.isDone()) {
                nodeProcessed(group);
            }
        }

        /**
         * All local and child notifications are in: forward the aggregated
         * data to the parent node, or — at the tree root — ask node 0 for as
         * many fresh group ids as there are distinct split numbers.
         */
        private void nodeProcessed(InternalCommonGroup group) {
            Message message;
            SocketChannel socket;
            NodeData nodeData = InternalPCJ.getNodeData();
            int parentId = group.getCommunicationTree().getParentNode();
            if (group.getCommunicationTree().getParentNode() >= 0) {
                message = new SplitGroupRequestMessage(group.getGroupId(), round, splitMap, orderingMap);
                socket = nodeData.getSocketChannelByPhysicalId(parentId);
            } else {
                int splitCount = (int) splitMap.values().stream().distinct().count();
                message = new SplitGroupQueryMessage(group.getGroupId(), round, splitCount);
                socket = nodeData.getNode0Socket();
            }
            InternalPCJ.getNetworker().send(socket, message);
        }

        /** Releases every waiting local thread. */
        protected void signalDone() {
            futureMap.values().forEach(SplitGroupFuture::signalDone);
        }

        /**
         * Root-only: node 0 answered with fresh group ids.  Maps each distinct
         * split number onto a group id, buckets the threads by target group id
         * (ordered by their ordering key, ties broken by thread id), then
         * starts group creation.
         */
        protected void groupIdsAnswer(InternalCommonGroup group, int[] groupIds) {
            int[] splitIds = splitMap.values().stream().mapToInt(Integer::intValue).distinct().toArray();

            // mapping: split number -> group id
            Map<Integer, Integer> splitNumToGroupIdMap = IntStream.range(0, groupIds.length)
                    .collect(HashMap::new, (map, key) -> map.put(splitIds[key], groupIds[key]), Map::putAll);

            // mapping: groupId -> List of thread groupId
            Map<Integer, List<Integer>> threadGroupIdMap = orderingMap.entrySet().stream()
                    .sorted(Map.Entry.<Integer, Integer>comparingByValue()
                            .thenComparing(Map.Entry.comparingByKey()))
                    .map(Map.Entry::getKey)
                    .collect(Collectors.groupingBy(threadId -> splitNumToGroupIdMap.get(splitMap.get(threadId))));

            createGroups(group, threadGroupIdMap);
        }

        /**
         * Propagates the final group layout down the tree and instantiates the
         * new groups for the threads hosted on this node.  New thread ids are
         * assigned by the position of each old thread id in the (already
         * ordered) per-group list.
         */
        protected void createGroups(InternalCommonGroup group, Map<Integer, List<Integer>> threadGroupIdMap) {
            NodeData nodeData = InternalPCJ.getNodeData();

            Message message = new SplitGroupResponseMessage(group.getGroupId(), round, threadGroupIdMap);
            group.getCommunicationTree().getChildrenNodes().stream()
                    .map(nodeData::getSocketChannelByPhysicalId)
                    .forEach(socket -> InternalPCJ.getNetworker().send(socket, message));

            Set<Integer> localThreadsId = group.getLocalThreadsId();
            // mapping: thread groupId -> groupId
            for (Map.Entry<Integer, List<Integer>> entry : threadGroupIdMap.entrySet()) {
                // Skip groups with no thread hosted on this node.
                if (Collections.disjoint(localThreadsId, entry.getValue())) {
                    continue;
                }
                InternalCommonGroup newCommonGroup = nodeData.getOrCreateCommonGroup(entry.getKey());

                Map<Integer, Integer> groupIdGlobalIdMap = new HashMap<>();
                int newThreadId = 0;
                for (int threadId : entry.getValue()) {
                    int globalThreadId = group.getGlobalThreadId(threadId);
                    groupIdGlobalIdMap.put(newThreadId, globalThreadId);
                    if (localThreadsId.contains(threadId)) {
                        InternalGroup threadGroup = new InternalGroup(newThreadId, newCommonGroup);
                        PcjThread pcjThread = nodeData.getPcjThread(globalThreadId);
                        pcjThread.getThreadData().addGroup(threadGroup);

                        futureMap.get(threadId).setGroup(threadGroup);
                    }
                    ++newThreadId;
                }
                newCommonGroup.updateThreadsMap(groupIdGlobalIdMap);
            }
            readyToGo(group);
        }

        /**
         * Final barrier: once this node and all its children are ready, tell
         * the parent we are waiting — or, at the root, broadcast the "go".
         */
        protected void readyToGo(InternalCommonGroup group) {
            int leftPhysical = readyToGoNotificationCount.decrementAndGet();
            if (leftPhysical == 0) {
                NodeData nodeData = InternalPCJ.getNodeData();
                Message message;
                SocketChannel socket;
                int parentId = group.getCommunicationTree().getParentNode();
                if (parentId >= 0) {
                    message = new SplitGroupWaitingMessage(group.getGroupId(), round);
                    socket = nodeData.getSocketChannelByPhysicalId(parentId);
                } else {
                    // Root: the "go" message is routed to this node itself.
                    message = new SplitGroupGoMessage(group.getGroupId(), round);
                    socket = nodeData.getSocketChannelByPhysicalId(nodeData.getCurrentNodePhysicalId());
                }
                InternalPCJ.getNetworker().send(socket, message);
            }
        }

        /** Immutable pair of pending (local, physical) notification counts. */
        private static class NotificationCount {

            private final int local;
            private final int physical;

            public NotificationCount(int local, int physical) {
                this.local = local;
                this.physical = physical;
            }

            // True when no further notification is expected.
            boolean isDone() {
                return local == 0 && physical == 0;
            }
        }
    }
}
/* * Copyright 2013 Hayden Smith * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package de.redoxi.ruste.core.editors; import org.eclipse.jface.resource.ImageRegistry; import org.eclipse.jface.text.BadLocationException; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.ITreeContentProvider; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StyledCellLabelProvider; import org.eclipse.jface.viewers.StyledString; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerCell; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.texteditor.IDocumentProvider; import org.eclipse.ui.texteditor.ITextEditor; import org.eclipse.ui.views.contentoutline.ContentOutlinePage; import de.redoxi.ruste.core.Plugin; import de.redoxi.ruste.core.model.ast.ASTNode; import de.redoxi.ruste.core.model.ast.Arg; import de.redoxi.ruste.core.model.ast.Crate; import de.redoxi.ruste.core.model.ast.EnumVariant; import de.redoxi.ruste.core.model.ast.Enumeration; import de.redoxi.ruste.core.model.ast.Field; import de.redoxi.ruste.core.model.ast.Function; import de.redoxi.ruste.core.model.ast.Identifiable; import de.redoxi.ruste.core.model.ast.Implementation; import 
de.redoxi.ruste.core.model.ast.ImplementationMethod; import de.redoxi.ruste.core.model.ast.Item; import de.redoxi.ruste.core.model.ast.Module; import de.redoxi.ruste.core.model.ast.NamedArg; import de.redoxi.ruste.core.model.ast.NamedField; import de.redoxi.ruste.core.model.ast.SelfArg; import de.redoxi.ruste.core.model.ast.Structure; import de.redoxi.ruste.core.model.ast.Trait; import de.redoxi.ruste.core.model.ast.TraitMethod; import de.redoxi.ruste.core.model.ast.Visible; import de.redoxi.ruste.core.parser.IRustParser; import de.redoxi.ruste.core.parser.NativeParser; import de.redoxi.ruste.core.ui.ASTIcon; /** * Parses a Rust source file to provide an overview of the source file in the * Outline view * * @author Hayden Smith * @since 0.0.1 */ public class RustContentOutlinePage extends ContentOutlinePage { private IDocumentProvider documentProvider; private ITextEditor editor; private IEditorInput input; private ImageRegistry imageRegistry; public RustContentOutlinePage(IDocumentProvider documentProvider, ITextEditor editor) { super(); this.documentProvider = documentProvider; this.editor = editor; this.imageRegistry = Plugin.getInstance().getImageRegistry(); } @Override public void createControl(Composite parent) { super.createControl(parent); TreeViewer viewer = getTreeViewer(); viewer.setLabelProvider(new LabelProvider()); viewer.setContentProvider(new ContentProvider()); viewer.addSelectionChangedListener(this); if (input != null) viewer.setInput(input); } @Override public void selectionChanged(SelectionChangedEvent event) { super.selectionChanged(event); final IStructuredSelection selection = (IStructuredSelection) event .getSelection(); final ASTNode selectedItem = (ASTNode) selection.getFirstElement(); if (selectedItem == null) { return; } IDocument document = documentProvider.getDocument(input); editor.resetHighlightRange(); try { int offset = document .getLineOffset(selectedItem.getStartLine() - 1) + selectedItem.getStartPos(); int length = 
document.getLineOffset(selectedItem.getEndLine() - 1) + selectedItem.getEndPos() - offset; editor.setHighlightRange(offset, length, true); } catch (BadLocationException ex) { } } /** * Sets the input of the outline page * * @param input * the input of this outline page */ public void setInput(IEditorInput input) { this.input = input; update(); } /** * Updates the outline page. */ public void update() { TreeViewer viewer = getTreeViewer(); if (viewer != null) { Control control = viewer.getControl(); if (control != null && !control.isDisposed()) { control.setRedraw(false); viewer.setInput(input); viewer.expandAll(); control.setRedraw(true); } } } /** * Styles nodes in the Rust outline */ class LabelProvider extends StyledCellLabelProvider { @Override public void update(ViewerCell cell) { ASTNode item = (ASTNode) cell.getElement(); Image icon = null; StyledString label = new StyledString(); // Default icon based on visibility if (item instanceof Visible) { switch (((Visible) item).getVisibility()) { case PRIVATE: icon = imageRegistry.get(ASTIcon.PRIVATE.getId()); default: icon = imageRegistry.get(ASTIcon.PUBLIC.getId()); } } if (item instanceof Identifiable && ((Identifiable) item).getIdentifier() != null && !((Identifiable) item).getIdentifier().isEmpty()) { label.append(((Identifiable) item).getIdentifier()); } if (item instanceof Crate) { icon = imageRegistry.get(ASTIcon.CRATE.getId()); } else if (item instanceof Module) { icon = imageRegistry.get(ASTIcon.MODULE.getId()); } else if (item instanceof Function) { label.append(" ("); appendArgsToLabel((Function) item, label); label.append(") "); appendReturnTypeToLabel((Function) item, label); } else if (item instanceof Structure) { icon = imageRegistry.get(ASTIcon.STRUCT.getId()); } else if (item instanceof Enumeration) { icon = imageRegistry.get(ASTIcon.ENUM.getId()); } /* * else if (item instanceof Static) { icon = STATIC_ICON; * label.append(" : ", StyledString.DECORATIONS_STYLER); * label.append(((Static) 
item).getType(), * StyledString.DECORATIONS_STYLER); } */ else if (item instanceof Trait) { icon = imageRegistry.get(ASTIcon.TRAIT.getId()); } else if (item instanceof Implementation) { icon = imageRegistry.get(ASTIcon.IMPL.getId()); } else if (item instanceof EnumVariant) { icon = imageRegistry.get(ASTIcon.ENUM_VARIANT.getId()); } else if (item instanceof TraitMethod) { label.append(" ("); appendMethodArgsToLabel((TraitMethod) item, label); label.append(") "); appendReturnTypeToLabel((TraitMethod) item, label); } else if (item instanceof ImplementationMethod) { label.append(" ("); appendMethodArgsToLabel((ImplementationMethod) item, label); label.append(") "); appendReturnTypeToLabel((ImplementationMethod) item, label); } else if (item instanceof Field) { if (item instanceof NamedField) { label.append(" : "); } label.append(((Field) item).getTrait()); } cell.setText(label.toString()); if (icon != null) { cell.setImage(icon); } super.update(cell); } protected void appendArgsToLabel(Function fn, StyledString label) { for (int i = 0; i < fn.getArgs().size(); ++i) { final NamedArg arg = fn.getArgs().get(i); final String name = arg.getIdentifier(); final String type = arg.getType(); label.append(name); label.append(" : "); label.append(type); if (i < fn.getArgs().size() - 1) { label.append(", "); } } } protected void appendMethodArgsToLabel(TraitMethod method, StyledString label) { for (int i = 0; i < method.getArgs().size(); ++i) { final Arg arg = method.getArgs().get(i); if (arg instanceof SelfArg) { label.append(arg.getType()); } else { final String name = ((NamedArg) arg).getIdentifier(); final String type = arg.getType(); label.append(name); label.append(" : "); label.append(type); } if (i < method.getArgs().size() - 1) { label.append(", "); } } } protected void appendMethodArgsToLabel(ImplementationMethod method, StyledString label) { for (int i = 0; i < method.getArgumentNames().size(); ++i) { final String name = method.getArgumentNames().get(i); final String 
type = method.getArgumentTypes().get(i); label.append(name); label.append(" : "); label.append(type); if (i < method.getArgumentNames().size() - 1) { label.append(", "); } } } protected void appendReturnTypeToLabel(Function fn, StyledString label) { if (fn.getReturnType() != null) { label.append(" -> ", StyledString.DECORATIONS_STYLER); label.append(fn.getReturnType(), StyledString.DECORATIONS_STYLER); } } protected void appendReturnTypeToLabel(TraitMethod fn, StyledString label) { if (fn.getReturnType() != null) { label.append(" -> ", StyledString.DECORATIONS_STYLER); label.append(fn.getReturnType(), StyledString.DECORATIONS_STYLER); } } protected void appendReturnTypeToLabel(ImplementationMethod fn, StyledString label) { if (fn.getReturnType() != null) { label.append(" -> ", StyledString.DECORATIONS_STYLER); label.append(fn.getReturnType(), StyledString.DECORATIONS_STYLER); } } } /** * Provide nodes for the parse tree of the given editor input * * TODO Move this to a source document parser class */ public class ContentProvider implements ITreeContentProvider { private Crate crate; @Override public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { if (newInput != null) { IDocument document = documentProvider.getDocument(newInput); if (document != null) { parse(document); } } } protected void parse(IDocument document) { final String crateIdentifier = input.getName().substring(0, input.getName().lastIndexOf('.')); final IRustParser parser = new NativeParser(); parser.setName(crateIdentifier); parser.setSource(document); parser.parse(); crate = (Crate) parser.getRoot(); if (crate != null) { crate.setIdentifier(crateIdentifier); } } @Override public Object[] getChildren(Object object) { if (object instanceof Crate) { return ((Crate) object).getItems().toArray(); } else if (object instanceof Module) { return ((Item) object).getItems().toArray(); } else if (object instanceof Trait) { return ((Trait) object).getMethods().toArray(); } else if (object 
instanceof Implementation) { return ((Implementation) object).getMethods().toArray(); } else if (object instanceof Enumeration) { return ((Enumeration) object).getVariants().toArray(); } else if (object instanceof Implementation) { return ((Implementation) object).getMethods().toArray(); } else if (object instanceof Structure) { return ((Structure) object).getFields().toArray(); } return null; } @Override public Object[] getElements(Object arg0) { return new Object[] { crate }; } @Override public Object getParent(Object object) { if (object instanceof ASTNode) { return ((ASTNode) object).getParent(); } return null; } @Override public boolean hasChildren(Object object) { if (object instanceof ASTNode) { return ((ASTNode) object).hasChildren(); } return false; } @Override public void dispose() { crate = null; } } /* * TODO Type definitions * * @see http://static.rust-lang.org/doc/master/rust.html#type-definitions */ }
/*
 * Copyright 2011-2012 Amazon Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *    http://aws.amazon.com/apache2.0
 *
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
 * OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amazonaws.eclipse.codedeploy.explorer.editor;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.forms.IFormColors;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.part.EditorPart;

import com.amazonaws.eclipse.codedeploy.explorer.editor.table.DeploymentsTableView;
import com.amazonaws.eclipse.codedeploy.explorer.image.CodeDeployExplorerImages;
import com.amazonaws.eclipse.core.AwsToolkitCore;
import com.amazonaws.eclipse.core.mobileanalytics.AwsToolkitMetricType;
import com.amazonaws.eclipse.explorer.AwsAction;
import com.amazonaws.services.codedeploy.AmazonCodeDeploy;
import com.amazonaws.services.codedeploy.model.AutoScalingGroup;
import com.amazonaws.services.codedeploy.model.DeploymentGroupInfo;
import com.amazonaws.services.codedeploy.model.EC2TagFilter;
import com.amazonaws.services.codedeploy.model.GetDeploymentGroupRequest;

/**
 * Read-only editor showing a CodeDeploy deployment group: a summary section
 * loaded asynchronously from the CodeDeploy service, plus the deployment
 * history table.  All save-related operations are no-ops.
 */
public class DeploymentGroupEditor extends EditorPart {

    public final static String ID = "com.amazonaws.eclipse.codedeploy.explorer.editor.deploymentGroupEditor";

    private DeploymentGroupEditorInput deploymentGroupEditorInput;
    private DeploymentsTableView deploymentsTable;

    public DeploymentsTableView getDeploymentsTableView() {
        return deploymentsTable;
    }

    // Editor is read-only: saving is intentionally a no-op.
    @Override
    public void doSave(IProgressMonitor monitor) {}

    @Override
    public void doSaveAs() {}

    @Override
    public void init(IEditorSite site, IEditorInput input) throws PartInitException {
        setSite(site);
        setInput(input);
        deploymentGroupEditorInput = (DeploymentGroupEditorInput) input;
        setPartName(input.getName());
    }

    @Override
    public boolean isDirty() {
        return false;
    }

    @Override
    public boolean isSaveAsAllowed() {
        return false;
    }

    /**
     * Builds the form: header with title/icon, summary section, deployments
     * table, and a Refresh toolbar action.
     */
    @Override
    public void createPartControl(Composite parent) {
        FormToolkit toolkit = new FormToolkit(Display.getDefault());

        ScrolledForm form = new ScrolledForm(parent, SWT.V_SCROLL | SWT.H_SCROLL);
        form.setExpandHorizontal(true);
        form.setExpandVertical(true);
        form.setBackground(toolkit.getColors().getBackground());
        form.setForeground(toolkit.getColors().getColor(IFormColors.TITLE));
        form.setFont(JFaceResources.getHeaderFont());

        form.setText(deploymentGroupEditorInput.getName());
        toolkit.decorateFormHeading(form.getForm());
        form.setImage(AwsToolkitCore.getDefault().getImageRegistry()
                .get(CodeDeployExplorerImages.IMG_DEPLOYMENT_GROUP));

        form.getBody().setLayout(new GridLayout(1, false));

        createDeploymentGroupSummary(form, toolkit);
        createDeploymentHistoryTable(form, toolkit);

        form.getToolBarManager().add(new RefreshAction());
        form.getToolBarManager().update(true);
    }

    /** Toolbar action that reloads the deployment history table. */
    private class RefreshAction extends AwsAction {

        public RefreshAction() {
            super(AwsToolkitMetricType.EXPLORER_CODEDEPLOY_REFRESH_DEPLOYMENT_GROUP_EDITOR);
            this.setText("Refresh");
            this.setToolTipText("Refresh deployment history");
            this.setImageDescriptor(AwsToolkitCore.getDefault()
                    .getImageRegistry()
                    .getDescriptor(AwsToolkitCore.IMAGE_REFRESH));
        }

        @Override
        protected void doRun() {
            deploymentsTable.refreshAsync();
            actionFinished();
        }
    }

    /**
     * Creates the table of deployment histories
     */
    private void createDeploymentHistoryTable(final ScrolledForm form,
            final FormToolkit toolkit) {
        deploymentsTable = new DeploymentsTableView(
                deploymentGroupEditorInput, form.getBody(), toolkit, SWT.None);
        deploymentsTable.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
    }

    /**
     * Creates a summary of the deployment group.  A placeholder label is shown
     * immediately; the details are fetched on a background thread and swapped
     * in on the UI thread via syncExec.
     */
    private void createDeploymentGroupSummary(final ScrolledForm form,
            final FormToolkit toolkit) {
        final Composite parent = toolkit.createComposite(form.getBody(), SWT.None);
        parent.setLayout(new GridLayout(2, false));

        toolkit.createLabel(parent, "Deployment Group info loading");
        toolkit.createLabel(parent, "");

        // Background fetch keeps the (blocking) service call off the UI thread.
        new Thread() {
            @Override
            public void run() {
                AmazonCodeDeploy codeDeployClient = deploymentGroupEditorInput
                        .getCodeDeployClient();

                DeploymentGroupInfo deployGroupInfo = codeDeployClient.getDeploymentGroup(
                        new GetDeploymentGroupRequest()
                                .withApplicationName(deploymentGroupEditorInput.getApplicationName())
                                .withDeploymentGroupName(deploymentGroupEditorInput.getDeploymentGroupName())
                        )
                        .getDeploymentGroupInfo();

                if ( deployGroupInfo == null ) return;

                updateComposite(form, toolkit, deployGroupInfo);
            }

            /** Replaces the placeholder labels with the fetched details. */
            protected void updateComposite(final ScrolledForm form,
                    final FormToolkit toolkit, final DeploymentGroupInfo deployGroup) {
                Display.getDefault().syncExec(new Runnable() {
                    @Override
                    public void run() {
                        // Drop the "loading" placeholders before repopulating.
                        for ( Control c : parent.getChildren() ) {
                            c.dispose();
                        }

                        toolkit.createLabel(parent, "Application Name: ");
                        toolkit.createLabel(parent, deployGroup.getApplicationName());

                        toolkit.createLabel(parent, "Deployment Group Name: ");
                        toolkit.createLabel(parent, deployGroup.getDeploymentGroupName());

                        toolkit.createLabel(parent, "Deployment Group ID: ");
                        toolkit.createLabel(parent, deployGroup.getDeploymentGroupId());

                        toolkit.createLabel(parent, "Service Role ARN: ");
                        toolkit.createLabel(parent, deployGroup.getServiceRoleArn());

                        toolkit.createLabel(parent, "Deployment Configuration: ");
                        toolkit.createLabel(parent, deployGroup.getDeploymentConfigName());

                        if (deployGroup.getEc2TagFilters() != null
                                && !deployGroup.getEc2TagFilters().isEmpty()) {
                            toolkit.createLabel(parent, "Amazon EC2 Tags: ");
                            StringBuilder tags = new StringBuilder();
                            boolean first = true;
                            for (EC2TagFilter tag : deployGroup.getEc2TagFilters()) {
                                if (first) {
                                    first = false;
                                } else {
                                    tags.append(", ");
                                }
                                // Render each filter according to its match type.
                                if ("KEY_AND_VALUE".equals(tag.getType())) {
                                    tags.append(tag.getKey() + ":" + tag.getValue());
                                } else if ("KEY_ONLY".equals(tag.getType())) {
                                    tags.append(tag.getKey() + "(KEY_ONLY)");
                                } else if ("VALUE_ONLY".equals(tag.getType())) {
                                    tags.append(tag.getValue() + "(VALUE_ONLY)");
                                }
                            }
                            toolkit.createLabel(parent, tags.toString());
                        }

                        if (deployGroup.getAutoScalingGroups() != null
                                && !deployGroup.getAutoScalingGroups().isEmpty()) {
                            toolkit.createLabel(parent, "Associated Auto Scaling Groups: ");
                            StringBuilder groups = new StringBuilder();
                            boolean first = true;
                            for (AutoScalingGroup group : deployGroup.getAutoScalingGroups()) {
                                if (first) {
                                    first = false;
                                } else {
                                    groups.append(", ");
                                }
                                groups.append(group.getName() + ":" + group.getHook());
                            }
                            toolkit.createLabel(parent, groups.toString());
                        }

                        // Relayout so the new labels become visible.
                        form.reflow(true);
                    }
                });
            }
        }.start();
    }

    @Override
    public void setFocus() {
    }
}
/*
 * Copyright 2014-2016 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaaproject.kaa.client.logging;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.kaaproject.kaa.client.channel.failover.FailoverManager;
import org.kaaproject.kaa.client.channel.KaaChannelManager;
import org.kaaproject.kaa.client.channel.LogTransport;
import org.kaaproject.kaa.client.channel.TransportConnectionInfo;
import org.kaaproject.kaa.client.channel.failover.FailoverStatus;
import org.kaaproject.kaa.client.context.ExecutorContext;
import org.kaaproject.kaa.client.logging.future.RecordFuture;
import org.kaaproject.kaa.client.logging.memory.MemLogStorage;
import org.kaaproject.kaa.common.TransportType;
import org.kaaproject.kaa.common.endpoint.gen.LogDeliveryErrorCode;
import org.kaaproject.kaa.common.endpoint.gen.LogDeliveryStatus;
import org.kaaproject.kaa.common.endpoint.gen.LogEntry;
import org.kaaproject.kaa.common.endpoint.gen.LogSyncRequest;
import org.kaaproject.kaa.common.endpoint.gen.LogSyncResponse;
import org.kaaproject.kaa.common.endpoint.gen.SyncResponseResultType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Reference implementation of @see LogCollector
 *
 * @author Andrew Shvayka
 */
public abstract class AbstractLogCollector implements LogCollector, LogProcessor {
    private static final Logger LOG = LoggerFactory.getLogger(AbstractLogCollector.class);

    protected final ExecutorContext executorContext;
    private final LogTransport transport;
    // bucketId -> scheduled delivery-timeout watchdog for that bucket.
    private final ConcurrentHashMap<Integer, Future<?>> timeouts = new ConcurrentHashMap<>();
    // bucketId -> futures of individual records in that bucket; guarded by
    // synchronized(deliveryFuturesMap).
    protected final Map<Integer, List<RecordFuture>> deliveryFuturesMap = new HashMap<>();
    // bucketId -> bucket metadata reported back to delivery listeners.
    protected final Map<Integer, BucketInfo> bucketInfoMap = new ConcurrentHashMap<>();
    private final KaaChannelManager channelManager;
    private final FailoverManager failoverManager;

    protected LogStorage storage;
    private LogUploadStrategy strategy;
    private final LogFailoverCommand controller;
    private LogDeliveryListener logDeliveryListener;

    // Guards uploadCheckInProgress so at most one check is scheduled at a time.
    private final Object uploadCheckLock = new Object();
    private boolean uploadCheckInProgress = false;

    public AbstractLogCollector(LogTransport transport, ExecutorContext executorContext,
                                KaaChannelManager channelManager, FailoverManager failoverManager) {
        this.strategy = new DefaultLogUploadStrategy();
        this.storage = new MemLogStorage();
        this.controller = new DefaultLogUploadController();
        this.channelManager = channelManager;
        this.transport = transport;
        this.executorContext = executorContext;
        this.failoverManager = failoverManager;
    }

    /** Replaces the upload strategy; rejects {@code null}. */
    @Override
    public void setStrategy(LogUploadStrategy strategy) {
        if (strategy == null) {
            throw new IllegalArgumentException("Strategy is null!");
        }
        this.strategy = strategy;
        LOG.info("New log upload strategy was set: {}", strategy);
    }

    /** Replaces the log storage; rejects {@code null}. */
    @Override
    public void setStorage(LogStorage storage) {
        if (storage == null) {
            throw new IllegalArgumentException("Storage is null!");
        }
        this.storage = storage;
        LOG.info("New log storage was set {}", storage);
    }

    /**
     * Fills an outgoing sync request with the next bucket of stored records
     * and schedules a delivery-timeout watchdog for that bucket.  Does nothing
     * when the parallel-upload limit is reached or the storage is empty.
     */
    @Override
    public void fillSyncRequest(LogSyncRequest request) {
        if (!isUploadAllowed()) {
            return;
        }
        LogBucket bucket = storage.getNextBucket();

        if (bucket == null || bucket.getRecords().isEmpty()) {
            LOG.trace("No logs to send");
            return;
        }

        List<LogRecord> recordList = bucket.getRecords();
        LOG.trace("Sending {} log records", recordList.size());
        List<LogEntry> logs = new LinkedList<>();
        for (LogRecord record : recordList) {
            logs.add(new LogEntry(ByteBuffer.wrap(record.getData())));
        }

        // The bucket id doubles as the sync request id.
        request.setRequestId(bucket.getBucketId());
        request.setLogEntries(logs);

        final int bucketId = bucket.getBucketId();
        // Watchdog: if no ack arrives within the strategy timeout, the bucket
        // is rolled back for re-upload (see checkDeliveryTimeout).
        Future<?> timeoutFuture = executorContext.getScheduledExecutor().schedule(new Runnable() {
            @Override
            public void run() {
                if (!Thread.currentThread().isInterrupted()) {
                    checkDeliveryTimeout(bucketId);
                } else {
                    LOG.debug("Timeout check worker for log bucket: {} was interrupted", bucketId);
                }
            }
        }, strategy.getTimeout(), TimeUnit.SECONDS);

        LOG.info("Adding following bucket id [{}] for timeout tracking", bucket.getBucketId());
        timeouts.put(bucket.getBucketId(), timeoutFuture);
    }

    /**
     * Handles per-bucket delivery statuses: on success removes the bucket and
     * notifies listeners/futures; on failure rolls the bucket back and lets
     * the strategy schedule the retry.  Each processed status also cancels
     * that bucket's timeout watchdog.
     */
    @Override
    public void onLogResponse(LogSyncResponse logSyncResponse) throws IOException {
        if (logSyncResponse.getDeliveryStatuses() != null) {
            boolean isAlreadyScheduled = false;
            for (LogDeliveryStatus response : logSyncResponse.getDeliveryStatuses()) {
                final int requestId = response.getRequestId();
                final BucketInfo bucketInfo = bucketInfoMap.get(requestId);
                if (bucketInfo != null) {
                    bucketInfoMap.remove(requestId);
                    if (response.getResult() == SyncResponseResultType.SUCCESS) {
                        storage.removeBucket(response.getRequestId());
                        // Listener and future callbacks run on the callback
                        // executor, never on the transport thread.
                        if (logDeliveryListener != null) {
                            executorContext.getCallbackExecutor().execute(new Runnable() {
                                @Override
                                public void run() {
                                    logDeliveryListener.onLogDeliverySuccess(bucketInfo);
                                }
                            });
                        }
                        executorContext.getCallbackExecutor().execute(new Runnable() {
                            @Override
                            public void run() {
                                notifyDeliveryFuturesOnSuccess(bucketInfo);
                            }
                        });
                    } else {
                        // Failed delivery: make the bucket available again and
                        // delegate retry policy to the strategy.
                        storage.rollbackBucket(response.getRequestId());

                        final LogDeliveryErrorCode errorCode = response.getErrorCode();
                        final LogFailoverCommand controller = this.controller;
                        executorContext.getCallbackExecutor().execute(new Runnable() {
                            @Override
                            public void run() {
                                strategy.onFailure(controller, errorCode);
                            }
                        });
                        if (logDeliveryListener != null) {
                            executorContext.getCallbackExecutor().execute(new Runnable() {
                                @Override
                                public void run() {
                                    logDeliveryListener.onLogDeliveryFailure(bucketInfo);
                                }
                            });
                        }
                        // The strategy takes over scheduling; skip the extra
                        // upload check below.
                        isAlreadyScheduled = true;
                    }
                } else {
                    LOG.warn("BucketInfo is null");
                }

                LOG.info("Removing bucket id from timeouts: {}", response.getRequestId());
                Future<?> timeoutFuture = timeouts.remove(response.getRequestId());
                if (timeoutFuture != null) {
                    timeoutFuture.cancel(true);
                } else {
                    LOG.warn("TimeoutFuture is null and cannot be canceled");
                }
            }

            if (!isAlreadyScheduled) {
                processUploadDecision(strategy.isUploadNeeded(storage.getStatus()));
            }
        }
    }

    /** Closes the storage and cancels every pending timeout watchdog. */
    @Override
    public void stop() {
        LOG.debug("Closing storage");
        storage.close();
        LOG.debug("Clearing timeouts map");
        for (Future<?> timeoutFuture : timeouts.values()) {
            timeoutFuture.cancel(true);
        }
        timeouts.clear();
    }

    // Acts on the strategy's verdict: trigger a sync now, or schedule a
    // deferred re-check while records are still pending.
    private void processUploadDecision(LogUploadStrategyDecision decision) {
        switch (decision) {
            case UPLOAD:
                if (isUploadAllowed()) {
                    LOG.debug("Going to upload logs");
                    transport.sync();
                }
                break;
            case NOOP:
                if (strategy.getUploadCheckPeriod() > 0 && storage.getStatus().getRecordCount() > 0) {
                    scheduleUploadCheck();
                }
                break;
            default:
                break;
        }
    }

    /** Schedules a single deferred upload check; no-op if one is pending. */
    protected void scheduleUploadCheck() {
        LOG.trace("Attempt to execute upload check: {}", uploadCheckInProgress);
        synchronized (uploadCheckLock) {
            if (!uploadCheckInProgress) {
                LOG.trace("Scheduling upload check with timeout: {}", strategy.getUploadCheckPeriod());
                uploadCheckInProgress = true;
                executorContext.getScheduledExecutor().schedule(new Runnable() {
                    @Override
                    public void run() {
                        synchronized (uploadCheckLock) {
                            uploadCheckInProgress = false;
                        }
                        uploadIfNeeded();
                    }
                }, strategy.getUploadCheckPeriod(), TimeUnit.SECONDS);
            } else {
                LOG.trace("Upload check is already scheduled!");
            }
        }
    }

    // Watchdog body: if the bucket is still awaiting an ack, roll it back and
    // let the strategy react to the timeout.  The remove() doubles as the
    // "still pending?" test (onLogResponse removes acked buckets).
    private void checkDeliveryTimeout(final int bucketId) {
        LOG.debug("Checking for a delivery timeout of the bucket with id: [{}] ", bucketId);
        Future<?> timeoutFuture = timeouts.remove(bucketId);

        if (timeoutFuture != null) {
            LOG.info("Log delivery timeout detected for the bucket with id: [{}]", bucketId);

            storage.rollbackBucket(bucketId);

            final LogFailoverCommand controller = this.controller;
            executorContext.getCallbackExecutor().execute(new Runnable() {
                @Override
                public void run() {
                    strategy.onTimeout(controller);
                }
            });
            if (logDeliveryListener != null) {
                executorContext.getCallbackExecutor().execute(new Runnable() {
                    @Override
                    public void run() {
                        logDeliveryListener.onLogDeliveryTimeout(bucketInfoMap.get(bucketId));
                    }
                });
            }
            timeoutFuture.cancel(true);
        } else {
            LOG.trace("No log delivery timeout for the bucket with id [{}] was detected", bucketId);
        }
    }

    // Enforces the strategy's cap on concurrently in-flight buckets.
    private boolean isUploadAllowed() {
        if (timeouts.size() >= strategy.getMaxParallelUploads()) {
            LOG.debug("Ignore log upload: too much pending requests {}, max allowed {}", timeouts.size(), strategy.getMaxParallelUploads());
            return false;
        }
        return true;
    }

    /** Re-evaluates the strategy against the current storage status. */
    protected void uploadIfNeeded() {
        processUploadDecision(strategy.isUploadNeeded(storage.getStatus()));
    }

    /** Commands the upload strategy may issue on failure/timeout. */
    private class DefaultLogUploadController implements LogFailoverCommand {
        @Override
        public void switchAccessPoint() {
            TransportConnectionInfo server = channelManager.getActiveServer(TransportType.LOGGING);
            if (server != null) {
                failoverManager.onServerFailed(server, FailoverStatus.OPERATION_SERVERS_NA);
            } else {
                LOG.warn("Failed to switch Operation server. No channel is used for logging transport");
            }
        }

        @Override
        public void retryLogUpload() {
            uploadIfNeeded();
        }

        @Override
        public void retryLogUpload(int delay) {
            executorContext.getScheduledExecutor().schedule(new Runnable() {
                @Override
                public void run() {
                    uploadIfNeeded();
                }
            }, delay, TimeUnit.SECONDS);
        }
    }

    @Override
    public void setLogDeliveryListener(LogDeliveryListener logDeliveryListener) {
        this.logDeliveryListener = logDeliveryListener;
    }

    /** Registers a record future to be completed when its bucket is acked. */
    protected void addDeliveryFuture(BucketInfo info, RecordFuture future) {
        synchronized (deliveryFuturesMap) {
            List<RecordFuture> deliveryFutures = deliveryFuturesMap.get(info.getBucketId());
            if (deliveryFutures == null) {
                deliveryFutures = new LinkedList<RecordFuture>();
                deliveryFuturesMap.put(info.getBucketId(), deliveryFutures);
            }
            deliveryFutures.add(future);
        }
    }

    /** Completes and discards all record futures of a delivered bucket. */
    protected void notifyDeliveryFuturesOnSuccess(BucketInfo info) {
        synchronized (deliveryFuturesMap) {
            List<RecordFuture> deliveryFutures = deliveryFuturesMap.get(info.getBucketId());
            if (deliveryFutures != null) {
                for (RecordFuture future : deliveryFutures) {
                    RecordInfo recordInfo = new RecordInfo(info);
                    future.setValue(recordInfo);
                }
                deliveryFuturesMap.remove(info.getBucketId());
            }
        }
    }
}
/* * Copyright 2012-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.gradle.tasks.bundling; import java.io.File; import java.util.Collections; import java.util.concurrent.Callable; import org.gradle.api.Action; import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.CopySpec; import org.gradle.api.file.FileCollection; import org.gradle.api.file.FileCopyDetails; import org.gradle.api.file.FileTreeElement; import org.gradle.api.internal.file.copy.CopyAction; import org.gradle.api.specs.Spec; import org.gradle.api.tasks.Internal; import org.gradle.api.tasks.Nested; import org.gradle.api.tasks.Optional; import org.gradle.api.tasks.bundling.Jar; /** * A custom {@link Jar} task that produces a Spring Boot executable jar. 
 *
 * @author Andy Wilkinson
 * @author Madhura Bhave
 * @author Scott Frederick
 * @author Phillip Webb
 * @since 2.0.0
 */
public class BootJar extends Jar implements BootArchive {

	private static final String LAUNCHER = "org.springframework.boot.loader.JarLauncher";

	private static final String CLASSES_DIRECTORY = "BOOT-INF/classes/";

	private static final String LIB_DIRECTORY = "BOOT-INF/lib/";

	private static final String LAYERS_INDEX = "BOOT-INF/layers.idx";

	private static final String CLASSPATH_INDEX = "BOOT-INF/classpath.idx";

	private final BootArchiveSupport support;

	private final CopySpec bootInfSpec;

	private String mainClassName;

	private FileCollection classpath;

	// Non-null only once layering has been enabled via layered() / layered(Action).
	private LayeredSpec layered;

	/**
	 * Creates a new {@code BootJar} task.
	 */
	public BootJar() {
		this.support = new BootArchiveSupport(LAUNCHER, this::isLibrary, this::resolveZipCompression);
		this.bootInfSpec = getProject().copySpec().into("BOOT-INF");
		configureBootInfSpec(this.bootInfSpec);
		getMainSpec().with(this.bootInfSpec);
	}

	// Routes classpath directories into BOOT-INF/classes and archive files into
	// BOOT-INF/lib, while keeping module-info.class at the root of the jar.
	private void configureBootInfSpec(CopySpec bootInfSpec) {
		bootInfSpec.into("classes", fromCallTo(this::classpathDirectories));
		bootInfSpec.into("lib", fromCallTo(this::classpathFiles)).eachFile(this.support::excludeNonZipFiles);
		bootInfSpec.filesMatching("module-info.class",
				(details) -> details.setRelativePath(details.getRelativeSourcePath()));
	}

	private Iterable<File> classpathDirectories() {
		return classpathEntries(File::isDirectory);
	}

	private Iterable<File> classpathFiles() {
		return classpathEntries(File::isFile);
	}

	private Iterable<File> classpathEntries(Spec<File> filter) {
		return (this.classpath != null) ? this.classpath.filter(filter) : Collections.emptyList();
	}

	@Override
	public void copy() {
		// Configure the manifest (launcher, start class, index locations) before copying.
		this.support.configureManifest(getManifest(), getMainClassName(), CLASSES_DIRECTORY, LIB_DIRECTORY,
				CLASSPATH_INDEX, (this.layered != null) ? LAYERS_INDEX : null);
		super.copy();
	}

	@Override
	protected CopyAction createCopyAction() {
		// A layered jar needs a resolver to assign entries to layers.
		if (this.layered != null) {
			LayerResolver layerResolver = new LayerResolver(getConfigurations(), this.layered, this::isLibrary);
			String layerToolsLocation = this.layered.isIncludeLayerTools() ? LIB_DIRECTORY : null;
			return this.support.createCopyAction(this, layerResolver, layerToolsLocation);
		}
		return this.support.createCopyAction(this);
	}

	/**
	 * Returns the configurations of the task's project, used for layer resolution.
	 * @return the project's configurations
	 */
	@Internal
	protected Iterable<Configuration> getConfigurations() {
		return getProject().getConfigurations();
	}

	@Override
	public String getMainClassName() {
		// Fall back to the manifest's Start-Class attribute when no main class was set.
		if (this.mainClassName == null) {
			String manifestStartClass = (String) getManifest().getAttributes().get("Start-Class");
			if (manifestStartClass != null) {
				setMainClassName(manifestStartClass);
			}
		}
		return this.mainClassName;
	}

	@Override
	public void setMainClassName(String mainClassName) {
		this.mainClassName = mainClassName;
	}

	@Override
	public void requiresUnpack(String... patterns) {
		this.support.requiresUnpack(patterns);
	}

	@Override
	public void requiresUnpack(Spec<FileTreeElement> spec) {
		this.support.requiresUnpack(spec);
	}

	@Override
	public LaunchScriptConfiguration getLaunchScript() {
		return this.support.getLaunchScript();
	}

	@Override
	public void launchScript() {
		enableLaunchScriptIfNecessary();
	}

	@Override
	public void launchScript(Action<LaunchScriptConfiguration> action) {
		action.execute(enableLaunchScriptIfNecessary());
	}

	/**
	 * Returns the spec that describes the layers in a layered jar.
	 * @return the spec for the layers or {@code null}.
	 * @since 2.3.0
	 */
	@Nested
	@Optional
	public LayeredSpec getLayered() {
		return this.layered;
	}

	/**
	 * Configures the jar to be layered using the default layering.
	 * @since 2.3.0
	 */
	public void layered() {
		enableLayeringIfNecessary();
	}

	/**
	 * Configures the jar to be layered, customizing the layers using the given
	 * {@code action}.
	 * @param action the action to apply
	 * @since 2.3.0
	 */
	public void layered(Action<LayeredSpec> action) {
		action.execute(enableLayeringIfNecessary());
	}

	@Override
	public FileCollection getClasspath() {
		return this.classpath;
	}

	@Override
	public void classpath(Object... classpath) {
		// Appends to any classpath configured so far rather than replacing it.
		FileCollection existingClasspath = this.classpath;
		this.classpath = getProject().files((existingClasspath != null) ? existingClasspath : Collections.emptyList(),
				classpath);
	}

	@Override
	public void setClasspath(Object classpath) {
		this.classpath = getProject().files(classpath);
	}

	@Override
	public void setClasspath(FileCollection classpath) {
		this.classpath = getProject().files(classpath);
	}

	@Override
	public boolean isExcludeDevtools() {
		return this.support.isExcludeDevtools();
	}

	@Override
	public void setExcludeDevtools(boolean excludeDevtools) {
		this.support.setExcludeDevtools(excludeDevtools);
	}

	/**
	 * Returns a {@code CopySpec} that can be used to add content to the {@code BOOT-INF}
	 * directory of the jar.
	 * @return a {@code CopySpec} for {@code BOOT-INF}
	 * @since 2.0.3
	 */
	@Internal
	public CopySpec getBootInf() {
		CopySpec child = getProject().copySpec();
		this.bootInfSpec.with(child);
		return child;
	}

	/**
	 * Calls the given {@code action} to add content to the {@code BOOT-INF} directory of
	 * the jar.
	 * @param action the {@code Action} to call
	 * @return the {@code CopySpec} for {@code BOOT-INF} that was passed to the
	 * {@code Action}
	 * @since 2.0.3
	 */
	public CopySpec bootInf(Action<CopySpec> action) {
		CopySpec bootInf = getBootInf();
		action.execute(bootInf);
		return bootInf;
	}

	/**
	 * Return the {@link ZipCompression} that should be used when adding the file
	 * represented by the given {@code details} to the jar. By default, any
	 * {@link #isLibrary(FileCopyDetails) library} is {@link ZipCompression#STORED stored}
	 * and all other files are {@link ZipCompression#DEFLATED deflated}.
	 * @param details the file copy details
	 * @return the compression to use
	 */
	protected ZipCompression resolveZipCompression(FileCopyDetails details) {
		return isLibrary(details) ? ZipCompression.STORED : ZipCompression.DEFLATED;
	}

	/**
	 * Return if the {@link FileCopyDetails} are for a library. By default any file in
	 * {@code BOOT-INF/lib} is considered to be a library.
	 * @param details the file copy details
	 * @return {@code true} if the details are for a library
	 * @since 2.3.0
	 */
	protected boolean isLibrary(FileCopyDetails details) {
		String path = details.getRelativePath().getPathString();
		return path.startsWith(LIB_DIRECTORY);
	}

	// Lazily creates the launch-script configuration on first use.
	private LaunchScriptConfiguration enableLaunchScriptIfNecessary() {
		LaunchScriptConfiguration launchScript = this.support.getLaunchScript();
		if (launchScript == null) {
			launchScript = new LaunchScriptConfiguration(this);
			this.support.setLaunchScript(launchScript);
		}
		return launchScript;
	}

	// Lazily creates the layered spec on first use.
	private LayeredSpec enableLayeringIfNecessary() {
		if (this.layered == null) {
			this.layered = new LayeredSpec();
		}
		return this.layered;
	}

	/**
	 * Syntactic sugar that makes {@link CopySpec#into} calls a little easier to read.
	 * @param <T> the result type
	 * @param callable the callable
	 * @return an action to add the callable to the spec
	 */
	private static <T> Action<CopySpec> fromCallTo(Callable<T> callable) {
		return (spec) -> spec.from(callTo(callable));
	}

	/**
	 * Syntactic sugar that makes {@link CopySpec#from} calls a little easier to read.
	 * @param <T> the result type
	 * @param callable the callable
	 * @return the callable
	 */
	private static <T> Callable<T> callTo(Callable<T> callable) {
		return callable;
	}

}
package net.coding.program.third; import android.graphics.Bitmap; /** * Created by paveld on 3/6/14. */ public class FastBlur { public static Bitmap doBlur(Bitmap sentBitmap, int radius, boolean canReuseInBitmap) { // Stack Blur v1.0 from // http://www.quasimondo.com/StackBlurForCanvas/StackBlurDemo.html // // Java Author: Mario Klingemann <mario at quasimondo.com> // http://incubator.quasimondo.com // created Feburary 29, 2004 // Android port : Yahel Bouaziz <yahel at kayenko.com> // http://www.kayenko.com // ported april 5th, 2012 // This is a compromise between Gaussian Blur and Box blur // It creates much better looking blurs than Box Blur, but is // 7x faster than my Gaussian Blur implementation. // // I called it Stack Blur because this describes best how this // filter works internally: it creates a kind of moving stack // of colors whilst scanning through the image. Thereby it // just has to add one new block of color to the right side // of the stack and remove the leftmost color. The remaining // colors on the topmost layer of the stack are either added on // or reduced by one, depending on if they are on the right or // on the left side of the stack. 
// // If you are using this algorithm in your code please add // the following line: // // Stack Blur Algorithm by Mario Klingemann <mario@quasimondo.com> Bitmap bitmap; if (canReuseInBitmap) { bitmap = sentBitmap; } else { bitmap = sentBitmap.copy(sentBitmap.getConfig(), true); } if (radius < 1) { return (null); } int w = bitmap.getWidth(); int h = bitmap.getHeight(); int[] pix = new int[w * h]; bitmap.getPixels(pix, 0, w, 0, 0, w, h); int wm = w - 1; int hm = h - 1; int wh = w * h; int div = radius + radius + 1; int r[] = new int[wh]; int g[] = new int[wh]; int b[] = new int[wh]; int rsum, gsum, bsum, x, y, i, p, yp, yi, yw; int vmin[] = new int[Math.max(w, h)]; int divsum = (div + 1) >> 1; divsum *= divsum; int dv[] = new int[256 * divsum]; for (i = 0; i < 256 * divsum; i++) { dv[i] = (i / divsum); } yw = yi = 0; int[][] stack = new int[div][3]; int stackpointer; int stackstart; int[] sir; int rbs; int r1 = radius + 1; int routsum, goutsum, boutsum; int rinsum, ginsum, binsum; for (y = 0; y < h; y++) { rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0; for (i = -radius; i <= radius; i++) { p = pix[yi + Math.min(wm, Math.max(i, 0))]; sir = stack[i + radius]; sir[0] = (p & 0xff0000) >> 16; sir[1] = (p & 0x00ff00) >> 8; sir[2] = (p & 0x0000ff); rbs = r1 - Math.abs(i); rsum += sir[0] * rbs; gsum += sir[1] * rbs; bsum += sir[2] * rbs; if (i > 0) { rinsum += sir[0]; ginsum += sir[1]; binsum += sir[2]; } else { routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; } } stackpointer = radius; for (x = 0; x < w; x++) { r[yi] = dv[rsum]; g[yi] = dv[gsum]; b[yi] = dv[bsum]; rsum -= routsum; gsum -= goutsum; bsum -= boutsum; stackstart = stackpointer - radius + div; sir = stack[stackstart % div]; routsum -= sir[0]; goutsum -= sir[1]; boutsum -= sir[2]; if (y == 0) { vmin[x] = Math.min(x + radius + 1, wm); } p = pix[yw + vmin[x]]; sir[0] = (p & 0xff0000) >> 16; sir[1] = (p & 0x00ff00) >> 8; sir[2] = (p & 0x0000ff); rinsum += sir[0]; ginsum += 
sir[1]; binsum += sir[2]; rsum += rinsum; gsum += ginsum; bsum += binsum; stackpointer = (stackpointer + 1) % div; sir = stack[(stackpointer) % div]; routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; rinsum -= sir[0]; ginsum -= sir[1]; binsum -= sir[2]; yi++; } yw += w; } for (x = 0; x < w; x++) { rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0; yp = -radius * w; for (i = -radius; i <= radius; i++) { yi = Math.max(0, yp) + x; sir = stack[i + radius]; sir[0] = r[yi]; sir[1] = g[yi]; sir[2] = b[yi]; rbs = r1 - Math.abs(i); rsum += r[yi] * rbs; gsum += g[yi] * rbs; bsum += b[yi] * rbs; if (i > 0) { rinsum += sir[0]; ginsum += sir[1]; binsum += sir[2]; } else { routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; } if (i < hm) { yp += w; } } yi = x; stackpointer = radius; for (y = 0; y < h; y++) { // Preserve alpha channel: ( 0xff000000 & pix[yi] ) pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16) | (dv[gsum] << 8) | dv[bsum]; rsum -= routsum; gsum -= goutsum; bsum -= boutsum; stackstart = stackpointer - radius + div; sir = stack[stackstart % div]; routsum -= sir[0]; goutsum -= sir[1]; boutsum -= sir[2]; if (x == 0) { vmin[y] = Math.min(y + r1, hm) * w; } p = x + vmin[y]; sir[0] = r[p]; sir[1] = g[p]; sir[2] = b[p]; rinsum += sir[0]; ginsum += sir[1]; binsum += sir[2]; rsum += rinsum; gsum += ginsum; bsum += binsum; stackpointer = (stackpointer + 1) % div; sir = stack[stackpointer]; routsum += sir[0]; goutsum += sir[1]; boutsum += sir[2]; rinsum -= sir[0]; ginsum -= sir[1]; binsum -= sir[2]; yi += w; } } bitmap.setPixels(pix, 0, w, 0, 0, w, h); return (bitmap); } }
/* * Copyright 2015 NEC Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.o3project.odenos.core.component.network.flow.ofpflow; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.spy; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.Mockito; import org.msgpack.type.Value; import java.util.HashMap; import java.util.Map; public class OFPFlowActionDecIpTtlTest { private OFPFlowActionDecIpTtl target; /** * @throws java.lang.Exception throws Exception in targets */ @BeforeClass public static void setUpBeforeClass() throws Exception { } /** * @throws java.lang.Exception throws Exception in targets */ @AfterClass public static void tearDownAfterClass() throws Exception { } /** * @throws java.lang.Exception throws Exception in targets */ @Before public void setUp() throws Exception { target = spy(new OFPFlowActionDecIpTtl()); } /** * @throws java.lang.Exception throws Exception in targets */ @After public void tearDown() throws Exception { target = null; } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#validate()} * . 
*/ @Test public final void testValidate() { assertThat(target.validate(), is(true)); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#getType()} * . */ @Test public final void testGetType() { assertThat(target.getType(), is("OFPFlowActionDecIpTtl")); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#readValue(org.msgpack.type.Value)} * . */ @Test public final void testReadValue() { Value value = Mockito.mock(Value.class); assertThat(target.readValue(value), is(true)); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#writeValueSub(java.util.Map)} * . */ @Test public final void testWriteValueSub() { /* * set */ Map<String, Value> map = new HashMap<String, Value>(); /* * test */ boolean result = target.writeValueSub(map); /* * check */ assertThat(result, is(true)); assertThat(map.get("type").toString(), is("\"OFPFlowActionDecIpTtl\"")); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#equals(java.lang.Object)} * . */ @Test public final void testEqualsObject() { boolean result = target.equals(target); assertThat(result, is(true)); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#equals(java.lang.Object)} * . */ @Test public final void testEqualsObjectNull() { boolean result = target.equals(null); assertThat(result, is(false)); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#equals(java.lang.Object)} * . */ @Test public final void testEqualsObjectNotOFPFlowActionDecIpTtl() { boolean result = target.equals("String"); assertThat(result, is(false)); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#equals(java.lang.Object)} * . 
*/ @Test public final void testEqualsObjectSuperFalse() { /* * set */ OFPFlowActionDecIpTtl value = new OFPFlowActionDecIpTtl(); doReturn("type").when(target).getType(); /* * test */ boolean result = target.equals(value); /* * check */ assertThat(result, is(false)); } /** * Test method for * {@link org.o3project.odenos.core.component.network.flow.ofpflow.OFPFlowActionDecIpTtl#toString()} * . */ @Test public final void testToString() { /* * setting */ target = new OFPFlowActionDecIpTtl(); /* * test */ String result = target.toString(); /* * check */ assertThat(result.endsWith(target.getType()), is(true)); } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.codedeploy.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p>
 * Represents the input of a list on-premises instances operation.
 * </p>
 */
public class ListOnPremisesInstancesRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /**
     * <p>
     * The registration status of the on-premises instances:
     * </p>
     * <ul>
     * <li>Deregistered: Include deregistered on-premises instances in the
     * resulting list.</li>
     * <li>Registered: Include registered on-premises instances in the resulting
     * list.</li>
     * </ul>
     */
    private String registrationStatus;

    /**
     * <p>
     * The on-premises instance tags that will be used to restrict the
     * corresponding on-premises instance names returned.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<TagFilter> tagFilters;

    /**
     * <p>
     * An identifier returned from the previous list on-premises instances call.
     * It can be used to return the next set of on-premises instances in the
     * list.
     * </p>
     */
    private String nextToken;

    /**
     * Sets the registration status filter: {@code Deregistered} to include
     * deregistered instances, {@code Registered} to include registered ones.
     *
     * @param registrationStatus
     *        The registration status of the on-premises instances.
     * @see RegistrationStatus
     */
    public void setRegistrationStatus(String registrationStatus) {
        this.registrationStatus = registrationStatus;
    }

    /**
     * Returns the registration status filter ({@code Deregistered} or
     * {@code Registered}).
     *
     * @return The registration status of the on-premises instances.
     * @see RegistrationStatus
     */
    public String getRegistrationStatus() {
        return this.registrationStatus;
    }

    /**
     * Fluent variant of {@link #setRegistrationStatus(String)}.
     *
     * @param registrationStatus
     *        The registration status of the on-premises instances.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see RegistrationStatus
     */
    public ListOnPremisesInstancesRequest withRegistrationStatus(
            String registrationStatus) {
        setRegistrationStatus(registrationStatus);
        return this;
    }

    /**
     * Sets the registration status filter from the typed enum.
     * Note: this overload returns {@code void}; use
     * {@link #withRegistrationStatus(RegistrationStatus)} for chaining.
     *
     * @param registrationStatus
     *        The registration status of the on-premises instances.
     * @see RegistrationStatus
     */
    public void setRegistrationStatus(RegistrationStatus registrationStatus) {
        this.registrationStatus = registrationStatus.toString();
    }

    /**
     * Fluent variant of {@link #setRegistrationStatus(RegistrationStatus)}.
     *
     * @param registrationStatus
     *        The registration status of the on-premises instances.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see RegistrationStatus
     */
    public ListOnPremisesInstancesRequest withRegistrationStatus(
            RegistrationStatus registrationStatus) {
        setRegistrationStatus(registrationStatus);
        return this;
    }

    /**
     * Returns the tag filters used to restrict the instance names returned,
     * lazily initializing the backing list.
     *
     * @return The on-premises instance tags that will be used to restrict the
     *         corresponding on-premises instance names returned.
     */
    public java.util.List<TagFilter> getTagFilters() {
        if (tagFilters == null) {
            tagFilters = new com.amazonaws.internal.SdkInternalList<TagFilter>();
        }
        return tagFilters;
    }

    /**
     * Replaces the tag filters with a copy of the given collection
     * ({@code null} clears them).
     *
     * @param tagFilters
     *        The on-premises instance tags that will be used to restrict the
     *        corresponding on-premises instance names returned.
     */
    public void setTagFilters(java.util.Collection<TagFilter> tagFilters) {
        if (tagFilters == null) {
            this.tagFilters = null;
            return;
        }

        this.tagFilters = new com.amazonaws.internal.SdkInternalList<TagFilter>(
                tagFilters);
    }

    /**
     * Appends the given tag filters to the existing list (if any). Use
     * {@link #setTagFilters(java.util.Collection)} or
     * {@link #withTagFilters(java.util.Collection)} to override the existing
     * values instead.
     *
     * @param tagFilters
     *        The on-premises instance tags that will be used to restrict the
     *        corresponding on-premises instance names returned.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListOnPremisesInstancesRequest withTagFilters(
            TagFilter... tagFilters) {
        if (this.tagFilters == null) {
            setTagFilters(new com.amazonaws.internal.SdkInternalList<TagFilter>(
                    tagFilters.length));
        }
        for (TagFilter ele : tagFilters) {
            this.tagFilters.add(ele);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setTagFilters(java.util.Collection)}; replaces
     * the existing tag filters.
     *
     * @param tagFilters
     *        The on-premises instance tags that will be used to restrict the
     *        corresponding on-premises instance names returned.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListOnPremisesInstancesRequest withTagFilters(
            java.util.Collection<TagFilter> tagFilters) {
        setTagFilters(tagFilters);
        return this;
    }

    /**
     * Sets the pagination token returned from a previous list on-premises
     * instances call.
     *
     * @param nextToken
     *        An identifier returned from the previous list on-premises
     *        instances call. It can be used to return the next set of
     *        on-premises instances in the list.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return An identifier returned from the previous list on-premises
     *         instances call. It can be used to return the next set of
     *         on-premises instances in the list.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        An identifier returned from the previous list on-premises
     *        instances call. It can be used to return the next set of
     *        on-premises instances in the list.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public ListOnPremisesInstancesRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getRegistrationStatus() != null)
            sb.append("RegistrationStatus: " + getRegistrationStatus() + ",");
        if (getTagFilters() != null)
            sb.append("TagFilters: " + getTagFilters() + ",");
        if (getNextToken() != null)
            sb.append("NextToken: " + getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ListOnPremisesInstancesRequest == false)
            return false;
        ListOnPremisesInstancesRequest other = (ListOnPremisesInstancesRequest) obj;
        if (other.getRegistrationStatus() == null
                ^ this.getRegistrationStatus() == null)
            return false;
        if (other.getRegistrationStatus() != null
                && other.getRegistrationStatus().equals(
                        this.getRegistrationStatus()) == false)
            return false;
        if (other.getTagFilters() == null ^ this.getTagFilters() == null)
            return false;
        if (other.getTagFilters() != null
                && other.getTagFilters().equals(this.getTagFilters()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null
                && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime
                * hashCode
                + ((getRegistrationStatus() == null) ? 0
                        : getRegistrationStatus().hashCode());
        hashCode = prime * hashCode
                + ((getTagFilters() == null) ? 0 : getTagFilters().hashCode());
        hashCode = prime * hashCode
                + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public ListOnPremisesInstancesRequest clone() {
        return (ListOnPremisesInstancesRequest) super.clone();
    }
}
package com.google.api.ads.dfp.jaxws.v201408;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;

/**
 * A {@code ReconciliationReportRow} represents a single row in a reconciliation
 * report. A row is identified by its reconciliation report ID, line item ID,
 * creative ID, and proposal line item ID.
 *
 * <p>JAXB field-bound class for the {@code ReconciliationReportRow} complex
 * type of the DFP API (v201408). Every element of the underlying schema is
 * optional ({@code minOccurs="0"}), so each property may be {@code null}.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ReconciliationReportRow", propOrder = {
    "reconciliationReportId",
    "lineItemId",
    "creativeId",
    "orderId",
    "advertiserId",
    "proposalLineItemId",
    "proposalId",
    "billFrom",
    "rateType",
    "lineItemCostPerUnit",
    "lineItemContractedUnitsBought",
    "dfpVolume",
    "thirdPartyVolume",
    "manualVolume",
    "reconciledVolume",
    "contractedRevenue",
    "dfpRevenue",
    "thirdPartyRevenue",
    "manualRevenue",
    "reconciledRevenue",
    "comments"
})
public class ReconciliationReportRow {

    // Identifying keys of the row.
    protected Long reconciliationReportId;
    protected Long lineItemId;
    protected Long creativeId;
    protected Long orderId;
    protected Long advertiserId;
    protected Long proposalLineItemId;
    protected Long proposalId;
    // Enumerations are marshalled as xs:string.
    @XmlSchemaType(name = "string")
    protected BillFrom billFrom;
    @XmlSchemaType(name = "string")
    protected RateType rateType;
    protected Money lineItemCostPerUnit;
    protected Long lineItemContractedUnitsBought;
    // Volumes as reported by each source.
    protected Long dfpVolume;
    protected Long thirdPartyVolume;
    protected Long manualVolume;
    protected Long reconciledVolume;
    // Revenues as reported by each source.
    protected Money contractedRevenue;
    protected Money dfpRevenue;
    protected Money thirdPartyRevenue;
    protected Money manualRevenue;
    protected Money reconciledRevenue;
    protected String comments;

    /** @return the reconciliation report ID, or {@code null} if unset */
    public Long getReconciliationReportId() {
        return reconciliationReportId;
    }

    /** @param value the reconciliation report ID (may be {@code null}) */
    public void setReconciliationReportId(Long value) {
        this.reconciliationReportId = value;
    }

    /** @return the line item ID, or {@code null} if unset */
    public Long getLineItemId() {
        return lineItemId;
    }

    /** @param value the line item ID (may be {@code null}) */
    public void setLineItemId(Long value) {
        this.lineItemId = value;
    }

    /** @return the creative ID, or {@code null} if unset */
    public Long getCreativeId() {
        return creativeId;
    }

    /** @param value the creative ID (may be {@code null}) */
    public void setCreativeId(Long value) {
        this.creativeId = value;
    }

    /** @return the order ID, or {@code null} if unset */
    public Long getOrderId() {
        return orderId;
    }

    /** @param value the order ID (may be {@code null}) */
    public void setOrderId(Long value) {
        this.orderId = value;
    }

    /** @return the advertiser ID, or {@code null} if unset */
    public Long getAdvertiserId() {
        return advertiserId;
    }

    /** @param value the advertiser ID (may be {@code null}) */
    public void setAdvertiserId(Long value) {
        this.advertiserId = value;
    }

    /** @return the proposal line item ID, or {@code null} if unset */
    public Long getProposalLineItemId() {
        return proposalLineItemId;
    }

    /** @param value the proposal line item ID (may be {@code null}) */
    public void setProposalLineItemId(Long value) {
        this.proposalLineItemId = value;
    }

    /** @return the proposal ID, or {@code null} if unset */
    public Long getProposalId() {
        return proposalId;
    }

    /** @param value the proposal ID (may be {@code null}) */
    public void setProposalId(Long value) {
        this.proposalId = value;
    }

    /** @return the bill-from source, or {@code null} if unset */
    public BillFrom getBillFrom() {
        return billFrom;
    }

    /** @param value the bill-from source (may be {@code null}) */
    public void setBillFrom(BillFrom value) {
        this.billFrom = value;
    }

    /** @return the rate type, or {@code null} if unset */
    public RateType getRateType() {
        return rateType;
    }

    /** @param value the rate type (may be {@code null}) */
    public void setRateType(RateType value) {
        this.rateType = value;
    }

    /** @return the line item cost per unit, or {@code null} if unset */
    public Money getLineItemCostPerUnit() {
        return lineItemCostPerUnit;
    }

    /** @param value the line item cost per unit (may be {@code null}) */
    public void setLineItemCostPerUnit(Money value) {
        this.lineItemCostPerUnit = value;
    }

    /** @return the contracted units bought for the line item, or {@code null} if unset */
    public Long getLineItemContractedUnitsBought() {
        return lineItemContractedUnitsBought;
    }

    /** @param value the contracted units bought for the line item (may be {@code null}) */
    public void setLineItemContractedUnitsBought(Long value) {
        this.lineItemContractedUnitsBought = value;
    }

    /** @return the DFP-reported volume, or {@code null} if unset */
    public Long getDfpVolume() {
        return dfpVolume;
    }

    /** @param value the DFP-reported volume (may be {@code null}) */
    public void setDfpVolume(Long value) {
        this.dfpVolume = value;
    }

    /** @return the third-party-reported volume, or {@code null} if unset */
    public Long getThirdPartyVolume() {
        return thirdPartyVolume;
    }

    /** @param value the third-party-reported volume (may be {@code null}) */
    public void setThirdPartyVolume(Long value) {
        this.thirdPartyVolume = value;
    }

    /** @return the manually entered volume, or {@code null} if unset */
    public Long getManualVolume() {
        return manualVolume;
    }

    /** @param value the manually entered volume (may be {@code null}) */
    public void setManualVolume(Long value) {
        this.manualVolume = value;
    }

    /** @return the reconciled volume, or {@code null} if unset */
    public Long getReconciledVolume() {
        return reconciledVolume;
    }

    /** @param value the reconciled volume (may be {@code null}) */
    public void setReconciledVolume(Long value) {
        this.reconciledVolume = value;
    }

    /** @return the contracted revenue, or {@code null} if unset */
    public Money getContractedRevenue() {
        return contractedRevenue;
    }

    /** @param value the contracted revenue (may be {@code null}) */
    public void setContractedRevenue(Money value) {
        this.contractedRevenue = value;
    }

    /** @return the DFP-reported revenue, or {@code null} if unset */
    public Money getDfpRevenue() {
        return dfpRevenue;
    }

    /** @param value the DFP-reported revenue (may be {@code null}) */
    public void setDfpRevenue(Money value) {
        this.dfpRevenue = value;
    }

    /** @return the third-party-reported revenue, or {@code null} if unset */
    public Money getThirdPartyRevenue() {
        return thirdPartyRevenue;
    }

    /** @param value the third-party-reported revenue (may be {@code null}) */
    public void setThirdPartyRevenue(Money value) {
        this.thirdPartyRevenue = value;
    }

    /** @return the manually entered revenue, or {@code null} if unset */
    public Money getManualRevenue() {
        return manualRevenue;
    }

    /** @param value the manually entered revenue (may be {@code null}) */
    public void setManualRevenue(Money value) {
        this.manualRevenue = value;
    }

    /** @return the reconciled revenue, or {@code null} if unset */
    public Money getReconciledRevenue() {
        return reconciledRevenue;
    }

    /** @param value the reconciled revenue (may be {@code null}) */
    public void setReconciledRevenue(Money value) {
        this.reconciledRevenue = value;
    }

    /** @return the comments, or {@code null} if unset */
    public String getComments() {
        return comments;
    }

    /** @param value the comments (may be {@code null}) */
    public void setComments(String value) {
        this.comments = value;
    }

}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.connector.informationschema;

import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import io.trino.Session;
import io.trino.metadata.Metadata;
import io.trino.metadata.QualifiedTablePrefix;
import io.trino.security.AccessControl;
import io.trino.spi.Page;
import io.trino.spi.PageBuilder;
import io.trino.spi.block.Block;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.ConnectorPageSource;
import io.trino.spi.connector.ConnectorViewDefinition;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.security.AccessDeniedException;
import io.trino.spi.security.GrantInfo;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.security.TrinoPrincipal;
import io.trino.spi.type.Type;

import java.util.ArrayDeque;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Queue;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.IntStream;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Sets.union;
import static io.trino.SystemSessionProperties.isOmitDateTimeTypePrecision;
import static io.trino.connector.informationschema.InformationSchemaMetadata.defaultPrefixes;
import static io.trino.connector.informationschema.InformationSchemaMetadata.isTablesEnumeratingTable;
import static io.trino.metadata.MetadataListing.getViews;
import static io.trino.metadata.MetadataListing.listSchemas;
import static io.trino.metadata.MetadataListing.listTableColumns;
import static io.trino.metadata.MetadataListing.listTablePrivileges;
import static io.trino.metadata.MetadataListing.listTables;
import static io.trino.metadata.MetadataListing.listViews;
import static io.trino.spi.security.PrincipalType.USER;
import static io.trino.spi.type.TypeUtils.writeNativeValue;
import static io.trino.type.TypeUtils.getDisplayLabel;
import static java.util.Objects.requireNonNull;

/**
 * Page source that lazily produces the rows of a single {@code information_schema}
 * table (COLUMNS, TABLES, VIEWS, SCHEMATA, roles/privileges tables, ...) by
 * querying connector metadata one table-name prefix at a time.
 *
 * <p>Pages are built on demand in {@link #getNextPage()} and buffered in
 * {@code pages}; row production stops early once the optional row limit from the
 * table handle is reached.
 */
public class InformationSchemaPageSource
        implements ConnectorPageSource
{
    private final Session session;
    private final Metadata metadata;
    private final AccessControl accessControl;

    private final String catalogName;
    // Which information_schema table this source serves; selects the record shape.
    private final InformationSchemaTable table;
    // Lazily computed (memoized) iterator over the table-name prefixes to enumerate.
    private final Supplier<Iterator<QualifiedTablePrefix>> prefixIterator;
    private final OptionalLong limit;
    private final List<Type> types;

    // Completed pages waiting to be handed out by getNextPage().
    private final Queue<Page> pages = new ArrayDeque<>();
    private final PageBuilder pageBuilder;
    // Reorders/selects the built channels into the channels requested by `columns`.
    private final Function<Page, Page> projection;

    // Filters used only by the role_authorization_descriptors table.
    private final Optional<Set<String>> roles;
    private final Optional<Set<String>> grantees;

    private long recordCount;
    private long completedBytes;
    // Retained size of buffered pages (pageBuilder accounted separately).
    private long memoryUsageBytes;
    private boolean closed;

    public InformationSchemaPageSource(
            Session session,
            Metadata metadata,
            AccessControl accessControl,
            InformationSchemaTableHandle tableHandle,
            List<ColumnHandle> columns)
    {
        this.session = requireNonNull(session, "session is null");
        this.metadata = requireNonNull(metadata, "metadata is null");
        this.accessControl = requireNonNull(accessControl, "accessControl is null");
        requireNonNull(tableHandle, "tableHandle is null");
        requireNonNull(columns, "columns is null");

        catalogName = tableHandle.getCatalogName();
        table = tableHandle.getTable();

        // Memoized so schema listing happens at most once, on first use.
        prefixIterator = Suppliers.memoize(() -> {
            Set<QualifiedTablePrefix> prefixes = tableHandle.getPrefixes();
            if (tableHandle.getLimit().isEmpty()) {
                // no limit is used, therefore it doesn't make sense to split information schema query into smaller ones
                return prefixes.iterator();
            }
            if (isTablesEnumeratingTable(table)) {
                if (prefixes.equals(defaultPrefixes(catalogName))) {
                    // Expand the catch-all prefix into one prefix per schema so the
                    // limit can short-circuit the enumeration early.
                    prefixes = metadata.listSchemaNames(session, catalogName).stream()
                            .map(schema -> new QualifiedTablePrefix(catalogName, schema))
                            .collect(toImmutableSet());
                }
            }
            else {
                checkArgument(prefixes.equals(defaultPrefixes(catalogName)), "Catalog-wise tables have prefixes other than the default one");
            }
            return prefixes.iterator();
        });

        limit = tableHandle.getLimit();
        roles = tableHandle.getRoles();
        grantees = tableHandle.getGrantees();

        List<ColumnMetadata> columnMetadata = table.getTableMetadata().getColumns();

        types = columnMetadata.stream()
                .map(ColumnMetadata::getType)
                .collect(toImmutableList());

        pageBuilder = new PageBuilder(types);

        // Map column name -> channel index in the full table layout, then build a
        // projection that pulls out only the requested columns, in request order.
        Map<String, Integer> columnNameToChannel = IntStream.range(0, columnMetadata.size())
                .boxed()
                .collect(toImmutableMap(i -> columnMetadata.get(i).getName(), Function.identity()));

        List<Integer> channels = columns.stream()
                .map(columnHandle -> (InformationSchemaColumnHandle) columnHandle)
                .map(columnHandle -> columnNameToChannel.get(columnHandle.getColumnName()))
                .collect(toImmutableList());

        projection = page -> {
            Block[] blocks = new Block[channels.size()];
            for (int i = 0; i < blocks.length; i++) {
                blocks[i] = page.getBlock(channels.get(i));
            }
            return new Page(page.getPositionCount(), blocks);
        };
    }

    @Override
    public long getCompletedBytes()
    {
        return completedBytes;
    }

    @Override
    public long getReadTimeNanos()
    {
        // Read time is not tracked by this page source.
        return 0;
    }

    @Override
    public boolean isFinished()
    {
        // Finished when closed, or when nothing is buffered and no more rows can be produced.
        return closed || (pages.isEmpty() && (!prefixIterator.get().hasNext() || isLimitExhausted()));
    }

    @Override
    public Page getNextPage()
    {
        if (isFinished()) {
            return null;
        }

        if (pages.isEmpty()) {
            buildPages();
        }

        Page page = pages.poll();

        if (page == null) {
            return null;
        }

        // Page leaves the buffer: release its memory accounting before projecting.
        memoryUsageBytes -= page.getRetainedSizeInBytes();
        Page outputPage = projection.apply(page);
        completedBytes += outputPage.getSizeInBytes();
        return outputPage;
    }

    @Override
    public long getSystemMemoryUsage()
    {
        // Buffered pages plus whatever the in-progress page builder retains.
        return memoryUsageBytes + pageBuilder.getRetainedSizeInBytes();
    }

    @Override
    public void close()
    {
        closed = true;
    }

    /**
     * Produces rows prefix-by-prefix until at least one page is buffered,
     * the prefixes are exhausted, the limit is reached, or the source is closed.
     */
    private void buildPages()
    {
        while (pages.isEmpty() && prefixIterator.get().hasNext() && !closed && !isLimitExhausted()) {
            QualifiedTablePrefix prefix = prefixIterator.get().next();
            switch (table) {
                case COLUMNS:
                    addColumnsRecords(prefix);
                    break;
                case TABLES:
                    addTablesRecords(prefix);
                    break;
                case VIEWS:
                    addViewsRecords(prefix);
                    break;
                case SCHEMATA:
                    addSchemataRecords();
                    break;
                case TABLE_PRIVILEGES:
                    addTablePrivilegesRecords(prefix);
                    break;
                case ROLES:
                    addRolesRecords();
                    break;
                case APPLICABLE_ROLES:
                    addApplicableRolesRecords();
                    break;
                case ENABLED_ROLES:
                    addEnabledRolesRecords();
                    break;
                case ROLE_AUTHORIZATION_DESCRIPTORS:
                    addRoleAuthorizationDescriptorRecords();
                    break;
            }
        }
        // Flush the partially filled builder once no more rows will be added.
        if (!prefixIterator.get().hasNext() || isLimitExhausted()) {
            flushPageBuilder();
        }
    }

    private void addColumnsRecords(QualifiedTablePrefix prefix)
    {
        for (Map.Entry<SchemaTableName, List<ColumnMetadata>> entry : listTableColumns(session, metadata, accessControl, prefix).entrySet()) {
            SchemaTableName tableName = entry.getKey();
            // ordinal_position is 1-based and skips hidden columns.
            int ordinalPosition = 1;

            for (ColumnMetadata column : entry.getValue()) {
                if (column.isHidden()) {
                    continue;
                }
                // NOTE(review): column.getComment() is passed for two record
                // positions below — looks intentional (two comment-like output
                // columns), but confirm against the table's column layout.
                addRecord(
                        prefix.getCatalogName(),
                        tableName.getSchemaName(),
                        tableName.getTableName(),
                        column.getName(),
                        ordinalPosition,
                        null,
                        "YES",
                        getDisplayLabel(column.getType(), isOmitDateTimeTypePrecision(session)),
                        column.getComment(),
                        column.getExtraInfo(),
                        column.getComment());
                ordinalPosition++;
                if (isLimitExhausted()) {
                    return;
                }
            }
        }
    }

    private void addTablesRecords(QualifiedTablePrefix prefix)
    {
        Set<SchemaTableName> tables = listTables(session, metadata, accessControl, prefix);
        Set<SchemaTableName> views = listViews(session, metadata, accessControl, prefix);
        for (SchemaTableName name : union(tables, views)) {
            // if table and view names overlap, the view wins
            String type = views.contains(name) ? "VIEW" : "BASE TABLE";
            addRecord(
                    prefix.getCatalogName(),
                    name.getSchemaName(),
                    name.getTableName(),
                    type,
                    null);
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    private void addViewsRecords(QualifiedTablePrefix prefix)
    {
        for (Map.Entry<SchemaTableName, ConnectorViewDefinition> entry : getViews(session, metadata, accessControl, prefix).entrySet()) {
            addRecord(
                    prefix.getCatalogName(),
                    entry.getKey().getSchemaName(),
                    entry.getKey().getTableName(),
                    entry.getValue().getOriginalSql());
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    private void addSchemataRecords()
    {
        for (String schema : listSchemas(session, metadata, accessControl, catalogName)) {
            addRecord(catalogName, schema);
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    private void addTablePrivilegesRecords(QualifiedTablePrefix prefix)
    {
        List<GrantInfo> grants = ImmutableList.copyOf(listTablePrivileges(session, metadata, accessControl, prefix));
        for (GrantInfo grant : grants) {
            addRecord(
                    grant.getGrantor().map(TrinoPrincipal::getName).orElse(null),
                    grant.getGrantor().map(principal -> principal.getType().toString()).orElse(null),
                    grant.getGrantee().getName(),
                    grant.getGrantee().getType().toString(),
                    prefix.getCatalogName(),
                    grant.getSchemaTableName().getSchemaName(),
                    grant.getSchemaTableName().getTableName(),
                    grant.getPrivilegeInfo().getPrivilege().name(),
                    grant.getPrivilegeInfo().isGrantOption() ? "YES" : "NO",
                    grant.getWithHierarchy().map(withHierarchy -> withHierarchy ? "YES" : "NO").orElse(null));
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    private void addRolesRecords()
    {
        try {
            accessControl.checkCanShowRoles(session.toSecurityContext(), catalogName);
        }
        catch (AccessDeniedException exception) {
            // No permission: produce no rows rather than failing the query.
            return;
        }

        for (String role : metadata.listRoles(session, catalogName)) {
            addRecord(role);
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    private void addRoleAuthorizationDescriptorRecords()
    {
        try {
            accessControl.checkCanShowRoleAuthorizationDescriptors(session.toSecurityContext(), catalogName);
        }
        catch (AccessDeniedException exception) {
            // No permission: produce no rows rather than failing the query.
            return;
        }

        // Pushes down the role/grantee filters and the limit to the metadata layer.
        for (RoleGrant grant : metadata.listAllRoleGrants(session, catalogName, roles, grantees, limit)) {
            addRecord(
                    grant.getRoleName(),
                    null, // grantor
                    null, // grantor type
                    grant.getGrantee().getName(),
                    grant.getGrantee().getType().toString(),
                    grant.isGrantable() ? "YES" : "NO");
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    private void addApplicableRolesRecords()
    {
        for (RoleGrant grant : metadata.listApplicableRoles(session, new TrinoPrincipal(USER, session.getUser()), catalogName)) {
            addRecord(
                    grant.getGrantee().getName(),
                    grant.getGrantee().getType().toString(),
                    grant.getRoleName(),
                    grant.isGrantable() ? "YES" : "NO");
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    private void addEnabledRolesRecords()
    {
        for (String role : metadata.listEnabledRoles(session, catalogName)) {
            addRecord(role);
            if (isLimitExhausted()) {
                return;
            }
        }
    }

    /**
     * Appends one row to the page builder; values must match {@code types}
     * positionally. Flushes a full builder and counts the row toward the limit.
     */
    private void addRecord(Object... values)
    {
        pageBuilder.declarePosition();
        for (int i = 0; i < types.size(); i++) {
            writeNativeValue(types.get(i), pageBuilder.getBlockBuilder(i), values[i]);
        }
        if (pageBuilder.isFull()) {
            flushPageBuilder();
        }
        recordCount++;
    }

    private void flushPageBuilder()
    {
        if (!pageBuilder.isEmpty()) {
            pages.add(pageBuilder.build());
            // Track the buffered page's retained size until getNextPage() drains it.
            memoryUsageBytes += pageBuilder.getRetainedSizeInBytes();
            pageBuilder.reset();
        }
    }

    private boolean isLimitExhausted()
    {
        return limit.isPresent() && recordCount >= limit.getAsLong();
    }
}
/**
 * Copyright 2007-2016, Kaazing Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaazing.gateway.transport.wsn.specification.ws.acceptor;

import static org.kaazing.test.util.ITUtil.createRuleChain;

import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.kaazing.gateway.server.test.GatewayRule;
import org.kaazing.gateway.server.test.config.GatewayConfiguration;
import org.kaazing.gateway.server.test.config.builder.GatewayConfigurationBuilder;
import org.kaazing.k3po.junit.annotation.Specification;
import org.kaazing.k3po.junit.rules.K3poRule;

/**
 * RFC-6455, section 4.1 "Client-Side Requirements"
 * RFC-6455, section 4.2 "Server-Side Requirements"
 *
 * Integration tests driving the gateway's WebSocket opening handshake with
 * K3po scripts from the ws/opening specification suite. Each test method
 * simply runs the script named in its @Specification annotation; the
 * pass/fail behavior is defined entirely by the script.
 */
public class OpeningHandshakeIT {

    // Scripts are resolved relative to this root in the specification project.
    private final K3poRule k3po = new K3poRule().setScriptRoot("org/kaazing/specification/ws/opening");

    // Embedded gateway with two echo services; "*" allows any Origin.
    private GatewayRule gateway = new GatewayRule() {
        {
            // @formatter:off
            GatewayConfiguration configuration =
                    new GatewayConfigurationBuilder()
                        .service()
                            .accept("ws://localhost:8080/path")
                            .type("echo")
                            .crossOrigin()
                                .allowOrigin("*")
                            .done()
                        .done()
                        .service()
                            .accept("ws://localhost:8080/preflight")
                            .type("echo")
                            .crossOrigin()
                                .allowOrigin("*")
                            .done()
                        .done()
                    .done();
            // @formatter:on
            init(configuration);
        }
    };

    // Gateway must start before the K3po scripts connect to it.
    @Rule
    public TestRule chain = createRuleChain(gateway, k3po);

    // TODO:
    // proxy => HTTP CONNECT w/ optional authorization, auto-configuration via ws://, wss://
    // TLS (not SSL) w/ SNI for wss://

    // --- Handshakes expected to succeed ---

    @Test
    @Specification({
        "connection.established/handshake.request" })
    public void shouldEstablishConnection() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("This spec test will not work. Preflight cookie request not 'a thing'. Talk to engineers.")
    @Specification({
        "request.header.cookie/handshake.request" })
    public void shouldEstablishConnectionWithCookieRequestHeader() throws Exception {
        k3po.finish();
    }

    @Test
    @Specification({
        "request.headers.random.case/handshake.request" })
    public void shouldEstablishConnectionWithRandomCaseRequestHeaders() throws Exception {
        k3po.finish();
    }

    @Test
    @Specification({
        "response.headers.random.case/handshake.request" })
    public void shouldEstablishConnectionWithRandomCaseResponseHeaders() throws Exception {
        k3po.finish();
    }

    @Test
    @Specification({
        "request.header.origin/handshake.request" })
    public void shouldEstablishConnectionWithRequestHeaderOrigin() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Missing HTTP header")
    @Specification({
        "request.header.sec.websocket.protocol/handshake.request" })
    public void shouldEstablishConnectionWithRequestHeaderSecWebSocketProtocol() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Missing HTTP header")
    @Specification({
        "request.header.sec.websocket.extensions/handshake.request" })
    public void shouldEstablishConnectionWithRequestHeaderSecWebSocketExtensions() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("java.lang.InterruptedException")
    @Specification({
        "response.header.sec.websocket.extensions.partial.agreement/handshake.request" })
    public void shouldEstablishConnectionWithSomeExtensionsNegotiated() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Internal Error: Script not found")
    @Specification({
        "response.header.sec.websocket.extensions.reordered/handshake.request" })
    public void shouldEstablishConnectionWhenOrderOfExtensionsNegotiatedChanged() throws Exception {
        k3po.finish();
    }

    // --- Malformed handshakes expected to be rejected ---

    // Gateway sending payload
    @Test
    @Ignore("Gateway sending payload with 405 Not Allowed, <html><head></head><body><h1>405 Method Not Allowed</h1></body></html>")
    @Specification({
        "request.method.not.get/handshake.request" })
    public void shouldFailHandshakeWhenMethodNotGet() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Did not fail and should")
    @Specification({
        "request.version.not.http.1.1/handshake.request" })
    public void shouldFailHandshakeWhenVersionNotHttp11() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Did not fail and should")
    @Specification({
        "request.header.host.missing/handshake.request" })
    public void shouldFailHandshakeWhenRequestHeaderHostMissing() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Gateway sending payload with 400, <html><head></head><body><h1>400 Websocket Upgrade Failure</h1></body></html>")
    @Specification({
        "request.header.upgrade.missing/handshake.request" })
    public void shouldFailHandshakeWhenRequestHeaderUpgradeMissing() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Gateway sending payload with 404, <html><head></head><body><h1>404 Not Found</h1></body></html>")
    @Specification({
        "request.header.upgrade.not.websocket/handshake.request" })
    public void shouldFailHandshakeWhenRequestHeaderUpgradeNotWebSocket() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Gateway sending payload with 400, <html><head></head><body><h1>400 Websocket Upgrade Failure</h1></body></html>")
    @Specification({
        "request.header.connection.missing/handshake.request" })
    public void shouldFailHandshakeWhenRequestHeaderConnectionMissing() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("Gateway sending payload with 400 Bad Request, <html><head></head><body><h1>400 Bad Request</h1></body></html>")
    @Specification({
        "request.header.connection.not.upgrade/handshake.request" })
    public void shouldFailHandshakeWhenRequestHeaderConnectionNotUpgrade() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("'disconnected', not 'read closed'")
    @Specification({
        "request.header.sec.websocket.key.missing/handshake.request"})
    public void shouldFailHandshakeWhenRequestHeaderSecWebSocketKeyMissing() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("java.lang.InterruptedException")
    @Specification({
        "request.header.sec.websocket.key.not.16bytes.base64/handshake.request" })
    public void shouldFailHandshakeWhenRequestHeaderSecWebSocketKeyNot16BytesBase64() throws Exception {
        k3po.finish();
    }

    @Test
    @Ignore("'Missing HTTP header: Sec-WebSocket-Version', instead of 'read header ...'")
    @Specification({
        "request.header.sec.websocket.version.not.13/handshake.request" })
    public void shouldFailHandshakeWhenRequestHeaderSecWebSocketVersionNot13() throws Exception {
        k3po.finish();
    }

    @Test
    @Specification({
        "multiple.connections.established/handshake.requests" })
    public void shouldEstablishMultipleConnections() throws Exception {
        k3po.finish();
    }
}
/*
 * Copyright 2017-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.jvm.java.abi.source;

import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import com.facebook.buck.jvm.java.testutil.compiler.CompilerTreeApiTestRunner;
import com.facebook.buck.jvm.java.testutil.compiler.TestCompiler;
import com.google.common.base.Joiner;
import java.io.IOException;
import java.util.Collections;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import javax.annotation.processing.Completion;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.Processor;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import javax.tools.Diagnostic;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;

/**
 * Tests that an annotation {@link Processor} running inside the frontend-only
 * javac task sees a wrapped view of the compiler: a {@link ProcessingEnvironment}
 * exposing tree-backed {@link Elements} and type utilities, wrapped annotation
 * and root elements, and a {@link Messager} whose diagnostics point at source
 * locations. (Presumably exercising {@code TreeBackedProcessorWrapper} — the
 * wrapper itself is installed by {@link TestCompiler#useFrontendOnlyJavacTask()};
 * confirm there.)
 */
@RunWith(CompilerTreeApiTestRunner.class)
public class TreeBackedProcessorWrapperTest {
  @Rule public TestCompiler testCompiler = new TestCompiler();

  // Captured in the anonymous Processor's init() (see runTestProcessor) so
  // that the per-test lambdas and post-compile assertions can inspect them.
  private ProcessingEnvironment processingEnv;
  private Elements elements;
  private Messager messager;

  /** The wrapped environment reports the generating-abi option as {@code true}. */
  @Test
  public void testSourceAbiOptionPresent() throws IOException {
    runTestProcessor(
        (annotations, roundEnv) -> {
          assertTrue(
              Boolean.valueOf(
                  processingEnv
                      .getOptions()
                      .getOrDefault("com.facebook.buck.java.generating_abi", "false")));
          return false;
        });
  }

  /** {@link ProcessingEnvironment#getElementUtils()} returns the tree-backed implementation. */
  @Test
  public void testElementUtilsIsWrapped() throws IOException {
    runTestProcessor(
        (annotations, roundEnv) -> {
          assertTrue(processingEnv.getElementUtils() instanceof TreeBackedElements);
          return false;
        });
  }

  /** {@link ProcessingEnvironment#getTypeUtils()} returns the tree-backed implementation. */
  @Test
  public void testTypeUtilsIsWrapped() throws IOException {
    runTestProcessor(
        (annotations, roundEnv) -> {
          assertTrue(processingEnv.getTypeUtils() instanceof TreeBackedTypes);
          return false;
        });
  }

  /**
   * During processing rounds the {@code annotations} set contains the wrapped
   * FooAnno element; in the final (processingOver) round it is empty.
   */
  @Test
  public void testAnnotationsAreWrapped() throws IOException {
    runTestProcessor(
        (annotations, roundEnv) -> {
          if (!roundEnv.processingOver()) {
            TreeBackedTypeElement annotationType =
                (TreeBackedTypeElement) elements.getTypeElement("com.example.buck.FooAnno");

            assertThat(annotations, Matchers.contains(annotationType));
          } else {
            assertThat(annotations, Matchers.empty());
          }
          return false;
        });
  }

  /**
   * {@link RoundEnvironment#getRootElements()} yields the wrapped Foo and
   * FooAnno type elements during processing, and nothing in the final round.
   */
  @Test
  public void testRootElementsAreWrapped() throws IOException {
    runTestProcessor(
        (annotations, roundEnv) -> {
          if (!roundEnv.processingOver()) {
            TreeBackedTypeElement annotationType =
                (TreeBackedTypeElement) elements.getTypeElement("com.example.buck.FooAnno");
            TreeBackedTypeElement fooType =
                (TreeBackedTypeElement) elements.getTypeElement("com.example.buck.Foo");

            assertThat(
                roundEnv.getRootElements(),
                Matchers.containsInAnyOrder(annotationType, fooType));
          } else {
            assertThat(roundEnv.getRootElements(), Matchers.empty());
          }
          return false;
        });
  }

  /**
   * {@link RoundEnvironment#getElementsAnnotatedWith(TypeElement)} accepts a
   * wrapped annotation element and returns the wrapped annotated type.
   */
  @Test
  public void testGetElementsAnnotatedWith() throws IOException {
    runTestProcessor(
        (annotations, roundEnv) -> {
          TreeBackedTypeElement annotationType =
              (TreeBackedTypeElement) elements.getTypeElement("com.example.buck.FooAnno");
          TreeBackedTypeElement fooType =
              (TreeBackedTypeElement) elements.getTypeElement("com.example.buck.Foo");

          Set<? extends Element> annotatedElements =
              roundEnv.getElementsAnnotatedWith(annotationType);
          if (!roundEnv.processingOver()) {
            assertThat(annotatedElements, Matchers.contains(fooType));
          } else {
            assertThat(annotatedElements, Matchers.empty());
          }
          return false;
        });
  }

  /**
   * An ERROR printed via the wrapped {@link Messager} against a wrapped element
   * is attributed to the element's source position (Foo.java line 3, caret at
   * the class name) in the compiler's diagnostic output.
   */
  @Test
  public void testElementMessager() throws IOException {
    runTestProcessor(
        (annotations, roundEnv) -> {
          messager.printMessage(
              Diagnostic.Kind.ERROR, "Foo", elements.getTypeElement("com.example.buck.Foo"));
          return false;
        });

    // Strip the absolute path prefix so the expected message is stable.
    assertThat(
        testCompiler
            .getDiagnosticMessages()
            .stream()
            .map(message -> message.substring(message.indexOf("Foo.java")))
            .collect(Collectors.toList()),
        Matchers.contains(
            Joiner.on('\n').join("Foo.java:3: error: Foo", "public class Foo {}", " ^")));
  }

  /**
   * Compiles a tiny annotated class with the frontend-only javac task and an
   * anonymous {@link Processor} whose {@code process} delegates to
   * {@code processMethod}; {@code init} captures the (wrapped) environment into
   * the test fixture fields. Processing runs as part of
   * {@link TestCompiler#enter()}.
   *
   * @param processMethod invoked for every round with the annotation set and
   *     round environment; its return value is forwarded as the processor's
   *     claim result
   */
  private void runTestProcessor(
      BiFunction<Set<? extends TypeElement>, RoundEnvironment, Boolean> processMethod)
      throws IOException {
    testCompiler.useFrontendOnlyJavacTask();
    testCompiler.addSourceFileContents(
        "Foo.java",
        "package com.example.buck;",
        "@FooAnno",
        "public class Foo {}",
        "@interface FooAnno {}");

    testCompiler.setProcessors(
        Collections.singletonList(
            new Processor() {
              @Override
              public Set<String> getSupportedOptions() {
                return Collections.emptySet();
              }

              @Override
              public Set<String> getSupportedAnnotationTypes() {
                // Only claim the test annotation so rounds behave predictably.
                return Collections.singleton("com.example.buck.FooAnno");
              }

              @Override
              public SourceVersion getSupportedSourceVersion() {
                return SourceVersion.RELEASE_8;
              }

              @Override
              public void init(ProcessingEnvironment processingEnv) {
                // Capture the wrapped environment for the test body to assert on.
                TreeBackedProcessorWrapperTest.this.processingEnv = processingEnv;
                elements = processingEnv.getElementUtils();
                messager = processingEnv.getMessager();
              }

              @Override
              public Iterable<? extends Completion> getCompletions(
                  Element element,
                  AnnotationMirror annotation,
                  ExecutableElement member,
                  String userText) {
                // Completions are an IDE-only feature; nothing here should request them.
                fail("Should never be called");
                return null;
              }

              @Override
              public boolean process(
                  Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
                return processMethod.apply(annotations, roundEnv);
              }
            }));

    testCompiler.enter();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder.endpoint.dsl; import javax.annotation.Generated; import org.apache.camel.ExchangePattern; import org.apache.camel.builder.EndpointConsumerBuilder; import org.apache.camel.builder.EndpointProducerBuilder; import org.apache.camel.builder.endpoint.AbstractEndpointBuilder; import org.apache.camel.spi.ExceptionHandler; /** * The hazelcast-map component is used to access Hazelcast distributed map. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.EndpointDslMojo") public interface HazelcastMapEndpointBuilderFactory { /** * Builder for endpoint consumers for the Hazelcast Map component. */ public interface HazelcastMapEndpointConsumerBuilder extends EndpointConsumerBuilder { default AdvancedHazelcastMapEndpointConsumerBuilder advanced() { return (AdvancedHazelcastMapEndpointConsumerBuilder) this; } /** * To specify a default operation to use, if no operation header has * been provided. * * The option is a: * <code>org.apache.camel.component.hazelcast.HazelcastOperation</code> * type. 
* * Group: common */ default HazelcastMapEndpointConsumerBuilder defaultOperation( HazelcastOperation defaultOperation) { setProperty("defaultOperation", defaultOperation); return this; } /** * To specify a default operation to use, if no operation header has * been provided. * * The option will be converted to a * <code>org.apache.camel.component.hazelcast.HazelcastOperation</code> * type. * * Group: common */ default HazelcastMapEndpointConsumerBuilder defaultOperation( String defaultOperation) { setProperty("defaultOperation", defaultOperation); return this; } /** * The hazelcast instance reference which can be used for hazelcast * endpoint. * * The option is a: <code>com.hazelcast.core.HazelcastInstance</code> * type. * * Group: common */ default HazelcastMapEndpointConsumerBuilder hazelcastInstance( Object hazelcastInstance) { setProperty("hazelcastInstance", hazelcastInstance); return this; } /** * The hazelcast instance reference which can be used for hazelcast * endpoint. * * The option will be converted to a * <code>com.hazelcast.core.HazelcastInstance</code> type. * * Group: common */ default HazelcastMapEndpointConsumerBuilder hazelcastInstance( String hazelcastInstance) { setProperty("hazelcastInstance", hazelcastInstance); return this; } /** * The hazelcast instance reference name which can be used for hazelcast * endpoint. If you don't specify the instance reference, camel use the * default hazelcast instance from the camel-hazelcast instance. * * The option is a: <code>java.lang.String</code> type. * * Group: common */ default HazelcastMapEndpointConsumerBuilder hazelcastInstanceName( String hazelcastInstanceName) { setProperty("hazelcastInstanceName", hazelcastInstanceName); return this; } /** * Define if the endpoint will use a reliable Topic struct or not. * * The option is a: <code>boolean</code> type. 
* * Group: common */ default HazelcastMapEndpointConsumerBuilder reliable(boolean reliable) { setProperty("reliable", reliable); return this; } /** * Define if the endpoint will use a reliable Topic struct or not. * * The option will be converted to a <code>boolean</code> type. * * Group: common */ default HazelcastMapEndpointConsumerBuilder reliable(String reliable) { setProperty("reliable", reliable); return this; } /** * Allows for bridging the consumer to the Camel routing Error Handler, * which mean any exceptions occurred while the consumer is trying to * pickup incoming messages, or the likes, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer will use the org.apache.camel.spi.ExceptionHandler to deal * with exceptions, that will be logged at WARN or ERROR level and * ignored. * * The option is a: <code>boolean</code> type. * * Group: consumer */ default HazelcastMapEndpointConsumerBuilder bridgeErrorHandler( boolean bridgeErrorHandler) { setProperty("bridgeErrorHandler", bridgeErrorHandler); return this; } /** * Allows for bridging the consumer to the Camel routing Error Handler, * which mean any exceptions occurred while the consumer is trying to * pickup incoming messages, or the likes, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer will use the org.apache.camel.spi.ExceptionHandler to deal * with exceptions, that will be logged at WARN or ERROR level and * ignored. * * The option will be converted to a <code>boolean</code> type. * * Group: consumer */ default HazelcastMapEndpointConsumerBuilder bridgeErrorHandler( String bridgeErrorHandler) { setProperty("bridgeErrorHandler", bridgeErrorHandler); return this; } /** * Define the polling timeout of the Queue consumer in Poll mode. * * The option is a: <code>long</code> type. 
* * Group: consumer */ default HazelcastMapEndpointConsumerBuilder pollingTimeout( long pollingTimeout) { setProperty("pollingTimeout", pollingTimeout); return this; } /** * Define the polling timeout of the Queue consumer in Poll mode. * * The option will be converted to a <code>long</code> type. * * Group: consumer */ default HazelcastMapEndpointConsumerBuilder pollingTimeout( String pollingTimeout) { setProperty("pollingTimeout", pollingTimeout); return this; } /** * Define the Pool size for Queue Consumer Executor. * * The option is a: <code>int</code> type. * * Group: consumer */ default HazelcastMapEndpointConsumerBuilder poolSize(int poolSize) { setProperty("poolSize", poolSize); return this; } /** * Define the Pool size for Queue Consumer Executor. * * The option will be converted to a <code>int</code> type. * * Group: consumer */ default HazelcastMapEndpointConsumerBuilder poolSize(String poolSize) { setProperty("poolSize", poolSize); return this; } /** * Define the Queue Consumer mode: Listen or Poll. * * The option is a: * <code>org.apache.camel.component.hazelcast.queue.HazelcastQueueConsumerMode</code> type. * * Group: consumer */ default HazelcastMapEndpointConsumerBuilder queueConsumerMode( HazelcastQueueConsumerMode queueConsumerMode) { setProperty("queueConsumerMode", queueConsumerMode); return this; } /** * Define the Queue Consumer mode: Listen or Poll. * * The option will be converted to a * <code>org.apache.camel.component.hazelcast.queue.HazelcastQueueConsumerMode</code> type. * * Group: consumer */ default HazelcastMapEndpointConsumerBuilder queueConsumerMode( String queueConsumerMode) { setProperty("queueConsumerMode", queueConsumerMode); return this; } /** * To use concurrent consumers polling from the SEDA queue. * * The option is a: <code>int</code> type. 
* * Group: seda */ default HazelcastMapEndpointConsumerBuilder concurrentConsumers( int concurrentConsumers) { setProperty("concurrentConsumers", concurrentConsumers); return this; } /** * To use concurrent consumers polling from the SEDA queue. * * The option will be converted to a <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder concurrentConsumers( String concurrentConsumers) { setProperty("concurrentConsumers", concurrentConsumers); return this; } /** * Milliseconds before consumer continues polling after an error has * occurred. * * The option is a: <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder onErrorDelay( int onErrorDelay) { setProperty("onErrorDelay", onErrorDelay); return this; } /** * Milliseconds before consumer continues polling after an error has * occurred. * * The option will be converted to a <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder onErrorDelay( String onErrorDelay) { setProperty("onErrorDelay", onErrorDelay); return this; } /** * The timeout used when consuming from the SEDA queue. When a timeout * occurs, the consumer can check whether it is allowed to continue * running. Setting a lower value allows the consumer to react more * quickly upon shutdown. * * The option is a: <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder pollTimeout(int pollTimeout) { setProperty("pollTimeout", pollTimeout); return this; } /** * The timeout used when consuming from the SEDA queue. When a timeout * occurs, the consumer can check whether it is allowed to continue * running. Setting a lower value allows the consumer to react more * quickly upon shutdown. * * The option will be converted to a <code>int</code> type. 
* * Group: seda */ default HazelcastMapEndpointConsumerBuilder pollTimeout( String pollTimeout) { setProperty("pollTimeout", pollTimeout); return this; } /** * If set to true then the consumer runs in transaction mode, where the * messages in the seda queue will only be removed if the transaction * commits, which happens when the processing is complete. * * The option is a: <code>boolean</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder transacted( boolean transacted) { setProperty("transacted", transacted); return this; } /** * If set to true then the consumer runs in transaction mode, where the * messages in the seda queue will only be removed if the transaction * commits, which happens when the processing is complete. * * The option will be converted to a <code>boolean</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder transacted(String transacted) { setProperty("transacted", transacted); return this; } /** * If set to true the whole Exchange will be transfered. If header or * body contains not serializable objects, they will be skipped. * * The option is a: <code>boolean</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder transferExchange( boolean transferExchange) { setProperty("transferExchange", transferExchange); return this; } /** * If set to true the whole Exchange will be transfered. If header or * body contains not serializable objects, they will be skipped. * * The option will be converted to a <code>boolean</code> type. * * Group: seda */ default HazelcastMapEndpointConsumerBuilder transferExchange( String transferExchange) { setProperty("transferExchange", transferExchange); return this; } } /** * Advanced builder for endpoint consumers for the Hazelcast Map component. 
*/ public interface AdvancedHazelcastMapEndpointConsumerBuilder extends EndpointConsumerBuilder { default HazelcastMapEndpointConsumerBuilder basic() { return (HazelcastMapEndpointConsumerBuilder) this; } /** * To let the consumer use a custom ExceptionHandler. Notice if the * option bridgeErrorHandler is enabled then this option is not in use. * By default the consumer will deal with exceptions, that will be * logged at WARN or ERROR level and ignored. * * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code> * type. * * Group: consumer (advanced) */ default AdvancedHazelcastMapEndpointConsumerBuilder exceptionHandler( ExceptionHandler exceptionHandler) { setProperty("exceptionHandler", exceptionHandler); return this; } /** * To let the consumer use a custom ExceptionHandler. Notice if the * option bridgeErrorHandler is enabled then this option is not in use. * By default the consumer will deal with exceptions, that will be * logged at WARN or ERROR level and ignored. * * The option will be converted to a * <code>org.apache.camel.spi.ExceptionHandler</code> type. * * Group: consumer (advanced) */ default AdvancedHazelcastMapEndpointConsumerBuilder exceptionHandler( String exceptionHandler) { setProperty("exceptionHandler", exceptionHandler); return this; } /** * Sets the exchange pattern when the consumer creates an exchange. * * The option is a: <code>org.apache.camel.ExchangePattern</code> type. * * Group: consumer (advanced) */ default AdvancedHazelcastMapEndpointConsumerBuilder exchangePattern( ExchangePattern exchangePattern) { setProperty("exchangePattern", exchangePattern); return this; } /** * Sets the exchange pattern when the consumer creates an exchange. * * The option will be converted to a * <code>org.apache.camel.ExchangePattern</code> type. 
* * Group: consumer (advanced) */ default AdvancedHazelcastMapEndpointConsumerBuilder exchangePattern( String exchangePattern) { setProperty("exchangePattern", exchangePattern); return this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option is a: <code>boolean</code> type. * * Group: advanced */ default AdvancedHazelcastMapEndpointConsumerBuilder basicPropertyBinding( boolean basicPropertyBinding) { setProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option will be converted to a <code>boolean</code> type. * * Group: advanced */ default AdvancedHazelcastMapEndpointConsumerBuilder basicPropertyBinding( String basicPropertyBinding) { setProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option is a: <code>boolean</code> type. * * Group: advanced */ default AdvancedHazelcastMapEndpointConsumerBuilder synchronous( boolean synchronous) { setProperty("synchronous", synchronous); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option will be converted to a <code>boolean</code> type. * * Group: advanced */ default AdvancedHazelcastMapEndpointConsumerBuilder synchronous( String synchronous) { setProperty("synchronous", synchronous); return this; } } /** * Builder for endpoint producers for the Hazelcast Map component. 
*/ public interface HazelcastMapEndpointProducerBuilder extends EndpointProducerBuilder { default AdvancedHazelcastMapEndpointProducerBuilder advanced() { return (AdvancedHazelcastMapEndpointProducerBuilder) this; } /** * To specify a default operation to use, if no operation header has * been provided. * * The option is a: * <code>org.apache.camel.component.hazelcast.HazelcastOperation</code> * type. * * Group: common */ default HazelcastMapEndpointProducerBuilder defaultOperation( HazelcastOperation defaultOperation) { setProperty("defaultOperation", defaultOperation); return this; } /** * To specify a default operation to use, if no operation header has * been provided. * * The option will be converted to a * <code>org.apache.camel.component.hazelcast.HazelcastOperation</code> * type. * * Group: common */ default HazelcastMapEndpointProducerBuilder defaultOperation( String defaultOperation) { setProperty("defaultOperation", defaultOperation); return this; } /** * The hazelcast instance reference which can be used for hazelcast * endpoint. * * The option is a: <code>com.hazelcast.core.HazelcastInstance</code> * type. * * Group: common */ default HazelcastMapEndpointProducerBuilder hazelcastInstance( Object hazelcastInstance) { setProperty("hazelcastInstance", hazelcastInstance); return this; } /** * The hazelcast instance reference which can be used for hazelcast * endpoint. * * The option will be converted to a * <code>com.hazelcast.core.HazelcastInstance</code> type. * * Group: common */ default HazelcastMapEndpointProducerBuilder hazelcastInstance( String hazelcastInstance) { setProperty("hazelcastInstance", hazelcastInstance); return this; } /** * The hazelcast instance reference name which can be used for hazelcast * endpoint. If you don't specify the instance reference, camel use the * default hazelcast instance from the camel-hazelcast instance. * * The option is a: <code>java.lang.String</code> type. 
* * Group: common */ default HazelcastMapEndpointProducerBuilder hazelcastInstanceName( String hazelcastInstanceName) { setProperty("hazelcastInstanceName", hazelcastInstanceName); return this; } /** * Define if the endpoint will use a reliable Topic struct or not. * * The option is a: <code>boolean</code> type. * * Group: common */ default HazelcastMapEndpointProducerBuilder reliable(boolean reliable) { setProperty("reliable", reliable); return this; } /** * Define if the endpoint will use a reliable Topic struct or not. * * The option will be converted to a <code>boolean</code> type. * * Group: common */ default HazelcastMapEndpointProducerBuilder reliable(String reliable) { setProperty("reliable", reliable); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: <code>boolean</code> type. * * Group: producer */ default HazelcastMapEndpointProducerBuilder lazyStartProducer( boolean lazyStartProducer) { setProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. 
Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option will be converted to a <code>boolean</code> type. * * Group: producer */ default HazelcastMapEndpointProducerBuilder lazyStartProducer( String lazyStartProducer) { setProperty("lazyStartProducer", lazyStartProducer); return this; } /** * To use concurrent consumers polling from the SEDA queue. * * The option is a: <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder concurrentConsumers( int concurrentConsumers) { setProperty("concurrentConsumers", concurrentConsumers); return this; } /** * To use concurrent consumers polling from the SEDA queue. * * The option will be converted to a <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder concurrentConsumers( String concurrentConsumers) { setProperty("concurrentConsumers", concurrentConsumers); return this; } /** * Milliseconds before consumer continues polling after an error has * occurred. * * The option is a: <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder onErrorDelay( int onErrorDelay) { setProperty("onErrorDelay", onErrorDelay); return this; } /** * Milliseconds before consumer continues polling after an error has * occurred. * * The option will be converted to a <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder onErrorDelay( String onErrorDelay) { setProperty("onErrorDelay", onErrorDelay); return this; } /** * The timeout used when consuming from the SEDA queue. When a timeout * occurs, the consumer can check whether it is allowed to continue * running. Setting a lower value allows the consumer to react more * quickly upon shutdown. * * The option is a: <code>int</code> type. 
* * Group: seda */ default HazelcastMapEndpointProducerBuilder pollTimeout(int pollTimeout) { setProperty("pollTimeout", pollTimeout); return this; } /** * The timeout used when consuming from the SEDA queue. When a timeout * occurs, the consumer can check whether it is allowed to continue * running. Setting a lower value allows the consumer to react more * quickly upon shutdown. * * The option will be converted to a <code>int</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder pollTimeout( String pollTimeout) { setProperty("pollTimeout", pollTimeout); return this; } /** * If set to true then the consumer runs in transaction mode, where the * messages in the seda queue will only be removed if the transaction * commits, which happens when the processing is complete. * * The option is a: <code>boolean</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder transacted( boolean transacted) { setProperty("transacted", transacted); return this; } /** * If set to true then the consumer runs in transaction mode, where the * messages in the seda queue will only be removed if the transaction * commits, which happens when the processing is complete. * * The option will be converted to a <code>boolean</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder transacted(String transacted) { setProperty("transacted", transacted); return this; } /** * If set to true the whole Exchange will be transfered. If header or * body contains not serializable objects, they will be skipped. * * The option is a: <code>boolean</code> type. * * Group: seda */ default HazelcastMapEndpointProducerBuilder transferExchange( boolean transferExchange) { setProperty("transferExchange", transferExchange); return this; } /** * If set to true the whole Exchange will be transfered. If header or * body contains not serializable objects, they will be skipped. * * The option will be converted to a <code>boolean</code> type. 
* * Group: seda */ default HazelcastMapEndpointProducerBuilder transferExchange( String transferExchange) { setProperty("transferExchange", transferExchange); return this; } } /** * Advanced builder for endpoint producers for the Hazelcast Map component. */ public interface AdvancedHazelcastMapEndpointProducerBuilder extends EndpointProducerBuilder { default HazelcastMapEndpointProducerBuilder basic() { return (HazelcastMapEndpointProducerBuilder) this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option is a: <code>boolean</code> type. * * Group: advanced */ default AdvancedHazelcastMapEndpointProducerBuilder basicPropertyBinding( boolean basicPropertyBinding) { setProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option will be converted to a <code>boolean</code> type. * * Group: advanced */ default AdvancedHazelcastMapEndpointProducerBuilder basicPropertyBinding( String basicPropertyBinding) { setProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option is a: <code>boolean</code> type. * * Group: advanced */ default AdvancedHazelcastMapEndpointProducerBuilder synchronous( boolean synchronous) { setProperty("synchronous", synchronous); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option will be converted to a <code>boolean</code> type. 
     *
     * Group: advanced
     */
    default AdvancedHazelcastMapEndpointProducerBuilder synchronous(
            String synchronous) {
        setProperty("synchronous", synchronous);
        return this;
    }
}

/**
 * Builder for endpoint for the Hazelcast Map component.
 */
public interface HazelcastMapEndpointBuilder
        extends
            HazelcastMapEndpointConsumerBuilder,
            HazelcastMapEndpointProducerBuilder {
    // Switch to the advanced builder view; same underlying endpoint, the cast
    // is safe because the concrete builder implements both interfaces.
    default AdvancedHazelcastMapEndpointBuilder advanced() {
        return (AdvancedHazelcastMapEndpointBuilder) this;
    }
    /**
     * To specify a default operation to use, if no operation header has
     * been provided.
     *
     * The option is a:
     * <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
     * type.
     *
     * Group: common
     */
    default HazelcastMapEndpointBuilder defaultOperation(
            HazelcastOperation defaultOperation) {
        setProperty("defaultOperation", defaultOperation);
        return this;
    }
    /**
     * To specify a default operation to use, if no operation header has
     * been provided.
     *
     * The option will be converted to a
     * <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
     * type.
     *
     * Group: common
     */
    default HazelcastMapEndpointBuilder defaultOperation(
            String defaultOperation) {
        setProperty("defaultOperation", defaultOperation);
        return this;
    }
    /**
     * The hazelcast instance reference which can be used for hazelcast
     * endpoint.
     *
     * The option is a: <code>com.hazelcast.core.HazelcastInstance</code>
     * type.
     *
     * Group: common
     */
    default HazelcastMapEndpointBuilder hazelcastInstance(
            Object hazelcastInstance) {
        setProperty("hazelcastInstance", hazelcastInstance);
        return this;
    }
    /**
     * The hazelcast instance reference which can be used for hazelcast
     * endpoint.
     *
     * The option will be converted to a
     * <code>com.hazelcast.core.HazelcastInstance</code> type.
     *
     * Group: common
     */
    default HazelcastMapEndpointBuilder hazelcastInstance(
            String hazelcastInstance) {
        setProperty("hazelcastInstance", hazelcastInstance);
        return this;
    }
    /**
     * The hazelcast instance reference name which can be used for hazelcast
     * endpoint. If you don't specify the instance reference, Camel uses the
     * default hazelcast instance from the camel-hazelcast instance.
     *
     * The option is a: <code>java.lang.String</code> type.
     *
     * Group: common
     */
    default HazelcastMapEndpointBuilder hazelcastInstanceName(
            String hazelcastInstanceName) {
        setProperty("hazelcastInstanceName", hazelcastInstanceName);
        return this;
    }
    /**
     * Define if the endpoint will use a reliable Topic struct or not.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: common
     */
    default HazelcastMapEndpointBuilder reliable(boolean reliable) {
        setProperty("reliable", reliable);
        return this;
    }
    /**
     * Define if the endpoint will use a reliable Topic struct or not.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: common
     */
    default HazelcastMapEndpointBuilder reliable(String reliable) {
        setProperty("reliable", reliable);
        return this;
    }
    /**
     * To use concurrent consumers polling from the SEDA queue.
     *
     * The option is a: <code>int</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder concurrentConsumers(
            int concurrentConsumers) {
        setProperty("concurrentConsumers", concurrentConsumers);
        return this;
    }
    /**
     * To use concurrent consumers polling from the SEDA queue.
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder concurrentConsumers(
            String concurrentConsumers) {
        setProperty("concurrentConsumers", concurrentConsumers);
        return this;
    }
    /**
     * Milliseconds before consumer continues polling after an error has
     * occurred.
     *
     * The option is a: <code>int</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder onErrorDelay(int onErrorDelay) {
        setProperty("onErrorDelay", onErrorDelay);
        return this;
    }
    /**
     * Milliseconds before consumer continues polling after an error has
     * occurred.
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder onErrorDelay(String onErrorDelay) {
        setProperty("onErrorDelay", onErrorDelay);
        return this;
    }
    /**
     * The timeout used when consuming from the SEDA queue. When a timeout
     * occurs, the consumer can check whether it is allowed to continue
     * running. Setting a lower value allows the consumer to react more
     * quickly upon shutdown.
     *
     * The option is a: <code>int</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder pollTimeout(int pollTimeout) {
        setProperty("pollTimeout", pollTimeout);
        return this;
    }
    /**
     * The timeout used when consuming from the SEDA queue. When a timeout
     * occurs, the consumer can check whether it is allowed to continue
     * running. Setting a lower value allows the consumer to react more
     * quickly upon shutdown.
     *
     * The option will be converted to a <code>int</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder pollTimeout(String pollTimeout) {
        setProperty("pollTimeout", pollTimeout);
        return this;
    }
    /**
     * If set to true then the consumer runs in transaction mode, where the
     * messages in the seda queue will only be removed if the transaction
     * commits, which happens when the processing is complete.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder transacted(boolean transacted) {
        setProperty("transacted", transacted);
        return this;
    }
    /**
     * If set to true then the consumer runs in transaction mode, where the
     * messages in the seda queue will only be removed if the transaction
     * commits, which happens when the processing is complete.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder transacted(String transacted) {
        setProperty("transacted", transacted);
        return this;
    }
    /**
     * If set to true the whole Exchange will be transfered. If header or
     * body contains not serializable objects, they will be skipped.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder transferExchange(
            boolean transferExchange) {
        setProperty("transferExchange", transferExchange);
        return this;
    }
    /**
     * If set to true the whole Exchange will be transfered. If header or
     * body contains not serializable objects, they will be skipped.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: seda
     */
    default HazelcastMapEndpointBuilder transferExchange(
            String transferExchange) {
        setProperty("transferExchange", transferExchange);
        return this;
    }
}

/**
 * Advanced builder for endpoint for the Hazelcast Map component.
 */
public interface AdvancedHazelcastMapEndpointBuilder
        extends
            AdvancedHazelcastMapEndpointConsumerBuilder,
            AdvancedHazelcastMapEndpointProducerBuilder {
    // Return to the basic builder view for the same endpoint.
    default HazelcastMapEndpointBuilder basic() {
        return (HazelcastMapEndpointBuilder) this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: advanced
     */
    default AdvancedHazelcastMapEndpointBuilder basicPropertyBinding(
            boolean basicPropertyBinding) {
        setProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Whether the endpoint should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities.
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: advanced
     */
    default AdvancedHazelcastMapEndpointBuilder basicPropertyBinding(
            String basicPropertyBinding) {
        setProperty("basicPropertyBinding", basicPropertyBinding);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option is a: <code>boolean</code> type.
     *
     * Group: advanced
     */
    default AdvancedHazelcastMapEndpointBuilder synchronous(
            boolean synchronous) {
        setProperty("synchronous", synchronous);
        return this;
    }
    /**
     * Sets whether synchronous processing should be strictly used, or Camel
     * is allowed to use asynchronous processing (if supported).
     *
     * The option will be converted to a <code>boolean</code> type.
     *
     * Group: advanced
     */
    default AdvancedHazelcastMapEndpointBuilder synchronous(
            String synchronous) {
        setProperty("synchronous", synchronous);
        return this;
    }
}

/**
 * Proxy enum for
 * <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
 * enum.
 */
// NOTE(review): these constant names appear to be matched by name against
// camel's HazelcastOperation at runtime, so seeming typos (allAll,
// removevalue, setvalue, readOnceHeal) must not be "corrected" here alone —
// confirm against the camel-hazelcast version in use before renaming.
enum HazelcastOperation {
    put,
    delete,
    get,
    update,
    query,
    getAll,
    clear,
    putIfAbsent,
    allAll,
    removeAll,
    retainAll,
    evict,
    evictAll,
    valueCount,
    containsKey,
    containsValue,
    keySet,
    removevalue,
    increment,
    decrement,
    setvalue,
    destroy,
    compareAndSet,
    getAndAdd,
    add,
    offer,
    peek,
    poll,
    remainingCapacity,
    drainTo,
    removeIf,
    take,
    publish,
    readOnceHeal,
    readOnceTail,
    capacity;
}

/**
 * Proxy enum for
 * <code>org.apache.camel.component.hazelcast.queue.HazelcastQueueConsumerMode</code> enum.
 */
enum HazelcastQueueConsumerMode {
    listen,
    poll;
}

/**
 * Hazelcast Map (camel-hazelcast)
 * The hazelcast-map component is used to access Hazelcast distributed map.
 *
 * Category: cache,datagrid
 * Available as of version: 2.7
 * Maven coordinates: org.apache.camel:camel-hazelcast
 *
 * Syntax: <code>hazelcast-map:cacheName</code>
 *
 * Path parameter: cacheName (required)
 * The name of the cache
 */
default HazelcastMapEndpointBuilder hazelcastMap(String path) {
    // Local class so a single object can serve as both the basic and the
    // advanced builder; the scheme ("hazelcast-map") and path are fixed at
    // construction time.
    class HazelcastMapEndpointBuilderImpl extends AbstractEndpointBuilder implements HazelcastMapEndpointBuilder, AdvancedHazelcastMapEndpointBuilder {
        public HazelcastMapEndpointBuilderImpl(String path) {
            super("hazelcast-map", path);
        }
    }
    return new HazelcastMapEndpointBuilderImpl(path);
}
}
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.Request; import com.amazonaws.services.ec2.model.transform.ResetInstanceAttributeRequestMarshaller; /** * <p> * Contains the parameters for ResetInstanceAttribute. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ResetInstanceAttributeRequest extends AmazonWebServiceRequest implements Serializable, Cloneable, DryRunSupportedRequest<ResetInstanceAttributeRequest> { /** * <p> * The ID of the instance. * </p> */ private String instanceId; /** * <p> * The attribute to reset. * </p> * <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * </important> */ private String attribute; /** * Default constructor for ResetInstanceAttributeRequest object. Callers should use the setter or fluent setter * (with...) methods to initialize the object after creating it. */ public ResetInstanceAttributeRequest() { } /** * Constructs a new ResetInstanceAttributeRequest object. Callers should use the setter or fluent setter (with...) * methods to initialize any additional object members. * * @param instanceId * The ID of the instance. 
* @param attribute * The attribute to reset.</p> <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> */ public ResetInstanceAttributeRequest(String instanceId, String attribute) { setInstanceId(instanceId); setAttribute(attribute); } /** * Constructs a new ResetInstanceAttributeRequest object. Callers should use the setter or fluent setter (with...) * methods to initialize any additional object members. * * @param instanceId * The ID of the instance. * @param attribute * The attribute to reset.</p> <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> */ public ResetInstanceAttributeRequest(String instanceId, InstanceAttributeName attribute) { setInstanceId(instanceId); setAttribute(attribute.toString()); } /** * <p> * The ID of the instance. * </p> * * @param instanceId * The ID of the instance. */ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } /** * <p> * The ID of the instance. * </p> * * @return The ID of the instance. */ public String getInstanceId() { return this.instanceId; } /** * <p> * The ID of the instance. * </p> * * @param instanceId * The ID of the instance. * @return Returns a reference to this object so that method calls can be chained together. */ public ResetInstanceAttributeRequest withInstanceId(String instanceId) { setInstanceId(instanceId); return this; } /** * <p> * The attribute to reset. * </p> * <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. 
* </p> * </important> * * @param attribute * The attribute to reset.</p> <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * @see InstanceAttributeName */ public void setAttribute(String attribute) { this.attribute = attribute; } /** * <p> * The attribute to reset. * </p> * <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * </important> * * @return The attribute to reset.</p> <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * @see InstanceAttributeName */ public String getAttribute() { return this.attribute; } /** * <p> * The attribute to reset. * </p> * <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * </important> * * @param attribute * The attribute to reset.</p> <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * @return Returns a reference to this object so that method calls can be chained together. * @see InstanceAttributeName */ public ResetInstanceAttributeRequest withAttribute(String attribute) { setAttribute(attribute); return this; } /** * <p> * The attribute to reset. * </p> * <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. 
To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * </important> * * @param attribute * The attribute to reset.</p> <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * @see InstanceAttributeName */ public void setAttribute(InstanceAttributeName attribute) { this.attribute = attribute.toString(); } /** * <p> * The attribute to reset. * </p> * <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * </important> * * @param attribute * The attribute to reset.</p> <important> * <p> * You can only reset the following attributes: <code>kernel</code> | <code>ramdisk</code> | * <code>sourceDestCheck</code>. To change an instance attribute, use <a>ModifyInstanceAttribute</a>. * </p> * @return Returns a reference to this object so that method calls can be chained together. * @see InstanceAttributeName */ public ResetInstanceAttributeRequest withAttribute(InstanceAttributeName attribute) { setAttribute(attribute); return this; } /** * This method is intended for internal use only. Returns the marshaled request configured with additional * parameters to enable operation dry-run. */ @Override public Request<ResetInstanceAttributeRequest> getDryRunRequest() { Request<ResetInstanceAttributeRequest> request = new ResetInstanceAttributeRequestMarshaller().marshall(this); request.addParameter("DryRun", Boolean.toString(true)); return request; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getInstanceId() != null) sb.append("InstanceId: ").append(getInstanceId()).append(","); if (getAttribute() != null) sb.append("Attribute: ").append(getAttribute()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ResetInstanceAttributeRequest == false) return false; ResetInstanceAttributeRequest other = (ResetInstanceAttributeRequest) obj; if (other.getInstanceId() == null ^ this.getInstanceId() == null) return false; if (other.getInstanceId() != null && other.getInstanceId().equals(this.getInstanceId()) == false) return false; if (other.getAttribute() == null ^ this.getAttribute() == null) return false; if (other.getAttribute() != null && other.getAttribute().equals(this.getAttribute()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getInstanceId() == null) ? 0 : getInstanceId().hashCode()); hashCode = prime * hashCode + ((getAttribute() == null) ? 0 : getAttribute().hashCode()); return hashCode; } @Override public ResetInstanceAttributeRequest clone() { return (ResetInstanceAttributeRequest) super.clone(); } }
/* * Copyright (c) 2017 VMware Inc. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hillview.table; import com.google.gson.*; import org.hillview.dataset.api.IJson; import org.hillview.table.api.ContentsKind; import org.hillview.table.api.IAppendableColumn; import org.hillview.table.columns.BaseListColumn; import javax.annotation.Nullable; import java.lang.reflect.Type; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.security.InvalidParameterException; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Predicate; /** * A schema is an ordering of the columns, plus a map from a column name to a column description. * Column names are case-sensitive. */ public class Schema implements IJson { static final long serialVersionUID = 1; private final LinkedHashMap<String, ColumnDescription> columns; /* * Read below about how these variables are mutated even * if the schema is supposed to be immutable. * cachedColumnNames is also used as a boolean flag. */ @Nullable private List<String> cachedColumnNames; @Nullable private List<ContentsKind> cachedKinds; @Nullable private List<ColumnDescription> cachedDescriptions; @Nullable private String sourceFile; /** * Canonical name for schema files. Most useful when there is only one * schema file expected in a folder. 
*/ public static final String schemaFileName = "schema"; /** * Returns the first difference between the two schemas in a human-readable format. */ public String diff(Schema schema) { String prefix = ""; if (this.sourceFile != null && schema.sourceFile != null) prefix = this.sourceFile + " compared to " + schema.sourceFile + ": "; if (this.getColumnCount() != schema.getColumnCount()) return prefix + "column count differs: " + this.getColumnCount() + " vs. " + schema.getColumnCount(); List<ColumnDescription> cols = this.getColumnDescriptions(); List<ColumnDescription> ocols = schema.getColumnDescriptions(); for (int i=0; i < this.getColumnCount(); i++) { ColumnDescription coli = cols.get(i); ColumnDescription ocoli = ocols.get(i); if (coli.equals(ocoli)) continue; return prefix + "column " + i + " is different: " + coli + " vs. " + ocoli; } return prefix + "No difference"; } public static class Serializer implements JsonSerializer<Schema> { public JsonElement serialize(Schema schema, Type typeOfSchema, JsonSerializationContext context) { JsonArray result = new JsonArray(); for (String col: schema.columns.keySet()) { ColumnDescription cd = schema.getDescription(col); result.add(cd.toJsonTree()); } return result; } } public static class Deserializer implements JsonDeserializer<Schema> { /** * Legagy decoder of ColumnDescription. 
*/ static class StringPair { String name = ""; String kind = ""; } public Schema deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { Schema result = new Schema(); for (JsonElement e: json.getAsJsonArray()) { StringPair sp = gsonInstance.fromJson(e, StringPair.class); if (sp == null) throw new RuntimeException( "Could not decode column description from json " + e.toString()); if (sp.kind.equals("Category")) sp.kind = "String"; ColumnDescription cd = new ColumnDescription( sp.name, ContentsKind.valueOf(sp.kind)); result.append(cd); } return result; } } public static Schema empty = new Schema(); public Schema() { this.columns = new LinkedHashMap<String, ColumnDescription>(); this.cachedColumnNames = null; this.cachedKinds = null; this.cachedDescriptions = null; } public Schema(List<ColumnDescription> descriptions) { this.columns = new LinkedHashMap<String, ColumnDescription>(descriptions.size()); this.cachedColumnNames = null; this.cachedKinds = null; this.cachedDescriptions = null; for (ColumnDescription d: descriptions) this.append(d); this.seal(); } /** * This method mutates the schema. It should only be used * when there is a single reference to the schema - since schemas * are supposed to be immutable. This should be essentially * only used during schema construction, before any other * references are available. * @param desc Column to append to the schema. 
*/ public void append(final ColumnDescription desc) { if (this.cachedColumnNames != null) throw new RuntimeException("Mutating sealed schema"); desc.validate(); if (this.columns.containsKey(desc.name)) throw new InvalidParameterException("Column with name " + desc.name + " already exists"); this.columns.put(desc.name, desc); if (this.cachedColumnNames != null) throw new RuntimeException("Changing immutable schema"); } @SuppressWarnings("MethodDoesntCallSuperMethod") public Schema clone() { Schema result = new Schema(); for (ColumnDescription cd: this.getColumnDescriptions()) result.append(cd); return result; } public ColumnDescription getDescription(final String columnName) { return this.columns.get(columnName); } public int getColumnCount() { return this.columns.size(); } /** * This method is tricky: schemas are supposed to be immutable, * but this method mutates the schema. This must be thread-safe. * We rely on the users of the cached data to properly call seal(). */ private synchronized void seal() { if (this.cachedColumnNames != null) // This is a benign race return; List<String> cols = new ArrayList<String>(this.columns.size()); this.cachedKinds = new ArrayList<ContentsKind>(this.columns.size()); this.cachedDescriptions = new ArrayList<ColumnDescription>(this.columns.size()); for (Map.Entry<String, ColumnDescription> c: this.columns.entrySet()) { cols.add(c.getKey()); ColumnDescription desc = c.getValue(); this.cachedKinds.add(desc.kind); this.cachedDescriptions.add(desc); } // Important: this assignment must be made last this.cachedColumnNames = cols; } public List<String> getColumnNames() { if (this.cachedColumnNames == null) this.seal(); return this.cachedColumnNames; } public List<ContentsKind> getColumnKinds() { if (this.cachedColumnNames == null) this.seal(); assert this.cachedKinds != null; return this.cachedKinds; } public List<ColumnDescription> getColumnDescriptions() { if (this.cachedColumnNames == null) this.seal(); assert this.cachedDescriptions 
!= null; return this.cachedDescriptions; } public boolean containsColumnName(String columnName) { return columns.containsKey(columnName); } /** * Check whether this column name already exists. If it does, change it to be * unique. * @param columnName Column name that we plan to add to the schema. * @return A column name based on this one which is unique. */ public String generateColumnName(String columnName) { if (!this.containsColumnName(columnName)) return columnName; int counter = 0; while (true) { String newName = columnName + " (" + counter + ")"; if (!this.containsColumnName(newName)) return newName; counter++; } } /** * Generates a new Schema that contains only the subset of columns contained in the subSchema. * The columns are ordered as in the original schema. */ public Schema project(Predicate<String> keepByName) { final Schema projection = new Schema(); this.columns.values().stream().filter(cd -> keepByName.test(cd.name)).forEach(projection::append); return projection; } /** * Generates a column name not already in the schema, starting from the supplied prefix. 
*/ public String newColumnName(@Nullable String prefix) { if (prefix == null) prefix = "Column"; int i = 0; String name = prefix; while (true) { if (!this.columns.containsKey(name)) return name; name = prefix + i; ++i; } } @Override public String toString() { StringBuilder result = new StringBuilder(); String separator = ""; for (ColumnDescription c: this.columns.values()) { result.append(separator).append(c.toString()); separator = ", "; } return result.toString(); } @Override public boolean equals(final Object o) { if (this == o) return true; if ((o == null) || (getClass() != o.getClass())) return false; final Schema schema = (Schema) o; return this.columns.equals(schema.columns); } @Override public int hashCode() { return this.columns.hashCode(); } public ContentsKind getKind(final String colName){ return this.getDescription(colName).kind; } private static Schema fromJson(String json) { return IJson.gsonInstance.fromJson(json, Schema.class); } private static final ConcurrentHashMap<Path, Schema> loadedFiles = new ConcurrentHashMap<Path, Schema>(); public static Schema readFromJsonFile(Path file) { return loadedFiles.computeIfAbsent(file, p -> { try { String s = new String(Files.readAllBytes(p)); Schema result = Schema.fromJson(s); result.sourceFile = s; return result; } catch (Exception ex) { throw new RuntimeException(ex); } } ); } public static void purgeCache() { loadedFiles.clear(); } public void writeToJsonFile(Path file) { try { String text = this.toJson(); byte[] bytes = text.getBytes(StandardCharsets.UTF_8); Files.write(file, bytes, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); } catch (Exception ex) { throw new RuntimeException(ex); } } public IAppendableColumn[] createAppendableColumns() { IAppendableColumn[] cols = new IAppendableColumn[this.getColumnCount()]; int index = 0; for (ColumnDescription cd: this.columns.values()) { IAppendableColumn col = BaseListColumn.create(cd); cols[index++] = col; } return cols; } }
package net.nextpulse.jadmin.dao;

import com.google.common.base.Joiner;
import net.nextpulse.jadmin.ColumnDefinition;
import net.nextpulse.jadmin.FormPostEntry;
import org.apache.commons.dbutils.BasicRowProcessor;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * DAO implementation for resources backed by a SQL database.
 *
 * @author yholkamp
 */
public class GenericSQLDAO extends AbstractDAO {

  private static final Logger logger = LogManager.getLogger();
  // Name of the backing table. It is interpolated directly into SQL strings
  // below, so it must come from trusted configuration, never from user input.
  private final String tableName;
  // Connection source; a fresh connection is obtained per operation.
  private DataSource dataSource;

  public GenericSQLDAO(DataSource dataSource, String tableName) {
    this.dataSource = dataSource;
    this.tableName = tableName;
  }

  /**
   * @param keys primary key(s)
   * @return either an empty optional or one holding a DatabaseEntry matching the keys
   * @throws DataAccessException if an error occurs while accessing the database.
*/ @Override public Optional<DatabaseEntry> selectOne(Object[] keys) throws DataAccessException { logger.trace("Selecting one {}", tableName); Map<String, Object> editedObject = null; try(Connection conn = dataSource.getConnection()) { String conditions = resourceSchemaProvider.getKeyColumns().stream() .map(x -> String.format("%s = ?", x.getName())) .reduce((s, s2) -> s + " AND " + s2) .orElseThrow(() -> new DataAccessException("Could not generate SQL condition")); PreparedStatement statement = conn.prepareStatement(String.format("SELECT * FROM %s WHERE %s LIMIT 1", tableName, conditions)); for(int i = 1; i <= resourceSchemaProvider.getKeyColumns().size(); i++) { ColumnDefinition columnDefinition = resourceSchemaProvider.getKeyColumns().get(i - 1); setValue(statement, i, (String) keys[i - 1], columnDefinition, columnDefinition.getName()); } logger.debug("Executing statement {}", statement.toString()); ResultSet results = statement.executeQuery(); if(results.next()) { editedObject = new BasicRowProcessor().toMap(results); } } catch(SQLException e) { logger.error("Exception occurred while executing"); throw new DataAccessException(e); } return editedObject == null ? Optional.empty() : Optional.of(DatabaseEntry.buildFrom(editedObject)); } /** * @param offset number of objects to skip * @param count number of objects to retrieve * @param sortColumn column to sort the values by * @param sortDirection direction to sort, true for ascending, false for descending * @return list of entries of up to count long * @throws DataAccessException if an error occurs while accessing the database. 
*/ @Override public List<DatabaseEntry> selectMultiple(long offset, long count, String sortColumn, boolean sortDirection) throws DataAccessException { logger.trace("Selecting multiple {}, {} offset, {} count", tableName, offset, count); List<DatabaseEntry> rows = new ArrayList<>(); try(Connection conn = dataSource.getConnection()) { // TODO: only select columns that are displayed or part of the primary key String sorting = sortDirection ? "asc" : "desc"; String query = String.format("SELECT * FROM %s ORDER BY %s %s LIMIT %d OFFSET %d", tableName, sortColumn, sorting, count, offset); logger.trace("Formatted selectMultiple query: {}", query); PreparedStatement statement = conn.prepareStatement(query); ResultSet results = statement.executeQuery(); while(results.next()) { Map<String, Object> row = new BasicRowProcessor().toMap(results); rows.add(DatabaseEntry.buildFrom(row)); } } catch(SQLException e) { throw new DataAccessException(e); } return rows; } /** * @param postEntry unfiltered user submitted data, must be used with caution * @throws DataAccessException if an error occurs while accessing the database. 
*/ @Override public void insert(FormPostEntry postEntry) throws DataAccessException { logger.trace("Inserting a new {}", tableName); try(Connection conn = dataSource.getConnection()) { // construct the SQL query String query = createInsertStatement(postEntry); PreparedStatement statement = conn.prepareStatement(query); int index = 1; for(String columnName : postEntry.getKeyValues().keySet()) { setValue(statement, index++, postEntry.getKeyValues().get(columnName), getColumnDefinitions().get(columnName), columnName); } for(String columnName : postEntry.getValues().keySet()) { setValue(statement, index++, postEntry.getValues().get(columnName), getColumnDefinitions().get(columnName), columnName); } logger.debug("Prepared statement SQL: {}", query); int updatedRows = statement.executeUpdate(); if(updatedRows != 1) { throw new SQLException("Updated " + updatedRows + ", expected 1"); } } catch(SQLException e) { throw new DataAccessException(e); } } /** * @param postEntry unfiltered user submitted data, must be used with caution * @throws DataAccessException if an error occurs while accessing the database. */ @Override public void update(FormPostEntry postEntry) throws DataAccessException { logger.trace("Updating an existing {}", tableName); try(Connection conn = dataSource.getConnection()) { // construct the SQL query String query = createUpdateQuery(postEntry); logger.debug("Prepared statement SQL: {}", query); PreparedStatement statement = conn.prepareStatement(query); int index = 1; // first bind the SET field = ? portion for(String columnName : postEntry.getValues().keySet()) { setValue(statement, index++, postEntry.getValues().get(columnName), getColumnDefinitions().get(columnName), columnName); } // and next the WHERE field = ? 
part
// --- continuation of update(FormPostEntry) begun before this chunk (method head not visible here) ---
// Bind each key-column value; parameter indices continue from 'index' (set while binding the SET values).
for(String columnName : postEntry.getKeyValues().keySet()) {
    setValue(statement, index++, postEntry.getKeyValues().get(columnName), getColumnDefinitions().get(columnName), columnName);
}
logger.debug("Query: {}", statement.toString());
// Exactly one row must be affected; anything else indicates a bad key or duplicated rows.
int updatedRows = statement.executeUpdate();
if(updatedRows != 1) {
    throw new SQLException("Updated " + updatedRows + ", expected 1");
}
} catch(SQLException e) {
    // wrap the checked JDBC failure in the DAO's own exception type
    throw new DataAccessException(e);
}
}

/**
 * Returns the number of entries in the database of the resource.
 *
 * @return number of entries
 * @throws DataAccessException if an SQL exception occurred
 */
@Override
public int count() throws DataAccessException {
    try(Connection conn = dataSource.getConnection()) {
        // tableName is trusted configuration, not user input; keys/values are bound via '?'
        PreparedStatement statement = conn.prepareStatement(String.format("SELECT COUNT(*) FROM %s", tableName));
        ResultSet results = statement.executeQuery();
        // COUNT(*) always yields exactly one row
        results.next();
        return results.getInt(1);
    } catch(SQLException e) {
        throw new DataAccessException(e);
    }
}

/**
 * Deletes the row identified by the given key values.
 * Keys must be supplied in the same order as {@code resourceSchemaProvider.getKeyColumns()}.
 */
@Override
public void delete(Object... keys) throws DataAccessException {
    // NOTE(review): message says "Updating" but this is the delete path — looks copy-pasted
    logger.trace("Updating an existing {}", tableName);
    try(Connection conn = dataSource.getConnection()) {
        // construct the SQL query
        // build "k1 = ? AND k2 = ? ..." from the schema's key columns
        String conditions = resourceSchemaProvider.getKeyColumns().stream()
            .map(x -> String.format("%s = ?", x.getName()))
            .reduce((s, s2) -> s + " AND " + s2)
            .orElseThrow(() -> new DataAccessException("Could not generate SQL condition"));
        PreparedStatement statement = conn.prepareStatement(String.format("DELETE FROM %s WHERE %s", tableName, conditions));
        // bind the key values positionally (JDBC parameters are 1-based)
        for(int i = 1; i <= resourceSchemaProvider.getKeyColumns().size(); i++) {
            ColumnDefinition columnDefinition = resourceSchemaProvider.getKeyColumns().get(i - 1);
            setValue(statement, i, (String) keys[i - 1], columnDefinition, columnDefinition.getName());
        }
        logger.debug("Executing statement {}", statement.toString());
        // NOTE(review): return value is ignored — no check that a row was actually deleted
        boolean results = statement.execute();
    } catch(SQLException e) {
        throw new DataAccessException(e);
    }
}

/**
 * Creates an SQL update query for the provided postEntry.
 *
 * @param postEntry object to construct the update query for
 * @return update query with unbound parameters
 */
protected String createUpdateQuery(FormPostEntry postEntry) {
    // WHERE clause from the key columns, SET clause from the value columns
    String wherePortion = postEntry.getKeyValues().keySet().stream()
        .map(x -> x + " = ?")
        .reduce((s, s2) -> s + " AND " + s2).orElse("");
    String setPortion = postEntry.getValues().keySet().stream()
        .map(x -> x + " = ?")
        .reduce((s, s2) -> s + "," + s2).orElse("");
    return String.format("UPDATE %s SET %s WHERE %s", tableName, setPortion, wherePortion);
}

/**
 * Creates an SQL insert query for the provided postEntry.
 *
 * @param postEntry object to construct the insert query for
 * @return insert query with unbound parameters
 */
protected String createInsertStatement(FormPostEntry postEntry) {
    // obtain a list of all resource columns present in the post data
    // keys first, then values — setValue() binding elsewhere must follow the same order
    List<String> columnSet = new ArrayList<>(postEntry.getKeyValues().keySet());
    columnSet.addAll(postEntry.getValues().keySet());
    // one '?' placeholder per column
    String parameters = Joiner.on(",").join(Collections.nCopies(columnSet.size(), "?"));
    String parameterString = Joiner.on(",").join(columnSet);
    return String.format("INSERT INTO %s (%s) VALUES (%s)", tableName, parameterString, parameters);
}

/**
 * Query updater that attempts to use the most specific setX method based on the provided input.
 *
 * @param statement statement to fill
 * @param index index of the parameter to configure
 * @param value user-provided value
 * @param columnDefinition column definition, used to obtain type information
 * @param columnName name of the column being set
 * @throws DataAccessException exception that may be thrown by {@link PreparedStatement#setObject(int, Object)} and others
 */
protected void setValue(PreparedStatement statement, int index, String value, ColumnDefinition columnDefinition, String columnName) throws DataAccessException {
    if(columnDefinition == null) {
        throw new DataAccessException("Found no column definition for column " + columnName + ", value " + value);
    }
    try {
        if(StringUtils.isEmpty(value)) {
            // TODO: use setNull here
            // empty string is treated as SQL NULL for every column type
            logger.trace("Setting null for column {}", columnDefinition.getName());
            statement.setObject(index, null);
        } else {
            switch(columnDefinition.getType()) {
                case integer:
                    // NOTE(review): a non-numeric value raises NumberFormatException (unchecked), not DataAccessException
                    statement.setInt(index, Integer.valueOf(value));
                    break;
                case bool:
                    statement.setBoolean(index, Boolean.valueOf(value));
                    break;
                case datetime:
                    // TODO: handle input-to-date conversion
                    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    try {
                        Date date = format.parse(value);
                        statement.setDate(index, new java.sql.Date(date.getTime()));
                    } catch(ParseException e) {
                        // NOTE(review): parse failure is only logged — the parameter at 'index'
                        // is left unbound, so executeUpdate() will fail later with a less
                        // helpful SQLException
                        logger.error("Could not parse the provided datetime string: {}", value, e);
                    }
                    break;
                case string:
                case text:
                    statement.setString(index, value);
                    break;
                default:
                    // unknown type: fall back to driver-side conversion
                    logger.error("Unsupported column definition type {} found, setting without type checking", columnDefinition.getType());
                    statement.setObject(index, value);
                    break;
            }
        }
    } catch(SQLException e) {
        logger.error("Could not set {}.{} (type {}) to {}", tableName, columnDefinition.getName(), columnDefinition.getType(), value);
        throw new DataAccessException(e);
    }
}
}
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.edgent.test.providers.direct;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.edgent.execution.Configs;
import org.apache.edgent.execution.Job;
import org.apache.edgent.graph.Vertex;
import org.apache.edgent.oplet.Oplet;
import org.apache.edgent.oplet.core.PeriodicSource;
import org.apache.edgent.oplet.core.Pipe;
import org.apache.edgent.providers.direct.DirectProvider;
import org.apache.edgent.test.topology.TopologyAbstractTest;
import org.apache.edgent.topology.TStream;
import org.apache.edgent.topology.Topology;
import org.junit.Test;

import com.google.gson.JsonObject;

/**
 * Tests for {@link Job} lifecycle, naming, health and error reporting when
 * topologies are submitted through the {@link DirectProvider}.
 */
public class DirectJobTest extends DirectTopologyTestBase {

    /** Default job name is derived from the topology name. */
    @Test
    public void jobName0() throws Exception {
        String[] data = new String[] {};
        String topologyName = "topoName";
        Topology t = newTopology(topologyName);
        t.strings(data);
        Job job = awaitCompleteExecution(t);
        assertTrue(job.getName().startsWith(topologyName));
    }

    /** An explicit null JOB_NAME falls back to the topology-derived name. */
    @Test
    public void jobName1() throws Exception {
        String[] data = new String[] {};
        String topologyName = "topoName";
        Topology t = newTopology(topologyName);
        t.strings(data);
        JsonObject config = new JsonObject();
        config.addProperty(Configs.JOB_NAME, (String)null);
        Job job = awaitCompleteExecution(t, config);
        assertTrue(job.getName().startsWith(topologyName));
    }

    /** A non-null JOB_NAME is used verbatim. */
    @Test
    public void jobName2() throws Exception {
        String[] data = new String[] {};
        String jobName = "testJob";
        Topology t = newTopology();
        t.strings(data);
        JsonObject config = new JsonObject();
        config.addProperty(Configs.JOB_NAME, jobName);
        Job job = awaitCompleteExecution(t, config);
        assertEquals(jobName, job.getName());
    }

    /** A completed empty-source job stays RUNNING/HEALTHY and CLOSEs cleanly. */
    @Test
    public void jobDone0() throws Exception {
        String[] data = new String[] {};
        Topology t = newTopology();
        @SuppressWarnings("unused")
        TStream<String> s = t.strings(data);
        Job job = awaitCompleteExecution(t);

        assertEquals("job.getCurrentState() must be RUNNING",
                Job.State.RUNNING, job.getCurrentState());
        assertEquals("job.getHealth() must be HEALTHY",
                Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());

        job.stateChange(Job.Action.CLOSE);
        assertEquals("job.getCurrentState() must be CLOSED",
                Job.State.CLOSED, job.getCurrentState());
        assertEquals("job.getHealth() must be HEALTHY",
                Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());
    }

    /** Same as {@link #jobDone0()} but with a non-empty finite source. */
    @Test
    public void jobDone1() throws Exception {
        String[] data = new String[] {"a", "b", "c"};
        Topology t = newTopology();
        @SuppressWarnings("unused")
        TStream<String> s = t.strings(data);
        Job job = awaitCompleteExecution(t);

        assertEquals("job.getCurrentState() must be RUNNING",
                Job.State.RUNNING, job.getCurrentState());
        assertEquals("job.getHealth() must be HEALTHY",
                Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());

        job.stateChange(Job.Action.CLOSE);
        assertEquals("job.getCurrentState() must be CLOSED",
                Job.State.CLOSED, job.getCurrentState());
        assertEquals("job.getHealth() must be HEALTHY",
                Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());
    }

    /** All tuples of a large finite source are processed before completion. */
    @Test
    public void jobDone2() throws Exception {
        final int NUM_TUPLES = 1000000;
        Integer[] data = new Integer[NUM_TUPLES];
        AtomicInteger numTuples = new AtomicInteger();
        for (int i = 0; i < data.length; i++) {
            data[i] = i; // autoboxing; new Integer(i) is deprecated
        }
        Topology t = newTopology();
        TStream<Integer> ints = t.collection(Arrays.asList(data));
        ints.sink(tuple -> numTuples.incrementAndGet());
        Job job = awaitCompleteExecution(t);
        Thread.sleep(1500); // wait for numTuples visibility

        assertEquals(NUM_TUPLES, numTuples.get());
        assertEquals("job.getCurrentState() must be RUNNING",
                Job.State.RUNNING, job.getCurrentState());
        assertEquals("job.getHealth() must be HEALTHY",
                Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());

        job.stateChange(Job.Action.CLOSE);
        assertEquals("job.getCurrentState() must be CLOSED",
                Job.State.CLOSED, job.getCurrentState());
        assertEquals("job.getHealth() must be HEALTHY",
                Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());
    }

    /** A periodic source keeps producing tuples until the job is closed. */
    @Test
    public void jobPeriodicSource() throws Exception {
        Topology t = newTopology();
        AtomicInteger n = new AtomicInteger(0);
        @SuppressWarnings("unused")
        TStream<Integer> ints = t.poll(() -> n.incrementAndGet(), 100, TimeUnit.MILLISECONDS);
        Future<Job> fj = ((DirectProvider)getTopologyProvider()).submit(t);
        Job job = fj.get();
        assertEquals(Job.State.RUNNING, job.getCurrentState());
        Thread.sleep(TimeUnit.SECONDS.toMillis(2));

        assertTrue(n.get() > 0); // At least one tuple was processed
        job.stateChange(Job.Action.CLOSE);
        assertEquals(Job.State.CLOSED, job.getCurrentState());
    }

    /** Changing the period of a periodic source re-schedules its task. */
    @Test
    public void jobPeriodicSourceCancellation() throws Exception {
        Topology t = newTopology();
        AtomicInteger n = new AtomicInteger(0);
        @SuppressWarnings("unused")
        TStream<Integer> ints = t.poll(() -> n.incrementAndGet(), 500, TimeUnit.MILLISECONDS);

        // Get the source oplet
        Collection<Vertex<? extends Oplet<?, ?>, ?, ?>> vertices = t.graph().getVertices();
        PeriodicSource<?> src = null;
        for (Vertex<? extends Oplet<?, ?>, ?, ?> v : vertices) {
            Oplet<?,?> op = v.getInstance();
            assertTrue(op instanceof PeriodicSource);
            src = (PeriodicSource<?>) op;
            assertEquals(500, src.getPeriod());
            assertSame(TimeUnit.MILLISECONDS, src.getUnit());
        }

        // Submit job
        Future<Job> fj = ((DirectProvider)getTopologyProvider()).submit(t);
        Job job = fj.get();
        assertEquals(Job.State.RUNNING, job.getCurrentState());
        Thread.sleep(TimeUnit.SECONDS.toMillis(1));
        int tupleCount = n.get();
        assertTrue("Expected more tuples than "+ tupleCount, tupleCount > 0); // At least one tuple was processed
        assertEquals(Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());

        // Changing the period cancels the source's task and schedules new one
        src.setPeriod(100);
        Thread.sleep(TimeUnit.SECONDS.toMillis(1));
        // More tuples processed after resetting the period
        assertTrue("Expected more tuples than "+ n.get(), n.get() > 3*tupleCount);

        job.stateChange(Job.Action.CLOSE);
        assertEquals(Job.State.CLOSED, job.getCurrentState());
        assertEquals(Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());
    }

    /** A process (generate) source keeps producing tuples until closed. */
    @Test
    public void jobProcessSource() throws Exception {
        Topology t = newTopology();
        AtomicInteger n = new AtomicInteger(0);
        @SuppressWarnings("unused")
        TStream<Integer> ints = t.generate(() -> n.incrementAndGet());
        Future<Job> fj = ((DirectProvider)getTopologyProvider()).submit(t);
        Job job = fj.get();
        assertEquals(Job.State.RUNNING, job.getCurrentState());
        assertEquals(Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());
        Thread.sleep(TimeUnit.SECONDS.toMillis(2));

        assertTrue(n.get() > 0); // At least one tuple was processed
        job.stateChange(Job.Action.CLOSE);
        assertEquals(Job.State.CLOSED, job.getCurrentState());
        assertEquals(Job.Health.HEALTHY, job.getHealth());
        assertEquals("", job.getLastError());
    }

    /** complete() on a still-running periodic job times out; job stays healthy. */
    @Test(expected = TimeoutException.class)
    public void jobTimesOut() throws Exception {
        Topology t = newTopology();
        AtomicInteger n = new AtomicInteger(0);
        @SuppressWarnings("unused")
        TStream<Integer> ints = t.poll(() -> n.incrementAndGet(), 100, TimeUnit.MILLISECONDS);
        Future<Job> fj = ((DirectProvider)getTopologyProvider()).submit(t);
        Job job = fj.get();
        assertEquals(Job.State.RUNNING, job.getCurrentState());
        try {
            job.complete(700, TimeUnit.MILLISECONDS);
        } finally {
            assertTrue(n.get() > 0); // At least one tuple was processed
            assertEquals(Job.State.RUNNING, job.getCurrentState());
            assertEquals(Job.Health.HEALTHY, job.getHealth());
            assertEquals("", job.getLastError());
        }
    }

    /** An oplet failure on a periodic-source job is surfaced via health/lastError. */
    @Test(expected = ExecutionException.class)
    public void jobPeriodicSourceError() throws Exception {
        Topology t = newTopology();
        AtomicInteger n = new AtomicInteger(0);
        TStream<Integer> ints = t.poll(() -> n.incrementAndGet(), 100, TimeUnit.MILLISECONDS);
        ints.pipe(new FailedOplet<Integer>(5, 0));
        Future<Job> fj = ((DirectProvider)getTopologyProvider()).submit(t);
        Job job = fj.get();
        assertEquals(Job.State.RUNNING, job.getCurrentState());
        try {
            job.complete(10, TimeUnit.SECONDS);
        } finally {
            // RUNNING even though execution error
            assertEquals(Job.State.RUNNING, job.getCurrentState());
            assertEquals(Job.Health.UNHEALTHY, job.getHealth());
            assertEquals("java.lang.RuntimeException: Expected Test Exception",
                    job.getLastError());
        }
    }

    /** An oplet failure on a generate-source job is surfaced via health/lastError. */
    @Test(expected = ExecutionException.class)
    public void jobProcessSourceError() throws Exception {
        Topology t = newTopology();
        AtomicInteger n = new AtomicInteger(0);
        TStream<Integer> ints = t.generate(() -> n.incrementAndGet());
        ints.pipe(new FailedOplet<Integer>(12, 100));
        Future<Job> fj = ((DirectProvider)getTopologyProvider()).submit(t);
        Job job = fj.get();
        assertEquals(Job.State.RUNNING, job.getCurrentState());
        try {
            job.complete(10, TimeUnit.SECONDS);
        } finally {
            // RUNNING even though execution error
            assertEquals(Job.State.RUNNING, job.getCurrentState());
            assertEquals(Job.Health.UNHEALTHY, job.getHealth());
            assertEquals("java.lang.RuntimeException: Expected Test Exception",
                    job.getLastError());
        }
    }

    /** Submits with a default (null) config and waits for the job to complete. */
    private Job awaitCompleteExecution(Topology t) throws InterruptedException, ExecutionException {
        return awaitCompleteExecution(t, null);
    }

    /** Submits with the given config and blocks until the job completes. */
    private Job awaitCompleteExecution(Topology t, JsonObject config)
            throws InterruptedException, ExecutionException {
        Future<Job> fj = ((DirectProvider)getTopologyProvider()).submit(t, config);
        Job job = fj.get();
        job.complete();
        return job;
    }

    /**
     * Test oplet which fails after receiving a given number of tuples.
     * @param <T>
     */
    @SuppressWarnings("serial")
    private static class FailedOplet<T> extends Pipe<T,T> {
        private final int threshold;
        private final int sleepMillis;

        /**
         * Create test oplet.
         *
         * @param afterTuples number of tuples to receive before failing
         * @param sleepMillis milliseconds of sleep before processing each tuple
         */
        public FailedOplet(int afterTuples, int sleepMillis) {
            if (afterTuples < 0)
                throw new IllegalArgumentException("afterTuples="+afterTuples);
            if (sleepMillis < 0)
                throw new IllegalArgumentException("sleepMillis="+sleepMillis);
            this.threshold = afterTuples;
            this.sleepMillis = sleepMillis;
        }

        @Override
        public void close() throws Exception {
        }

        @Override
        public void accept(T tuple) {
            if (sleepMillis > 0) {
                sleep(sleepMillis);
            }
            injectError(threshold);
            submit(tuple);
        }

        private AtomicInteger count = new AtomicInteger(0);

        /** Throws once the {@code errorAt}-th tuple is seen. */
        protected void injectError(int errorAt) {
            if (count.getAndIncrement() == errorAt)
                throw new RuntimeException("Expected Test Exception");
        }

        protected static void sleep(long millis) {
            try {
                Thread.sleep(millis);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
        }
    }
}
package io.supercharge.sccameraview;

import android.content.Context;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.util.Log;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Camera preview/capture view backed by the legacy {@link Camera} API.
 * Based on https://github.com/googlesamples/android-MediaRecorder
 */
public class Camera1View extends BaseCameraView {

    protected Camera camera;
    private MediaRecorder mediaRecorder;
    private Camera.Size videoSize;
    private String cameraFlashMode;
    private String cameraFocusMode;

    public Camera1View(Context context) {
        super(context);
    }

    @Override
    public void startPreview() {
        // If the TextureView surface already exists, open immediately;
        // otherwise wait for the surface-available callback.
        if (isAvailable()) {
            openCamera();
        } else {
            setSurfaceTextureListener(surfaceTextureListener);
        }
    }

    @Override
    public void stopPreview() {
        releaseMediaRecorder();
        releaseCamera();
    }

    @Override
    void openCamera() {
        cameraId = getDefaultCameraId();
        camera = Camera.open(cameraId);
        Camera.Parameters parameters = camera.getParameters();

        // Preview and video use the size for the currently selected aspect ratio
        Camera.Size previewSize = ratioSizeList.get(selectedRatioIdx).getSize(camera);
        videoSize = ratioSizeList.get(selectedRatioIdx).getSize(camera);
        parameters.setPreviewSize(previewSize.width, previewSize.height);
        parameters.setPictureSize(previewSize.width, previewSize.height);

        // Only apply flash/focus modes the hardware actually supports
        if (parameters.getSupportedFlashModes() != null
                && parameters.getSupportedFlashModes().contains(cameraFlashMode)) {
            parameters.setFlashMode(cameraFlashMode);
        }
        if (parameters.getSupportedFocusModes() != null
                && parameters.getSupportedFocusModes().contains(cameraFocusMode)) {
            parameters.setFocusMode(cameraFocusMode);
        }

        parameters.setRotation(ORIENTATION_270);
        camera.setDisplayOrientation(ORIENTATION_90);
        camera.setParameters(parameters);

        try {
            camera.setPreviewTexture(getSurfaceTexture());
            camera.startPreview();
        } catch (IOException e) {
            Log.e(LOG_TAG, e.getMessage());
        }
    }

    @Override
    public void takePicture() {
        camera.takePicture(null, null, null, new Camera.PictureCallback() {
            @Override
            public void onPictureTaken(byte[] imageData, Camera camera) {
                // The camera is no longer needed once the JPEG data is delivered
                releaseCamera();
                saveImage(imageData);
            }
        });
    }

    @Override
    public void changeAspectRatio(int position) {
        if(!ratioSizeList.isEmpty()) {
            ASPECT_RATIO = ratioSizeList.get(position).getRatio();
        }
    }

    /** Toggles between front and back cameras and reloads the supported ratios. */
    public void switchCamera() {
        stopPreview();
        frontFacingCameraActive = !frontFacingCameraActive;
        ratioSizeList = new ArrayList<>();
        loadAspectRatios();
    }

    protected void releaseCamera() {
        if (camera != null) {
            camera.release();
            camera = null;
        }
    }

    private void releaseMediaRecorder() {
        if (mediaRecorder != null) {
            // clear recorder configuration
            mediaRecorder.reset();
            // release the recorder object
            mediaRecorder.release();
            mediaRecorder = null;
            if (camera != null) {
                // Lock camera for later use i.e taking it back from MediaRecorder.
                // MediaRecorder doesn't need it anymore and we will release it if the activity pauses.
                camera.lock();
            }
        }
    }

    @Override
    public void startRecordingVideo() {
        if (prepareVideoRecorder()) {
            mediaRecorder.start();
            recordingVideo = true;
        } else {
            releaseMediaRecorder();
        }
    }

    /**
     * Configures the {@link MediaRecorder} for video capture.
     *
     * @return true when the recorder was prepared successfully
     */
    private boolean prepareVideoRecorder() {
        mediaRecorder = new MediaRecorder();

        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraId, info);
        // Compensate for sensor mounting orientation
        if (info.orientation == SENSOR_ORIENTATION_DEFAULT_DEGREES) {
            mediaRecorder.setOrientationHint(ORIENTATION_90);
        } else {
            mediaRecorder.setOrientationHint(ORIENTATION_270);
        }

        // Step 1: Unlock and set camera to MediaRecorder
        camera.unlock();
        mediaRecorder.setCamera(camera);

        // Step 2: Set sources
        mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);

        // Step 3: Set a CamcorderProfile (requires API Level 8 or higher)
        CamcorderProfile profile = getCamcorderProfile();
        profile.videoFrameWidth = videoSize.width;
        profile.videoFrameHeight = videoSize.height;
        mediaRecorder.setProfile(profile);
        mediaRecorder.setVideoEncodingBitRate(BITRATE);

        // Step 4: Set output file
        mediaRecorder.setOutputFile(getOutputMediaFile(MEDIA_TYPE_VIDEO).toString());

        // Step 5: Prepare configured MediaRecorder
        try {
            mediaRecorder.prepare();
        } catch (IllegalStateException e) {
            Log.e(LOG_TAG, e.getMessage());
            releaseMediaRecorder();
            return false;
        } catch (IOException e) {
            Log.e(LOG_TAG, e.getMessage());
            releaseMediaRecorder();
            return false;
        }
        return true;
    }

    @SuppressWarnings("checkstyle:illegalcatch")
    @Override
    public void stopRecordingVideo() {
        try {
            mediaRecorder.stop(); // stop the recording
        } catch (RuntimeException e) {
            // RuntimeException is thrown when stop() is called immediately after start().
            // In this case the output file is not properly constructed ans should be deleted.
            Log.e(LOG_TAG, e.getMessage());
        }
        releaseMediaRecorder(); // release the MediaRecorder object
        camera.lock();          // take camera access back from MediaRecorder
        releaseCamera();
        recordingVideo = false;
        startPreview();
    }

    /**
     * Finds the id of the camera matching the currently requested facing.
     *
     * @return the camera id, or -1 when no camera with that facing exists
     */
    private int getDefaultCameraId() {
        int facing = frontFacingCameraActive
                ? Camera.CameraInfo.CAMERA_FACING_FRONT
                : Camera.CameraInfo.CAMERA_FACING_BACK;

        int numberOfCameras = Camera.getNumberOfCameras();
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        for (int i = 0; i < numberOfCameras; i++) {
            Camera.getCameraInfo(i, cameraInfo);
            if (cameraInfo.facing == facing) {
                // BUGFIX: return the actual camera id (i), not the facing constant.
                // The old code returned the constant, which only worked on devices
                // where camera ids happen to coincide with the facing values.
                return i;
            }
        }
        return -1;
    }

    @Override
    public void collectRatioSizes() {
        ratioSizeList.clear();
        camera = Camera.open(getDefaultCameraId());
        List<Camera.Size> previewSizes = camera.getParameters().getSupportedPreviewSizes();
        if (previewSizes != null) {
            List<Double> ratioList = new ArrayList<>();
            for (Camera.Size size : previewSizes) {
                double ratio = (double) size.width / (double) size.height;
                if (!ratioList.contains(ratio)) {
                    ratioList.add(ratio);
                    ratioSizeList.add(new AspectRatio(ratio, size.width, size.height));
                }
            }
        }
        // BUGFIX: release the probe camera. Leaving it open made the subsequent
        // Camera.open() in openCamera() fail with "camera in use".
        camera.release();
        camera = null;
    }

    public void setCameraFlashMode(String cameraFlashMode) {
        this.cameraFlashMode = cameraFlashMode;
    }

    public void setCameraFocusMode(String cameraFocusMode) {
        this.cameraFocusMode = cameraFocusMode;
    }
}
/*
 * Copyright 2012 Cisco Systems
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.activiti.karaf.commands;

import java.io.PrintWriter;
import java.text.DateFormat;
import java.util.Date;
import java.util.List;

import org.activiti.engine.HistoryService;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.history.HistoricProcessInstance;
import org.activiti.engine.repository.Deployment;
import org.activiti.engine.repository.ProcessDefinition;
import org.activiti.engine.runtime.Execution;
import org.activiti.engine.runtime.ProcessInstance;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.gogo.commands.Option;

/**
 *
 * @author Srinivasan Chikkala
 */
@Command(scope = "act", name = "list", description =
    "Displays information about BPMN active process instances, process definitions, history of process instances")
public class ListBPMCommand extends BPMCommand {

    @Option(name = "-pi", aliases = "--active",
            description = "Display information about all active process instances")
    private boolean active;

    @Option(name = "-pd", aliases = "--definitions",
            description = "Display information about all process definitions")
    private boolean definitions;

    @Option(name = "-h", aliases = "--history",
            description = "Display information about history of all process instances")
    private boolean history;

    @Option(name = "-d", aliases = "--deployments",
            description = "Display information about all BPMN deployments")
    private boolean deployments;

    /**
     * Prints the requested sections; when no flag is given, prints everything.
     */
    @Override
    protected Object doExecute() throws Exception {
        ProcessEngine pe = this.getProcessEngine();
        PrintWriter out = new PrintWriter(System.out, true);
        if (pe == null) {
            out.println("Process Engine NOT Found!");
            return null;
        }
        if (!(this.active || this.definitions || this.history || this.deployments)) {
            // none of them set, display everything
            // set all to true;
            this.active = this.definitions = this.history = this.deployments = true;
        }
        if (this.deployments) {
            RepositoryService repo = pe.getRepositoryService();
            printBPMNDeployments(out, repo);
        }
        if (this.definitions) {
            RepositoryService repo = pe.getRepositoryService();
            printProcessDefinitions(out, repo);
        }
        if (this.history) {
            HistoryService his = pe.getHistoryService();
            boolean printActive = !this.active; // if we show active process, dont print then in history
            printHistoricProcessInstances(out, his, printActive);
        }
        if (this.active) {
            RuntimeService rt = pe.getRuntimeService();
            printActiveProcessInstances(out, rt);
        }
        return null;
    }

    /** Formats a date with the locale's date+time format; empty string for null. */
    private String formatDate(Date date) {
        String dateTxt = "";
        if (date != null) {
            dateTxt = DateFormat.getDateTimeInstance().format(date);
        }
        return dateTxt;
    }

    /** Strips the OSGi "bundleresource:" scheme prefix for friendlier output. */
    private String formatBpmResource(String bpmResource) {
        if (bpmResource.startsWith("bundleresource:")) {
            return bpmResource.substring("bundleresource:".length());
        } else {
            return bpmResource;
        }
    }

    private void printBPMNDeployments(PrintWriter out, RepositoryService repo) {
        // NOTE: orderByDeploymenTime (sic) is the actual Activiti API method name
        List<Deployment> depList = repo.createDeploymentQuery().orderByDeploymenTime().asc().list();
        out.println();
        out.println("BPMN Deployments");
        out.println("----------------");
        if (depList.isEmpty()) {
            out.println("No BPMN Deployments Found.");
            return;
        }
        TextTable txtTable = new TextTable(3);
        txtTable.addHeaders("ID", "Name", "Deployment Time");
        for (Deployment dep : depList) {
            txtTable.addRow(dep.getId(), dep.getName(), formatDate(dep.getDeploymentTime()));
        }
        txtTable.print(out);
    }

    private void printProcessDefinitions(PrintWriter out, RepositoryService repo) {
        List<ProcessDefinition> pdList =
            repo.createProcessDefinitionQuery().orderByDeploymentId().asc().list();
        out.println();
        out.println("BPMN Process Definitions");
        out.println("-------------------------");
        if (pdList.isEmpty()) {
            out.println("No BPMN Process Definitions Found.");
            return;
        }
        TextTable txtTable = new TextTable(4);
        txtTable.addHeaders("Definition ID", "Name", "Ver", "Resource");
        for (ProcessDefinition pd : pdList) {
            // String.valueOf avoids the needless Integer boxing of the old code
            txtTable.addRow(pd.getId(), pd.getName(), String.valueOf(pd.getVersion()),
                    formatBpmResource(pd.getResourceName()));
        }
        txtTable.print(out);
    }

    /** Returns a comma-separated list of execution ids for a process instance. */
    private String getExecutions(RuntimeService rt, String pi) {
        List<Execution> executions = rt.createExecutionQuery()
            .processInstanceId(pi)
            .orderByProcessInstanceId().asc().list();
        StringBuilder bld = new StringBuilder();
        boolean first = true;
        for (Execution exec : executions) {
            if (!first) {
                bld.append(",");
            } else {
                first = false;
            }
            bld.append(exec.getId());
        }
        return bld.toString();
    }

    private void printActiveProcessInstances(PrintWriter out, RuntimeService rt) {
        List<ProcessInstance> piList =
            rt.createProcessInstanceQuery().orderByProcessInstanceId().asc().list();
        out.println();
        out.println("Active BPMN Process Instances");
        out.println("-----------------------------");
        if (piList.isEmpty()) {
            out.println("No Active BPMN Process Instances Found.");
            return;
        }
        TextTable txtTable = new TextTable(3);
        txtTable.addHeaders("Definition ID", "Instance ID", "Executions");
        for (ProcessInstance pi : piList) {
            txtTable.addRow(pi.getProcessDefinitionId(), pi.getProcessInstanceId(),
                    getExecutions(rt, pi.getProcessInstanceId()));
        }
        txtTable.print(out);
    }

    private void printHistoricProcessInstances(PrintWriter out, HistoryService his,
            boolean printActive) {
        List<HistoricProcessInstance> hpiList =
            his.createHistoricProcessInstanceQuery().orderByProcessDefinitionId().asc().list();
        out.println();
        out.println("History of BPMN Process Instances");
        out.println("---------------------------------");
        if (hpiList.isEmpty()) {
            out.println("No History on BPMN Processes.");
            return;
        }
        TextTable txtTable = new TextTable(4);
        txtTable.addHeaders("Definition ID", "Instance ID", "Start Time", "End Time");
        for (HistoricProcessInstance hpi : hpiList) {
            Date endTime = hpi.getEndTime();
            if (endTime == null && !printActive) {
                continue; // dont print active instance history if printActive is false - default.
            }
            txtTable.addRow(hpi.getProcessDefinitionId(), hpi.getId(),
                    formatDate(hpi.getStartTime()), formatDate(hpi.getEndTime()));
        }
        txtTable.print(out);
    }
}
package com.planet_ink.coffee_mud.Abilities.Druid; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.TrackingLibrary; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2014 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
@SuppressWarnings({"unchecked","rawtypes"})
public class Chant_FindMate extends Chant
{
	@Override public String ID() { return "Chant_FindMate"; }
	private final static String localizedName = CMLib.lang()._("Find Mate");
	@Override public String name() { return localizedName; }
	protected String displayText=_("(Tracking a mate)");
	@Override public String displayText(){ return displayText;}
	@Override protected int canAffectCode(){return CAN_MOBS;}
	@Override protected int canTargetCode(){return CAN_MOBS;}
	@Override public int classificationCode(){return Ability.ACODE_CHANT|Ability.DOMAIN_BREEDING;}
	@Override public int abstractQuality(){return Ability.QUALITY_OK_OTHERS;}
	@Override public long flags(){return Ability.FLAG_TRACKING;}

	// Path of rooms leading toward a potential mate; null until invoke() builds it.
	protected List<Room> theTrail=null;
	// Next direction to walk: -2 = idle, -1 = user stop, 999 = trail exhausted, -999 = terminal.
	public int nextDirection=-2;

	/**
	 * Per-tick driver: if a suitable mate is in the room, strip waist/leg wear and
	 * issue a forced MATE command; otherwise keep walking along the tracked trail.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(!super.tick(ticking,tickID))
			return false;
		if(tickID==Tickable.TICKID_MOB)
		{
			if((theTrail==null)
			||(affected == null)
			||(!(affected instanceof MOB)))
				return false;
			final MOB mob=(MOB)affected;
			if(mob.location()!=null)
			{
				MOB mate=null;
				for(int i=0;i<mob.location().numInhabitants();i++)
				{
					final MOB M=mob.location().fetchInhabitant(i);
					if(isSuitableMate(M,mob))
					{
						mate=M;
						break;
					}
				}
				if(mate!=null)
				{
					mob.tell(_("You peer longingly at @x1.",mate.name()));
					// remove anything worn on waist or legs; if removal fails, the chant ends
					Item I=mob.fetchFirstWornItem(Wearable.WORN_WAIST);
					if(I!=null)
						CMLib.commands().postRemove(mob,I,false);
					I=mob.fetchFirstWornItem(Wearable.WORN_LEGS);
					if(I!=null)
						CMLib.commands().postRemove(mob,I,false);
					if((mob.fetchFirstWornItem(Wearable.WORN_WAIST)!=null)
					||(mob.fetchFirstWornItem(Wearable.WORN_LEGS)!=null))
						unInvoke();
					mob.doCommand(CMParms.parse("MATE \""+mate.name()+"$\""),Command.METAFLAG_FORCED);
					unInvoke();
				}
			}
			if(nextDirection==-999)
				return true;
			if(nextDirection==999)
			{
				mob.tell(_("Your yearning for a mate seems to fade."));
				nextDirection=-2;
				unInvoke();
			}
			else
			if(nextDirection==-1)
			{
				mob.tell(_("You no longer want to continue."));
				nextDirection=-999;
				unInvoke();
			}
			else
			if(nextDirection>=0)
			{
				mob.tell(_("You want to continue @x1.",Directions.getDirectionName(nextDirection)));
				final Room nextRoom=mob.location().getRoomInDir(nextDirection);
				// only auto-walk within the same area; otherwise the chant ends
				if((nextRoom!=null)&&(nextRoom.getArea()==mob.location().getArea()))
				{
					final int dir=nextDirection;
					nextDirection=-2;
					CMLib.tracking().walk(mob,dir,false,false);
				}
				else
					unInvoke();
			}
		}
		return true;
	}

	@Override
	public void affectPhyStats(Physical affectedEnv, PhyStats affectableStats)
	{
		// while yearning for a mate, the affected mob cannot work
		affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_WORK);
		super.affectPhyStats(affectedEnv, affectableStats);
	}

	/**
	 * When the affected mob looks at its room, recompute the next direction
	 * along the stored trail.
	 */
	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		super.executeMsg(myHost,msg);
		if(!(affected instanceof MOB))
			return;
		final MOB mob=(MOB)affected;
		if((msg.amISource(mob))
		&&(msg.amITarget(mob.location()))
		&&(CMLib.flags().canBeSeenBy(mob.location(),mob))
		&&(msg.targetMinor()==CMMsg.TYP_LOOK))
			nextDirection=CMLib.tracking().trackNextDirectionFromHere(theTrail,mob.location(),true);
	}

	/**
	 * Returns true when {@code mate} is an acceptable breeding partner for
	 * {@code forMe}: opposite binary gender, breed-compatible races, not
	 * blocked by worn waist/leg gear, and visible to forMe.
	 */
	public boolean isSuitableMate(MOB mate, MOB forMe)
	{
		if(mate==forMe)
			return false;
		if((mate==null)||(forMe==null))
			return false;
		if(mate.charStats().getStat(CharStats.STAT_GENDER)==forMe.charStats().getStat(CharStats.STAT_GENDER))
			return false;
		if((mate.charStats().getStat(CharStats.STAT_GENDER)!='M')
		&&(mate.charStats().getStat(CharStats.STAT_GENDER)!='F'))
			return false;
		// FIX: the original tested mate's race for "Human" twice and asked whether
		// mate's race canBreedWith *its own* race (copy-paste of "mate" for "forMe");
		// both clauses should reference forMe's race.
		if(((mate.charStats().getMyRace().ID().equals("Human"))
		||(forMe.charStats().getMyRace().ID().equals("Human"))
		||(mate.charStats().getMyRace().canBreedWith(forMe.charStats().getMyRace())))
		&&(mate.fetchWornItems(Wearable.WORN_LEGS|Wearable.WORN_WAIST,(short)-2048,(short)0).size()==0)
		&&(CMLib.flags().canBeSeenBy(mate,forMe)))
			return true;
		return false;
	}

	/**
	 * Casts the chant: scans up to 50 rooms around the caster for rooms holding a
	 * suitable mate for the target, builds a trail to the best one, and attaches
	 * this tracking effect to the target. Re-invoking while any tracking effect
	 * is active cancels tracking instead.
	 */
	@Override
	public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
	{
		final MOB target=getTarget(mob,commands,givenTarget);
		if(target==null)
			return false;
		if((target.charStats().getStat(CharStats.STAT_GENDER)!='M')
		&&(target.charStats().getStat(CharStats.STAT_GENDER)!='F'))
		{
			mob.tell(_("@x1 is incapable of mating!",target.name(mob)));
			return false;
		}
		// a second invocation acts as a toggle: cancel any active tracking effects
		final List<Ability> V=CMLib.flags().flaggedAffects(mob,Ability.FLAG_TRACKING);
		for(final Ability A : V)
			A.unInvoke();
		if(V.size()>0)
		{
			target.tell(_("You stop tracking."));
			return true;
		}

		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		final boolean success=proficiencyCheck(mob,0,auto);

		TrackingLibrary.TrackingFlags flags;
		flags = new TrackingLibrary.TrackingFlags()
				.plus(TrackingLibrary.TrackingFlag.OPENONLY);
		// collect every nearby room containing a suitable mate for the target
		final Vector rooms=new Vector();
		List<Room> checkSet=CMLib.tracking().getRadiantRooms(mob.location(),flags,50);
		for (final Room R : checkSet)
		{
			if(R!=null)
			for(int i=0;i<R.numInhabitants();i++)
			{
				final MOB M=R.fetchInhabitant(i);
				if(isSuitableMate(M,target))
				{
					rooms.addElement(R);
					break;
				}
			}
		}
		checkSet=null;
		// rebuild the flags with stricter constraints for the actual walk
		flags = new TrackingLibrary.TrackingFlags()
				.plus(TrackingLibrary.TrackingFlag.OPENONLY)
				.plus(TrackingLibrary.TrackingFlag.NOEMPTYGRIDS)
				.plus(TrackingLibrary.TrackingFlag.NOAIR)
				.plus(TrackingLibrary.TrackingFlag.NOWATER);
		if(rooms.size()>0)
			theTrail=CMLib.tracking().findBastardTheBestWay(mob.location(),rooms,flags,50);

		if((success)&&(theTrail!=null))
		{
			theTrail.add(mob.location());

			// it worked, so build a copy of this ability,
			// and add it to the affects list of the
			// affected MOB.  Then tell everyone else
			// what happened.
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?null:_("^S<S-NAME> chant(s) to <T-NAMESELF>.^?"));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				beneficialAffect(mob,target,asLevel,0);
				final Chant_FindMate A=(Chant_FindMate)target.fetchEffect(ID());
				if(A!=null)
				{
					target.location().show(target,null,CMMsg.MSG_OK_VISUAL,_("<S-NAME> yearn(s) for a mate!"));
					A.makeLongLasting();
					A.nextDirection=CMLib.tracking().trackNextDirectionFromHere(theTrail,mob.location(),true);
					target.recoverPhyStats();
				}
			}
		}
		else
			beneficialWordsFizzle(mob,target,_("<S-NAME> chant(s) to <T-NAMESELF>, but nothing happen(s)."));

		// return whether it worked
		return success;
	}
}
/* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package hudson.logging; import com.google.common.annotations.VisibleForTesting; import com.thoughtworks.xstream.XStream; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.BulkChange; import hudson.Extension; import hudson.FilePath; import hudson.RestrictedSince; import hudson.Util; import hudson.XmlFile; import hudson.model.AbstractModelObject; import hudson.model.AutoCompletionCandidates; import hudson.model.Computer; import hudson.model.Saveable; import hudson.model.TaskListener; import hudson.model.listeners.SaveableListener; import hudson.remoting.Channel; import hudson.remoting.VirtualChannel; import hudson.slaves.ComputerListener; import hudson.util.CopyOnWriteList; import hudson.util.HttpResponses; import hudson.util.RingBufferLogHandler; import hudson.util.XStream2; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.text.Collator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.LogRecord; import java.util.logging.Logger; import javax.servlet.ServletException; import jenkins.model.Jenkins; import jenkins.security.MasterToSlaveCallable; import jenkins.util.MemoryReductionUtil; import net.sf.json.JSONObject; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.interceptor.RequirePOST; 
import org.kohsuke.stapler.verb.POST;

/**
 * Records a selected set of logs so that the system administrator
 * can diagnose a specific aspect of the system.
 *
 * TODO: still a work in progress.
 *
 * <h3>Access Control</h3>
 * {@link LogRecorder} is only visible for administrators and system readers, and this access control happens at
 * {@link jenkins.model.Jenkins#getLog()}, the sole entry point for binding {@link LogRecorder} to URL.
 *
 * @author Kohsuke Kawaguchi
 * @see LogRecorderManager
 */
public class LogRecorder extends AbstractModelObject implements Saveable {
    // volatile: doConfigSubmit() may rename the recorder while other threads read it
    private volatile String name;

    /**
     * No longer used.
     *
     * @deprecated use {@link #getLoggers()}
     */
    @Deprecated
    @Restricted(NoExternalUse.class)
    @RestrictedSince("TODO")
    public final transient CopyOnWriteList<Target> targets = new CopyOnWriteList<>();

    // current persisted list of (logger name, level) targets
    private List<Target> loggers = new ArrayList<>();

    private static final TargetComparator TARGET_COMPARATOR = new TargetComparator();

    @DataBoundConstructor
    public LogRecorder(String name) {
        this.name = name;
        // register it only once when constructed, and when this object dies
        // WeakLogHandler will remove it
        new WeakLogHandler(handler, Logger.getLogger(""));
    }

    // Deserialization hook: merges the deprecated 'targets' field with the current
    // 'loggers' field in both directions so old and new persisted forms stay in sync.
    private Object readResolve() {
        if (loggers == null) {
            loggers = new ArrayList<>();
        }

        List<Target> tempLoggers = new ArrayList<>(loggers);

        if (!targets.isEmpty()) {
            loggers.addAll(targets.getView());
        }
        if (!tempLoggers.isEmpty() && !targets.getView().equals(tempLoggers)) {
            targets.addAll(tempLoggers);
        }
        return this;
    }

    /** Returns the live list of configured targets (not a copy). */
    public List<Target> getLoggers() {
        return loggers;
    }

    public void setLoggers(List<Target> loggers) {
        this.loggers = loggers;
    }

    @Restricted(NoExternalUse.class)
    Target[] orderedTargets() {
        // will contain targets ordered by reverse name length (place specific targets at the beginning)
        Target[] ts = loggers.toArray(new Target[]{});

        Arrays.sort(ts, TARGET_COMPARATOR);

        return ts;
    }

    /**
     * Computes the set of logger-name prefixes worth offering for autocompletion:
     * every actual logger name, plus any package prefix shared by more than one logger.
     *
     * @param loggerNamesList names of all loggers known to the system
     * @return sorted set of candidate names/prefixes
     */
    @Restricted(NoExternalUse.class)
    @VisibleForTesting
    public static Set<String> getAutoCompletionCandidates(List<String> loggerNamesList) {
        Set<String> loggerNames = new HashSet<>(loggerNamesList);

        // now look for package prefixes that make sense to offer for autocompletion:
        // Only prefixes that match multiple loggers will be shown.
        // Example: 'org' will show 'org', because there's org.apache, org.jenkinsci, etc.
        // 'io' might only show 'io.jenkins.plugins' rather than 'io' if all loggers starting with 'io' start with 'io.jenkins.plugins'.
        HashMap<String, Integer> seenPrefixes = new HashMap<>();
        SortedSet<String> relevantPrefixes = new TreeSet<>();
        for (String loggerName : loggerNames) {
            String[] loggerNameParts = loggerName.split("[.]");

            String longerPrefix = null;
            // walk the prefixes from longest (the full name) to shortest
            for (int i = loggerNameParts.length; i > 0; i--) {
                String loggerNamePrefix = String.join(".", Arrays.copyOf(loggerNameParts, i));
                seenPrefixes.put(loggerNamePrefix, seenPrefixes.getOrDefault(loggerNamePrefix, 0) + 1);
                if (longerPrefix == null) {
                    relevantPrefixes.add(loggerNamePrefix); // actual logger name
                    longerPrefix = loggerNamePrefix;
                    continue;
                }

                // a shorter prefix is only interesting if it matches strictly more loggers
                if (seenPrefixes.get(loggerNamePrefix) > seenPrefixes.get(longerPrefix)) {
                    relevantPrefixes.add(loggerNamePrefix);
                }
                longerPrefix = loggerNamePrefix;
            }
        }

        return relevantPrefixes;
    }

    /**
     * Stapler endpoint backing the logger-name autocomplete box; every
     * space-separated part of {@code value} must match a candidate (case-insensitive).
     */
    @Restricted(NoExternalUse.class)
    public AutoCompletionCandidates doAutoCompleteLoggerName(@QueryParameter String value) {
        if (value == null) {
            return new AutoCompletionCandidates();
        }

        // get names of all actual loggers known to Jenkins
        Set<String> candidateNames = new LinkedHashSet<>(getAutoCompletionCandidates(Collections.list(LogManager.getLogManager().getLoggerNames())));

        for (String part : value.split("[ ]+")) {
            HashSet<String> partCandidates = new HashSet<>();
            String lowercaseValue = part.toLowerCase(Locale.ENGLISH);
            for (String loggerName : candidateNames) {
                if (loggerName.toLowerCase(Locale.ENGLISH).contains(lowercaseValue)) {
                    partCandidates.add(loggerName);
                }
            }
            candidateNames.retainAll(partCandidates);
        }

        AutoCompletionCandidates candidates = new AutoCompletionCandidates();
        candidates.add(candidateNames.toArray(MemoryReductionUtil.EMPTY_STRING_ARRAY));
        return candidates;
    }

    // Ring buffer receiving records; only the most specific matching target decides
    // whether a record is published (allows muting via a more specific, higher level).
    @Restricted(NoExternalUse.class)
    transient /*almost final*/ RingBufferLogHandler handler = new RingBufferLogHandler() {
        @Override
        public void publish(LogRecord record) {
            for (Target t : orderedTargets()) {
                Boolean match = t.matches(record);
                if (match == null) {
                    // domain does not match, so continue looking
                    continue;
                }

                if (match) {
                    // most specific logger matches, so publish
                    super.publish(record);
                }
                // most specific logger does not match, so don't publish
                // allows reducing log level for more specific loggers
                return;
            }
        }
    };

    /**
     * Logger that this recorder monitors, and its log level.
     * Just a pair of (logger name,level) with convenience methods.
     */
    public static final class Target {
        public final String name;
        // stored as the raw int value of the Level for serialization stability
        private final int level;
        private transient /* almost final*/ Logger logger;

        public Target(String name, Level level) {
            this(name, level.intValue());
        }

        public Target(String name, int level) {
            this.name = name;
            this.level = level;
        }

        @DataBoundConstructor
        public Target(String name, String level) {
            this(name, Level.parse(level));
        }

        public Level getLevel() {
            return Level.parse(String.valueOf(level));
        }

        public String getName() {
            return name;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Target target = (Target) o;
            return level == target.level && Objects.equals(name, target.name);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name, level);
        }

        @Deprecated
        public boolean includes(LogRecord r) {
            if (r.getLevel().intValue() < level)
                return false;       // below the threshold
            if (name.length() == 0) {
                return true; // like root logger, includes everything
            }
            String logName = r.getLoggerName();
            if (logName == null || !logName.startsWith(name))
                return false;       // not within this logger

            String rest = logName.substring(name.length());
            return rest.startsWith(".") || rest.length() == 0;
        }

        // Tri-state match: true/false when the record is in this target's name domain
        // (level sufficient / insufficient), null when the domain does not apply.
        @SuppressFBWarnings(value = "NP_BOOLEAN_RETURN_NULL", justification = "converting this to YesNoMaybe would break backward compatibility")
        public Boolean matches(LogRecord r) {
            boolean levelSufficient = r.getLevel().intValue() >= level;
            if (name.length() == 0) {
                return levelSufficient; // include if level matches
            }
            String logName = r.getLoggerName();
            if (logName == null || !logName.startsWith(name))
                return null;        // not in the domain of this logger

            String rest = logName.substring(name.length());
            if (rest.startsWith(".") || rest.length() == 0) {
                return levelSufficient; // include if level matches
            }
            return null;
        }

        public Logger getLogger() {
            if (logger == null) {
                logger = Logger.getLogger(name);
            }
            return logger;
        }

        /**
         * Makes sure that the logger passes through messages at the correct level to us.
         */
        public void enable() {
            Logger l = getLogger();
            if (!l.isLoggable(getLevel()))
                l.setLevel(getLevel());
            new SetLevel(name, getLevel()).broadcast();
        }

        public void disable() {
            getLogger().setLevel(null);
            new SetLevel(name, null).broadcast();
        }

    }

    // Orders targets by descending name length so the most specific target comes first.
    private static class TargetComparator implements Comparator<Target>, Serializable {

        private static final long serialVersionUID = 9285340752515798L;

        @Override
        public int compare(Target left, Target right) {
            return right.getName().length() - left.getName().length();
        }
    }

    // Remoting callable that applies a logger level on an agent JVM.
    private static final class SetLevel extends MasterToSlaveCallable<Void, Error> {
        /** known loggers (kept per agent), to avoid GC */
        @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
        private static final Set<Logger> loggers = new HashSet<>();
        private final String name;
        private final Level level;

        SetLevel(String name, Level level) {
            this.name = name;
            this.level = level;
        }

        @Override
        public Void call() throws Error {
            Logger logger = Logger.getLogger(name);
            loggers.add(logger);
            logger.setLevel(level);
            return null;
        }

        // Pushes this level change to every online agent; failures are logged, not fatal.
        void broadcast() {
            for (Computer c : Jenkins.get().getComputers()) {
                if (c.getName().length() > 0) { // i.e. not master
                    VirtualChannel ch = c.getChannel();
                    if (ch != null) {
                        try {
                            ch.call(this);
                        } catch (Exception x) {
                            Logger.getLogger(LogRecorder.class.getName()).log(Level.WARNING, "could not set up logging on " + c, x);
                        }
                    }
                }
            }
        }
    }

    // Re-applies all recorders' target levels to an agent when it comes online.
    @Extension
    @Restricted(NoExternalUse.class)
    public static final class ComputerLogInitializer extends ComputerListener {
        @Override
        public void preOnline(Computer c, Channel channel, FilePath root, TaskListener listener) throws IOException, InterruptedException {
            for (LogRecorder recorder : Jenkins.get().getLog().getRecorders()) {
                for (Target t : recorder.getLoggers()) {
                    channel.call(new SetLevel(t.name, t.getLevel()));
                }
            }
        }
    }

    @Override
    public String getDisplayName() {
        return name;
    }

    @Override
    public String getSearchUrl() {
        return Util.rawEncode(name);
    }

    public String getName() {
        return name;
    }

    public LogRecorderManager getParent() {
        return Jenkins.get().getLog();
    }

    /**
     * Accepts submission from the configuration page.
     */
    @POST
    public synchronized void doConfigSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);

        JSONObject src = req.getSubmittedForm();

        String newName = src.getString("name"), redirect = ".";
        XmlFile oldFile = null;
        if (!name.equals(newName)) {
            Jenkins.checkGoodName(newName);
            // remember the old config file so it can be deleted after the rename succeeds
            oldFile = getConfigFile();
            // rename
            List<LogRecorder> recorders = getParent().getRecorders();
            recorders.remove(new LogRecorder(name));
            this.name = newName;
            recorders.add(this);
            getParent().setRecorders(recorders); // ensure that legacy logRecorders field is synced on save
            redirect = "../" + Util.rawEncode(newName) + '/';
        }

        List<Target> newTargets = req.bindJSONToList(Target.class, src.get("loggers"));
        setLoggers(newTargets);

        save();
        if (oldFile != null) oldFile.delete();
        rsp.sendRedirect2(redirect);
    }

    /** Clears the captured log records held in the ring buffer. */
    @RequirePOST
    public HttpResponse doClear() throws IOException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);
        handler.clear();
        return HttpResponses.redirectToDot();
    }

    /**
     * Loads the settings from a file.
     */
    public synchronized void load() throws IOException {
        getConfigFile().unmarshal(this);
        loggers.forEach(Target::enable);
    }

    /**
     * Save the settings to a file.
     */
    @Override
    public synchronized void save() throws IOException {
        if (BulkChange.contains(this))   return;

        handlePluginUpdatingLegacyLogManagerMap();
        getConfigFile().write(this);
        loggers.forEach(Target::enable);
        SaveableListener.fireOnChange(this, getConfigFile());
    }

    // Two-way reconciliation between the deprecated logRecorders map on the parent
    // and its current recorder list, for plugins still mutating the legacy field.
    @SuppressWarnings("deprecation") // this is for compatibility
    private void handlePluginUpdatingLegacyLogManagerMap() {
        if (getParent().logRecorders.size() > getParent().getRecorders().size()) {
            for (LogRecorder logRecorder : getParent().logRecorders.values()) {
                if (!getParent().getRecorders().contains(logRecorder)) {
                    getParent().getRecorders().add(logRecorder);
                }
            }
        }
        if (getParent().getRecorders().size() > getParent().logRecorders.size()) {
            for (LogRecorder logRecorder : getParent().getRecorders()) {
                if (!getParent().logRecorders.containsKey(logRecorder.getName())) {
                    getParent().logRecorders.put(logRecorder.getName(), logRecorder);
                }
            }
        }
    }

    // Identity is by name only; level/target changes do not affect equality.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        LogRecorder that = (LogRecorder) o;
        return name.equals(that.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name);
    }

    /**
     * Deletes this recorder, then go back to the parent.
     */
    @RequirePOST
    public synchronized void doDoDelete(StaplerResponse rsp) throws IOException, ServletException {
        Jenkins.get().checkPermission(Jenkins.ADMINISTER);

        getConfigFile().delete();
        getParent().getRecorders().remove(new LogRecorder(name));
        // Disable logging for all our targets,
        // then reenable all other loggers in case any also log the same targets
        loggers.forEach(Target::disable);

        getParent().getRecorders().forEach(logRecorder -> logRecorder.getLoggers().forEach(Target::enable));
        rsp.sendRedirect2("..");
    }

    /**
     * RSS feed for log entries.
     */
    public void doRss(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        LogRecorderManager.doRss(req, rsp, getLogRecords());
    }

    /**
     * The file we save our configuration.
     */
    private XmlFile getConfigFile() {
        return new XmlFile(XSTREAM, new File(LogRecorderManager.configDir(), name + ".xml"));
    }

    /**
     * Gets a view of the log records.
     */
    public List<LogRecord> getLogRecords() {
        return handler.getView();
    }

    /**
     * Gets a view of log records per agent matching this recorder.
     * @return a map (sorted by display name) from computer to (nonempty) list of log records
     * @since 1.519
     */
    public Map<Computer, List<LogRecord>> getSlaveLogRecords() {
        Map<Computer, List<LogRecord>> result = new TreeMap<>(new Comparator<Computer>() {
            final Collator COLL = Collator.getInstance();

            @Override
            public int compare(Computer c1, Computer c2) {
                return COLL.compare(c1.getDisplayName(), c2.getDisplayName());
            }
        });
        for (Computer c : Jenkins.get().getComputers()) {
            if (c.getName().length() == 0) {
                continue; // master
            }
            List<LogRecord> recs = new ArrayList<>();
            try {
                for (LogRecord rec : c.getLogRecords()) {
                    for (Target t : loggers) {
                        if (t.includes(rec)) {
                            recs.add(rec);
                            break;
                        }
                    }
                }
            } catch (IOException | InterruptedException x) {
                // NOTE(review): agent log fetch failures are silently skipped here —
                // the agent simply contributes no records to the view.
                continue;
            }
            if (!recs.isEmpty()) {
                result.put(c, recs);
            }
        }
        return result;
    }

    /**
     * Thread-safe reusable {@link XStream}.
     */
    public static final XStream XSTREAM = new XStream2();

    static {
        XSTREAM.alias("log", LogRecorder.class);
        XSTREAM.alias("target", Target.class);
    }

    /**
     * Log levels that can be configured for {@link Target}.
     */
    public static List<Level> LEVELS =
            Arrays.asList(Level.ALL, Level.FINEST, Level.FINER, Level.FINE, Level.CONFIG, Level.INFO, Level.WARNING, Level.SEVERE, Level.OFF);
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.end2end; import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.StringReader; import java.math.BigDecimal; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.util.TestUtil; import org.apache.phoenix.util.SchemaUtil; import org.apache.phoenix.util.PropertiesUtil; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.util.PhoenixRuntime; import org.junit.Test; public class SkipScanQueryIT extends ParallelStatsDisabledIT { private String initIntInTable(Connection conn, List<Integer> data) throws SQLException { String tableName = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + tableName + " (" + " i INTEGER NOT NULL PRIMARY KEY)"; 
conn.createStatement().executeUpdate(ddl); // Test upsert correct values String query = "UPSERT INTO " + tableName + " VALUES(?)"; PreparedStatement stmt = conn.prepareStatement(query); for (Integer i : data) { stmt.setInt(1, i); stmt.execute(); } conn.commit(); return tableName; } private String initVarCharCrossProductInTable(Connection conn, List<String> c1, List<String> c2) throws SQLException { String tableName = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + tableName + " (" + " s1 VARCHAR, s2 VARCHAR CONSTRAINT pk PRIMARY KEY (s1,s2))"; conn.createStatement().executeUpdate(ddl); // Test upsert correct values String query = "UPSERT INTO " + tableName + " VALUES(?,?)"; PreparedStatement stmt = conn.prepareStatement(query); for (String s1 : c1) { for (String s2 : c2) { stmt.setString(1, s1); stmt.setString(2, s2); stmt.execute(); } } conn.commit(); return tableName; } private String initVarCharParallelListInTable(Connection conn, List<String> c1, List<String> c2) throws SQLException { String tableName = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + tableName + " (" + " s1 VARCHAR, s2 VARCHAR CONSTRAINT pk PRIMARY KEY (s1,s2))"; conn.createStatement().executeUpdate(ddl); // Test upsert correct values String query = "UPSERT INTO " + tableName + " VALUES(?,?)"; PreparedStatement stmt = conn.prepareStatement(query); for (int i = 0; i < c1.size(); i++) { stmt.setString(1, c1.get(i)); stmt.setString(2, i < c2.size() ? 
c2.get(i) : null); stmt.execute(); } conn.commit(); return tableName; } private static String UPSERT_SELECT_AFTER_UPSERT_STATEMENTS = "upsert into %s(c1, c2, c3, c4, v1, v2) values('1001', '91', 's1', '2013-09-26', 28397, 23541);\n" + "upsert into %s(c1, c2, c3, c4, v1, v2) values('1001', '91', 's2', '2013-09-23', 3369, null);\n"; private String initSelectAfterUpsertTable(Connection conn) throws Exception { String tableName = generateUniqueName(); String ddl = "create table if not exists " + tableName + " (" + "c1 VARCHAR NOT NULL," + "c2 VARCHAR NOT NULL," + "c3 VARCHAR NOT NULL," + "c4 VARCHAR NOT NULL," + "v1 integer," + "v2 integer " + "CONSTRAINT PK PRIMARY KEY (c1, c2, c3, c4)" + ")"; conn.createStatement().execute(ddl); // Test upsert correct values StringReader reader = new StringReader(String.format(UPSERT_SELECT_AFTER_UPSERT_STATEMENTS, tableName, tableName)); PhoenixRuntime.executeStatements(conn, reader, Collections.emptyList()); reader.close(); conn.commit(); return tableName; } @Test public void testSkipScanFilterQuery() throws Exception { String tableName = generateUniqueName(); String createTableDDL = "CREATE TABLE " + tableName + "(col1 VARCHAR," + "col2 VARCHAR," + "col3 VARCHAR," + "col4 VARCHAR," + "CONSTRAINT pk " + "PRIMARY KEY (col1,col2,col3,col4))"; String upsertQuery = "upsert into " + tableName + " values(?,?,?,?)"; String query = "SELECT col1, col2, col3, col4 FROM " + tableName + " WHERE col1 IN ('a','e','f') AND col2 = 'b' AND col4 = '1' "; String[] col1Values = { "a", "e.f", "f" }; Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); createTestTable(getUrl(), createTableDDL); Connection conn = DriverManager.getConnection(getUrl(), props); conn.setAutoCommit(true); try { PreparedStatement statement = conn.prepareStatement(upsertQuery); for (String col1Value : col1Values) { statement.setString(1, col1Value); statement.setString(2, "b"); statement.setString(3, ""); statement.setString(4, "1"); statement.execute(); } ResultSet rs 
= conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals(rs.getString(1), "a"); assertTrue(rs.next()); assertEquals(rs.getString(1), "f"); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testSelectAfterUpsertInQuery() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initSelectAfterUpsertTable(conn); try { String query; query = "SELECT case when sum(v2)*1.0/sum(v1) is null then 0 else sum(v2)*1.0/sum(v1) END AS val FROM " + tableName + " WHERE c1='1001' AND c2 = '91' " + "AND c3 IN ('s1','s2') AND c4='2013-09-24'"; ResultSet rs = conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals(0, rs.getInt(1)); } finally { conn.close(); } } @Test public void testInQuery() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); conn.setAutoCommit(false); String tableName = initIntInTable(conn,Arrays.asList(2,7,10)); try { String query; query = "SELECT i FROM " + tableName + " WHERE i IN (1,2,4,5,7,8,10)"; ResultSet rs = conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals(2, rs.getInt(1)); assertTrue(rs.next()); assertEquals(7, rs.getInt(1)); assertTrue(rs.next()); assertEquals(10, rs.getInt(1)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testVarCharParallelListInQuery() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); conn.setAutoCommit(false); String tableName = initVarCharParallelListInTable(conn,Arrays.asList("d","da","db"),Arrays.asList("m","mc","tt")); try { String query; query = "SELECT s1,s2 FROM " + tableName + " WHERE s1 IN ('a','b','da','db') AND s2 IN ('c','ma','m','mc','ttt','z')"; ResultSet rs = conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals("da", rs.getString(1)); assertEquals("mc", rs.getString(2)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testVarCharXInQuery() throws Exception { 
Connection conn = DriverManager.getConnection(getUrl()); conn.setAutoCommit(false); String tableName = initVarCharCrossProductInTable(conn,Arrays.asList("d","da","db"),Arrays.asList("m","mc","tt")); try { String query; query = "SELECT s1,s2 FROM " + tableName + " WHERE s1 IN ('a','b','da','db') AND s2 IN ('c','ma','m','mc','ttt','z')"; ResultSet rs = conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals("da", rs.getString(1)); assertEquals("m", rs.getString(2)); assertTrue(rs.next()); assertEquals("da", rs.getString(1)); assertEquals("mc", rs.getString(2)); assertTrue(rs.next()); assertEquals("db", rs.getString(1)); assertEquals("m", rs.getString(2)); assertTrue(rs.next()); assertEquals("db", rs.getString(1)); assertEquals("mc", rs.getString(2)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testVarCharXIntInQuery() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); conn.setAutoCommit(false); String tableName = initVarCharCrossProductInTable(conn,Arrays.asList("d","da","db"),Arrays.asList("m","mc","tt")); try { String query; query = "SELECT s1,s2 FROM " + tableName + " WHERE s1 IN ('a','b','da','db') AND s2 IN ('c','ma','m','mc','ttt','z') " + "AND s1 > 'd' AND s1 < 'db' AND s2 > 'm'"; ResultSet rs = conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals("da", rs.getString(1)); assertEquals("mc", rs.getString(2)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testPreSplitCompositeFixedKey() throws Exception { String tableName = generateUniqueName(); Connection conn = DriverManager.getConnection(getUrl()); try { conn.createStatement().execute("create table " + tableName + "(key_1 char(3) not null, key_2 char(4) not null, v varchar(8) CONSTRAINT pk PRIMARY KEY (key_1,key_2)) split on('000','100','200')"); conn.setAutoCommit(true); conn.createStatement().execute("upsert into " + tableName + " values('000','aaaa','value_1')"); 
conn.createStatement().execute("upsert into " + tableName + " values('000','aabb','value_2')"); conn.createStatement().execute("upsert into " + tableName + " values('100','aacc','value_3')"); conn.createStatement().execute("upsert into " + tableName + " values('100','aadd','value_4')"); conn.createStatement().execute("upsert into " + tableName + " values('200','aaee','value_5')"); conn.createStatement().execute("upsert into " + tableName + " values('201','aaff','value_6')"); ResultSet rs = conn.createStatement().executeQuery("select * from " + tableName + " where key_1>='000' and key_1<'200' and key_2>='aabb' and key_2<'aadd'"); assertTrue(rs.next()); assertEquals("000", rs.getString(1)); assertEquals("aabb", rs.getString(2)); assertEquals("value_2", rs.getString(3)); assertTrue(rs.next()); assertEquals("100", rs.getString(1)); assertEquals("aacc", rs.getString(2)); assertEquals("value_3", rs.getString(3)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testInWithDescKey() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = generateUniqueName(); try { conn.createStatement().execute("create table " + tableName + "(key_1 char(3) not null, key_2 char(4) not null, v varchar(8) CONSTRAINT pk PRIMARY KEY (key_1,key_2 desc))"); conn.setAutoCommit(true); conn.createStatement().execute("upsert into " + tableName + " values('000','aaaa','value_1')"); conn.createStatement().execute("upsert into " + tableName + " values('000','aabb','value_2')"); conn.createStatement().execute("upsert into " + tableName + " values('100','aacc','value_3')"); conn.createStatement().execute("upsert into " + tableName + " values('100','aadd','value_4')"); conn.createStatement().execute("upsert into " + tableName + " values('200','aaee','value_5')"); conn.createStatement().execute("upsert into " + tableName + " values('201','aaff','value_6')"); ResultSet rs = conn.createStatement().executeQuery("select * from " + tableName + " 
where key_1>='000' and key_1<'200' and key_2>='aabb' and key_2<'aadd'"); assertTrue(rs.next()); assertEquals("000", rs.getString(1)); assertEquals("aabb", rs.getString(2)); assertEquals("value_2", rs.getString(3)); assertTrue(rs.next()); assertEquals("100", rs.getString(1)); assertEquals("aacc", rs.getString(2)); assertEquals("value_3", rs.getString(3)); assertFalse(rs.next()); rs = conn.createStatement().executeQuery("select * from " + tableName + " where (key_1,key_2) in (('100','aacc'),('100','aadd'))"); assertTrue(rs.next()); assertEquals("100", rs.getString(1)); assertEquals("aadd", rs.getString(2)); assertEquals("value_4", rs.getString(3)); assertTrue(rs.next()); assertEquals("100", rs.getString(1)); assertEquals("aacc", rs.getString(2)); assertEquals("value_3", rs.getString(3)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testSkipScanIntersectionAtEnd() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = generateUniqueName(); conn.createStatement() .execute( "create table " + tableName + "(pk1 UNSIGNED_TINYINT NOT NULL, pk2 UNSIGNED_TINYINT NOT NULL, pk3 UNSIGNED_TINYINT NOT NULL, kv VARCHAR " + "CONSTRAINT pk PRIMARY KEY (pk1, pk2, pk3)) SPLIT ON ('" + Bytes.toString(new byte[] { 1, 1 }) + "', '" + Bytes.toString(new byte[] { 2, 1 }) + "', '" + Bytes.toString(new byte[] { 3, 1 }) + "')"); conn.createStatement().execute("upsert into " + tableName + " values (0, 1, 1, 'a')"); conn.createStatement().execute("upsert into " + tableName + " values (1, 1, 1, 'a')"); conn.createStatement().execute("upsert into " + tableName + " values (2, 1, 1, 'a')"); conn.createStatement().execute("upsert into " + tableName + " values (3, 1, 1, 'a')"); conn.commit(); ResultSet rs = conn.createStatement().executeQuery("select count(kv) from " + tableName + " where pk1 in (0, 1, 2, 3) AND pk2 = 1"); assertTrue(rs.next()); assertEquals(4, rs.getInt(1)); assertFalse(rs.next()); } @Test public void 
testSkipScanFilterWhenTableHasMultipleColumnFamilies() throws Exception { Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); Connection conn = DriverManager.getConnection(getUrl(), props); conn.setAutoCommit(false); String tableName = generateUniqueName(); String fullTableName = SchemaUtil.getTableName(TestUtil.DEFAULT_SCHEMA_NAME, tableName); try { TestUtil.createMultiCFTestTable(conn , fullTableName, null); populateMultiCFTestTable(fullTableName); String upsert = "UPSERT INTO " + fullTableName + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; PreparedStatement stmt = conn.prepareStatement(upsert); stmt.setString(1, "varchar4"); stmt.setString(2, "char1"); stmt.setInt(3, 1); stmt.setLong(4, 1L); stmt.setBigDecimal(5, new BigDecimal("1.1")); stmt.setString(6, "varchar_a"); stmt.setString(7, "chara"); stmt.setInt(8, 2); stmt.setLong(9, 2L); stmt.setBigDecimal(10, new BigDecimal("2.1")); stmt.setString(11, "varchar_b"); stmt.setString(12, "charb"); stmt.setInt(13, 3); stmt.setLong(14, 3L); stmt.setBigDecimal(15, new BigDecimal("3.1")); stmt.setDate(16, null); stmt.executeUpdate(); stmt.setString(1, "varchar5"); stmt.setString(2, "char2"); stmt.setInt(3, 2); stmt.setLong(4, 2L); stmt.setBigDecimal(5, new BigDecimal("2.2")); stmt.setString(6, "varchar_a"); stmt.setString(7, "chara"); stmt.setInt(8, 3); stmt.setLong(9, 3L); stmt.setBigDecimal(10, new BigDecimal("3.2")); stmt.setString(11, "varchar_b"); stmt.setString(12, "charb"); stmt.setInt(13, 4); stmt.setLong(14, 4L); stmt.setBigDecimal(15, new BigDecimal("4.2")); stmt.setDate(16, null); stmt.executeUpdate(); stmt.setString(1, "varchar6"); stmt.setString(2, "char3"); stmt.setInt(3, 3); stmt.setLong(4, 3L); stmt.setBigDecimal(5, new BigDecimal("3.3")); stmt.setString(6, "varchar_a"); stmt.setString(7, "chara"); stmt.setInt(8, 4); stmt.setLong(9, 4L); stmt.setBigDecimal(10, new BigDecimal("4.3")); stmt.setString(11, "varchar_b"); stmt.setString(12, "charb"); stmt.setInt(13, 5); stmt.setLong(14, 
5L); stmt.setBigDecimal(15, new BigDecimal("5.3")); stmt.setDate(16, null); stmt.executeUpdate(); conn.commit(); String query = "SELECT char_col1, int_col1, long_col2 from " + fullTableName + " where varchar_pk in ('varchar3','varchar6')"; ResultSet rs = conn.createStatement().executeQuery(query); assertTrue(rs.next()); assertEquals("chara", rs.getString(1)); assertEquals(4, rs.getInt(2)); assertEquals(5L, rs.getLong(3)); assertTrue(rs.next()); assertEquals("chara", rs.getString(1)); assertEquals(4, rs.getInt(2)); assertEquals(5L, rs.getLong(3)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testOrPKWithAndNonPK() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = generateUniqueName(); try { conn.createStatement().execute("create table " + tableName + "(ID varchar primary key,company varchar)"); conn.setAutoCommit(true); conn.createStatement().execute("upsert into " + tableName + " values('i1','c1')"); conn.createStatement().execute("upsert into " + tableName + " values('i2','c2')"); conn.createStatement().execute("upsert into " + tableName + " values('i3','c3')"); ResultSet rs = conn.createStatement().executeQuery("select * from " + tableName + " where ID = 'i1' or (ID = 'i2' and company = 'c3')"); assertTrue(rs.next()); assertEquals("i1", rs.getString(1)); assertEquals("c1", rs.getString(2)); assertFalse(rs.next()); } finally { conn.close(); } } @Test public void testNullInfiniteLoop() throws Exception { try (Connection conn = DriverManager.getConnection(getUrl())) { String tableName = generateUniqueName(); conn.setAutoCommit(true); conn.createStatement().execute( "create table " + tableName + "("+ "CREATETIME VARCHAR,"+ "ACCOUNTID VARCHAR,"+ "SERVICENAME VARCHAR,"+ "SPAN.APPID VARCHAR,"+ "CONSTRAINT pk PRIMARY KEY(CREATETIME,ACCOUNTID,SERVICENAME)"+ ")"); conn.createStatement().execute("upsert into " + tableName + "(CREATETIME,SERVICENAME,SPAN.APPID) 
values('20160116141006','servlet','android')"); conn.createStatement().execute("upsert into " + tableName + "(CREATETIME,ACCOUNTID,SERVICENAME,SPAN.APPID) values('20160116151006','2404787','jdbc','ios')"); ResultSet rs = conn.createStatement().executeQuery("select * from " + tableName + " where CREATETIME>='20160116121006' and CREATETIME<='20160116181006' and ACCOUNTID='2404787'"); assertTrue(rs.next()); assertFalse(rs.next()); } } @Test public void testSkipScanQueryWhenSplitKeyIsSmaller() throws Exception { try (Connection conn = DriverManager.getConnection(getUrl())) { String tableName = generateUniqueName(); StringBuffer buf = new StringBuffer("CREATE TABLE IF NOT EXISTS " + tableName + "(ORGANIZATION_ID CHAR(15) NOT NULL," + "FEED_ITEM_ID CHAR(15) NOT NULL," + "EXTENSION VARCHAR(128) NOT NULL," + "CREATED_TIME TIMESTAMP," + "LAST_UPDATE TIMESTAMP," + "LAST_ACCESSED TIMESTAMP," + "VERSION INTEGER," + "DATA.PAYLOAD VARCHAR(512000)" + "CONSTRAINT PK PRIMARY KEY" + "(" + " ORGANIZATION_ID," + " FEED_ITEM_ID," + " EXTENSION" + ")" + ")"); conn.createStatement().execute(buf.toString()); String upsert = "UPSERT INTO " + tableName + " (ORGANIZATION_ID, FEED_ITEM_ID, EXTENSION) VALUES (?, ?, ?)"; PreparedStatement stmt = conn.prepareStatement(upsert); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002MK5Uu"); stmt.setString(3, "FI"); stmt.executeUpdate(); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002MK5Uu"); stmt.setString(3, "T0"); stmt.executeUpdate(); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002QWbP0"); stmt.setString(3, "FI"); stmt.executeUpdate(); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002QWbP0"); stmt.setString(3, "T0"); stmt.executeUpdate(); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002QXXL2"); stmt.setString(3, "FI"); stmt.executeUpdate(); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002QXXL2"); stmt.setString(3, "T0"); 
stmt.executeUpdate(); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002RhvtQ"); stmt.setString(3, "FI"); stmt.executeUpdate(); stmt.setString(1, "00Do0000000a8w1"); stmt.setString(2, "0D5o000002RhvtQ"); stmt.setString(3, "T0"); stmt.executeUpdate(); conn.commit(); try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) { /* * The split key is 27 bytes instead of at least 30 bytes (CHAR(15) + CHAR(15)). * Note that we cannot use the phoenix way of giving split points in the ddl because * it ends up padding the split point bytes to 30. */ byte[] smallSplitKey = Bytes.toBytes("00Do0000000a8w10D5o000002Rhv"); admin.split(Bytes.toBytes(tableName), smallSplitKey); } ResultSet rs = conn.createStatement().executeQuery("SELECT EXTENSION FROM " + tableName + " WHERE " + "ORGANIZATION_ID = '00Do0000000a8w1' AND " + "FEED_ITEM_ID IN " + "('0D5o000002MK5Uu','0D5o000002QWbP0','0D5o000002QXXL2','0D5o000002RhvtQ') ORDER BY ORGANIZATION_ID, FEED_ITEM_ID, EXTENSION"); assertTrue(rs.next()); assertEquals("FI", rs.getString(1)); assertTrue(rs.next()); assertEquals("T0", rs.getString(1)); assertTrue(rs.next()); assertEquals("FI", rs.getString(1)); assertTrue(rs.next()); assertEquals("T0", rs.getString(1)); assertTrue(rs.next()); assertEquals("FI", rs.getString(1)); assertTrue(rs.next()); assertEquals("T0", rs.getString(1)); assertTrue(rs.next()); assertEquals("FI", rs.getString(1)); assertTrue(rs.next()); assertEquals("T0", rs.getString(1)); assertFalse(rs.next()); } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.documentai.v1beta3.model; /** * A phrase in the text that is a known entity type, such as a person, an organization, or location. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Cloud Document AI API. For a detailed explanation * see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GoogleCloudDocumentaiV1beta1DocumentEntity extends com.google.api.client.json.GenericJson { /** * Optional. Confidence of detected Schema entity. Range [0, 1]. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Float confidence; /** * Optional. Canonical id. This will be a unique value in the entity list for this document. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String id; /** * Deprecated. Use `id` field instead. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String mentionId; /** * Text value in the document e.g. `1600 Amphitheatre Pkwy`. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String mentionText; /** * Optional. Normalized entity value. Absent if the extracted value could not be converted or the * type (e.g. address) is not supported for certain parsers. This field is also only populated for * certain supported document types. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudDocumentaiV1beta1DocumentEntityNormalizedValue normalizedValue; /** * Optional. Represents the provenance of this entity wrt. the location on the page where it was * found. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudDocumentaiV1beta1DocumentPageAnchor pageAnchor; /** * Optional. Entities can be nested to form a hierarchical data structure representing the content * in the document. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<GoogleCloudDocumentaiV1beta1DocumentEntity> properties; /** * Optional. The history of this annotation. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudDocumentaiV1beta1DocumentProvenance provenance; /** * Optional. Whether the entity will be redacted for de-identification purposes. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean redacted; /** * Provenance of the entity. Text anchor indexing into the Document.text. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudDocumentaiV1beta1DocumentTextAnchor textAnchor; /** * Entity type from a schema e.g. `Address`. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String type; /** * Optional. Confidence of detected Schema entity. Range [0, 1]. * @return value or {@code null} for none */ public java.lang.Float getConfidence() { return confidence; } /** * Optional. Confidence of detected Schema entity. Range [0, 1]. 
* @param confidence confidence or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setConfidence(java.lang.Float confidence) { this.confidence = confidence; return this; } /** * Optional. Canonical id. This will be a unique value in the entity list for this document. * @return value or {@code null} for none */ public java.lang.String getId() { return id; } /** * Optional. Canonical id. This will be a unique value in the entity list for this document. * @param id id or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setId(java.lang.String id) { this.id = id; return this; } /** * Deprecated. Use `id` field instead. * @return value or {@code null} for none */ public java.lang.String getMentionId() { return mentionId; } /** * Deprecated. Use `id` field instead. * @param mentionId mentionId or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setMentionId(java.lang.String mentionId) { this.mentionId = mentionId; return this; } /** * Text value in the document e.g. `1600 Amphitheatre Pkwy`. * @return value or {@code null} for none */ public java.lang.String getMentionText() { return mentionText; } /** * Text value in the document e.g. `1600 Amphitheatre Pkwy`. * @param mentionText mentionText or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setMentionText(java.lang.String mentionText) { this.mentionText = mentionText; return this; } /** * Optional. Normalized entity value. Absent if the extracted value could not be converted or the * type (e.g. address) is not supported for certain parsers. This field is also only populated for * certain supported document types. * @return value or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntityNormalizedValue getNormalizedValue() { return normalizedValue; } /** * Optional. Normalized entity value. Absent if the extracted value could not be converted or the * type (e.g. 
address) is not supported for certain parsers. This field is also only populated for * certain supported document types. * @param normalizedValue normalizedValue or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setNormalizedValue(GoogleCloudDocumentaiV1beta1DocumentEntityNormalizedValue normalizedValue) { this.normalizedValue = normalizedValue; return this; } /** * Optional. Represents the provenance of this entity wrt. the location on the page where it was * found. * @return value or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentPageAnchor getPageAnchor() { return pageAnchor; } /** * Optional. Represents the provenance of this entity wrt. the location on the page where it was * found. * @param pageAnchor pageAnchor or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setPageAnchor(GoogleCloudDocumentaiV1beta1DocumentPageAnchor pageAnchor) { this.pageAnchor = pageAnchor; return this; } /** * Optional. Entities can be nested to form a hierarchical data structure representing the content * in the document. * @return value or {@code null} for none */ public java.util.List<GoogleCloudDocumentaiV1beta1DocumentEntity> getProperties() { return properties; } /** * Optional. Entities can be nested to form a hierarchical data structure representing the content * in the document. * @param properties properties or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setProperties(java.util.List<GoogleCloudDocumentaiV1beta1DocumentEntity> properties) { this.properties = properties; return this; } /** * Optional. The history of this annotation. * @return value or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentProvenance getProvenance() { return provenance; } /** * Optional. The history of this annotation. 
* @param provenance provenance or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setProvenance(GoogleCloudDocumentaiV1beta1DocumentProvenance provenance) { this.provenance = provenance; return this; } /** * Optional. Whether the entity will be redacted for de-identification purposes. * @return value or {@code null} for none */ public java.lang.Boolean getRedacted() { return redacted; } /** * Optional. Whether the entity will be redacted for de-identification purposes. * @param redacted redacted or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setRedacted(java.lang.Boolean redacted) { this.redacted = redacted; return this; } /** * Provenance of the entity. Text anchor indexing into the Document.text. * @return value or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentTextAnchor getTextAnchor() { return textAnchor; } /** * Provenance of the entity. Text anchor indexing into the Document.text. * @param textAnchor textAnchor or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setTextAnchor(GoogleCloudDocumentaiV1beta1DocumentTextAnchor textAnchor) { this.textAnchor = textAnchor; return this; } /** * Entity type from a schema e.g. `Address`. * @return value or {@code null} for none */ public java.lang.String getType() { return type; } /** * Entity type from a schema e.g. `Address`. * @param type type or {@code null} for none */ public GoogleCloudDocumentaiV1beta1DocumentEntity setType(java.lang.String type) { this.type = type; return this; } @Override public GoogleCloudDocumentaiV1beta1DocumentEntity set(String fieldName, Object value) { return (GoogleCloudDocumentaiV1beta1DocumentEntity) super.set(fieldName, value); } @Override public GoogleCloudDocumentaiV1beta1DocumentEntity clone() { return (GoogleCloudDocumentaiV1beta1DocumentEntity) super.clone(); } }
/* * Copyright (c) 2015-present, Jim Kynde Meyer * All rights reserved. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.intellij.lang.jsgraphql.ide.structureView; import com.google.common.collect.Lists; import com.intellij.ide.structureView.StructureViewTreeElement; import com.intellij.ide.structureView.impl.common.PsiTreeElementBase; import com.intellij.lang.jsgraphql.psi.GraphQLArgument; import com.intellij.lang.jsgraphql.psi.GraphQLArguments; import com.intellij.lang.jsgraphql.psi.GraphQLArgumentsDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLEnumValueDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLEnumValueDefinitions; import com.intellij.lang.jsgraphql.psi.GraphQLField; import com.intellij.lang.jsgraphql.psi.GraphQLFieldDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLFieldsDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLFile; import com.intellij.lang.jsgraphql.psi.GraphQLFragmentDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLFragmentSelection; import com.intellij.lang.jsgraphql.psi.GraphQLFragmentSpread; import com.intellij.lang.jsgraphql.psi.GraphQLIdentifier; import com.intellij.lang.jsgraphql.psi.GraphQLInlineFragment; import com.intellij.lang.jsgraphql.psi.GraphQLInputObjectValueDefinitions; import com.intellij.lang.jsgraphql.psi.GraphQLInputValueDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLNamedElement; import com.intellij.lang.jsgraphql.psi.GraphQLOperationDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLSelection; import com.intellij.lang.jsgraphql.psi.GraphQLSelectionSet; import com.intellij.lang.jsgraphql.psi.GraphQLSelectionSetOperationDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLTypeCondition; import com.intellij.lang.jsgraphql.psi.GraphQLTypeName; import com.intellij.lang.jsgraphql.psi.GraphQLTypeNameDefinition; import 
com.intellij.lang.jsgraphql.psi.GraphQLTypeSystemDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLTypedOperationDefinition; import com.intellij.lang.jsgraphql.psi.GraphQLUnionMembers; import com.intellij.lang.jsgraphql.psi.GraphQLUnionMembership; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiNameIdentifierOwner; import com.intellij.psi.util.PsiTreeUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Collection; import java.util.List; /** * A node in the GraphQL structure tree view */ public class GraphQLStructureViewTreeElement extends PsiTreeElementBase<PsiElement> { final PsiElement childrenBase; private final PsiElement element; public GraphQLStructureViewTreeElement(PsiElement childrenBase, PsiElement psiElement) { super(psiElement); this.element = psiElement; this.childrenBase = childrenBase; } @NotNull @Override public Collection<StructureViewTreeElement> getChildrenBase() { final List<StructureViewTreeElement> children = Lists.newArrayList(); // See GraphQLParser.bnf for structure reference if (childrenBase instanceof GraphQLFile) { addFileChildren(children); } else if (childrenBase instanceof GraphQLSelectionSet) { addSelectionSetChildren(children); } else if (childrenBase instanceof GraphQLSelectionSetOperationDefinition) { addSelectionSetChildren(children); } else if (childrenBase instanceof GraphQLField) { addFieldChildren(children); } else if (childrenBase instanceof GraphQLFieldsDefinition) { for (GraphQLFieldDefinition fieldDefinition : ((GraphQLFieldsDefinition) childrenBase).getFieldDefinitionList()) { children.add(new GraphQLStructureViewTreeElement(fieldDefinition, fieldDefinition.getNameIdentifier())); } } else if (childrenBase instanceof GraphQLEnumValueDefinitions) { for (GraphQLEnumValueDefinition enumValueDefinition : ((GraphQLEnumValueDefinitions) childrenBase).getEnumValueDefinitionList()) { final GraphQLIdentifier nameIdentifier = 
enumValueDefinition.getEnumValue().getNameIdentifier(); children.add(new GraphQLStructureViewTreeElement(nameIdentifier, nameIdentifier)); } } else if (childrenBase instanceof GraphQLInputObjectValueDefinitions) { for (GraphQLInputValueDefinition valueDefinition : ((GraphQLInputObjectValueDefinitions) childrenBase).getInputValueDefinitionList()) { final GraphQLIdentifier nameIdentifier = valueDefinition.getNameIdentifier(); children.add(new GraphQLStructureViewTreeElement(nameIdentifier, nameIdentifier)); } } else if (childrenBase instanceof GraphQLUnionMembership) { final GraphQLUnionMembers unionMembers = ((GraphQLUnionMembership) childrenBase).getUnionMembers(); if (unionMembers != null) { for (GraphQLTypeName unionTypeName : unionMembers.getTypeNameList()) { final GraphQLIdentifier nameIdentifier = unionTypeName.getNameIdentifier(); children.add(new GraphQLStructureViewTreeElement(nameIdentifier, nameIdentifier)); } } } else if (childrenBase instanceof GraphQLFieldDefinition) { final GraphQLArgumentsDefinition argumentsDefinition = ((GraphQLFieldDefinition) childrenBase).getArgumentsDefinition(); if (argumentsDefinition != null) { for (GraphQLInputValueDefinition valueDefinition : argumentsDefinition.getInputValueDefinitionList()) { final GraphQLIdentifier nameIdentifier = valueDefinition.getNameIdentifier(); children.add(new GraphQLStructureViewTreeElement(nameIdentifier, nameIdentifier)); } } } return children; } private void addFieldChildren(List<StructureViewTreeElement> children) { final GraphQLField field = (GraphQLField) this.childrenBase; final GraphQLArguments arguments = field.getArguments(); if (arguments != null) { for (GraphQLArgument argument : arguments.getArgumentList()) { children.add(new GraphQLStructureViewTreeElement(argument, argument.getNameIdentifier())); } } if (field.getSelectionSet() != null) { addSelectionSetChildren(children); } } private void addSelectionSetChildren(List<StructureViewTreeElement> children) { GraphQLSelectionSet 
selectionSet; if (childrenBase instanceof GraphQLSelectionSet) { selectionSet = (GraphQLSelectionSet) childrenBase; } else if (childrenBase instanceof GraphQLSelectionSetOperationDefinition) { selectionSet = ((GraphQLSelectionSetOperationDefinition) childrenBase).getSelectionSet(); } else if (childrenBase instanceof GraphQLField) { selectionSet = ((GraphQLField) childrenBase).getSelectionSet(); } else { return; } for (GraphQLSelection selection : selectionSet.getSelectionList()) { final GraphQLField field = selection.getField(); if (field != null) { children.add(new GraphQLStructureViewTreeElement(field, field.getNameIdentifier())); } else { final GraphQLFragmentSelection fragmentSelection = selection.getFragmentSelection(); if (fragmentSelection != null) { GraphQLFragmentSpread fragmentSpread = fragmentSelection.getFragmentSpread(); if (fragmentSpread != null) { children.add(new GraphQLStructureViewTreeElement(fragmentSpread, fragmentSpread.getNameIdentifier())); } else { GraphQLInlineFragment inlineFragment = fragmentSelection.getInlineFragment(); if (inlineFragment != null && inlineFragment.getSelectionSet() != null) { if (inlineFragment.getTypeCondition() != null && inlineFragment.getTypeCondition().getTypeName() != null) { children.add(new GraphQLStructureViewTreeElement(inlineFragment.getSelectionSet(), inlineFragment)); } } } } } } } private void addFileChildren(List<StructureViewTreeElement> children) { for (PsiElement child : childrenBase.getChildren()) { PsiElement nodeChildrenBase = child; PsiElement nodeElement = child; if (child instanceof GraphQLOperationDefinition) { GraphQLIdentifier nameIdentifier = ((GraphQLOperationDefinition) child).getNameIdentifier(); if (nameIdentifier != null) { nodeElement = nameIdentifier; } if (child instanceof GraphQLTypedOperationDefinition) { nodeChildrenBase = ((GraphQLTypedOperationDefinition) child).getSelectionSet(); } children.add(new GraphQLStructureViewTreeElement(nodeChildrenBase, nodeElement)); } else if 
(child instanceof GraphQLFragmentDefinition) { GraphQLIdentifier nameIdentifier = ((GraphQLFragmentDefinition) child).getNameIdentifier(); if (nameIdentifier != null) { nodeElement = nameIdentifier; } nodeChildrenBase = ((GraphQLFragmentDefinition) child).getSelectionSet(); children.add(new GraphQLStructureViewTreeElement(nodeChildrenBase, nodeElement)); } else if (child instanceof GraphQLTypeSystemDefinition) { // the name of the node is type name def for schema definitions, and type nae for schema type extensions GraphQLNamedElement schemaNodeElement = PsiTreeUtil.findChildOfAnyType(child, GraphQLTypeNameDefinition.class, GraphQLTypeName.class); // children of the type definitions/extensions are found in the follow element types PsiElement schemaNodeChildrenBase = PsiTreeUtil.findChildOfAnyType( child, GraphQLFieldsDefinition.class, GraphQLEnumValueDefinitions.class, GraphQLInputObjectValueDefinitions.class, GraphQLUnionMembership.class ); if (schemaNodeElement != null && schemaNodeChildrenBase != null) { final PsiElement nodeIdentifier = schemaNodeElement.getNameIdentifier() != null ? schemaNodeElement.getNameIdentifier() : schemaNodeElement; children.add(new GraphQLStructureViewTreeElement(schemaNodeChildrenBase, nodeIdentifier)); } } } } @Nullable @Override public String getPresentableText() { if (element instanceof GraphQLSelectionSetOperationDefinition) { return "anonymous query"; // "{}" selection as root, which corresponds to anonymous query } if (element instanceof GraphQLInlineFragment) { String text = "... 
on"; GraphQLTypeCondition typeCondition = ((GraphQLInlineFragment) element).getTypeCondition(); if (typeCondition != null && typeCondition.getTypeName() != null) { text += " " + typeCondition.getTypeName().getName(); } return text; } if (element instanceof GraphQLNamedElement) { String name = ((GraphQLNamedElement) element).getName(); if (name == null && element instanceof GraphQLTypedOperationDefinition) { return "anonymous query"; // "query(args) {}" } return name; } if (element instanceof PsiNameIdentifierOwner) { final PsiElement nameIdentifier = ((PsiNameIdentifierOwner) element).getNameIdentifier(); if (nameIdentifier != null) { return nameIdentifier.getText(); } } return element.getText(); } }
/******************************************************************************* * Copyright (c) 2013, SAP AG * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * - Neither the name of the SAP AG nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************/ package com.sap.research.primelife.pep; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import javax.xml.bind.JAXBException; import javax.xml.datatype.DatatypeConfigurationException; import org.herasaf.xacml.core.ProcessingException; import org.herasaf.xacml.core.policy.MissingAttributeException; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import com.sap.research.primelife.dao.PolicyDao; import com.sap.research.primelife.ds.pdp.matching.HandyAuthorizationsSet; import com.sap.research.primelife.ds.pep.PEP; import com.sap.research.primelife.exceptions.MissingPreferenceGroupException; import com.sap.research.primelife.exceptions.SyntaxException; import com.sap.research.primelife.exceptions.WritingException; import com.sap.research.primelife.marshalling.UnmarshallImpl; import eu.primelife.ppl.policy.impl.PolicyType; import eu.primelife.ppl.policy.impl.RuleType; public class TestUpdatePreferenceGroups { private static PolicyDao dao; private PolicyType readPolicyPrefs(String path) throws JAXBException, SyntaxException { UnmarshallImpl unmarshal = new UnmarshallImpl( PolicyType.class.getPackage()); return (PolicyType) unmarshal.unmarshal(getClass().getResourceAsStream( path)); } @BeforeClass public static void setUp() { dao = new PolicyDao(); dao.deleteAllPreferenceGroups(); } @Test public void testUpdatePreferenceGroup() throws IOException, JAXBException, SyntaxException, WritingException, org.herasaf.xacml.core.SyntaxException, ProcessingException, MissingAttributeException, DatatypeConfigurationException, MissingPreferenceGroupException { String policy = readFile("/testUpdatePrefGroups/demo_policy.xml"); PolicyType preferences = readPolicyPrefs("/testUpdatePrefGroups/demo_preferences.xml"); dao.addAsPreferenceGroup(preferences); eu.primelife.ppl.policy.xacml.impl.PolicyType newPolicy = 
(eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup1"); RuleType rule = (RuleType) newPolicy .getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems() .get(0).getItemRule(); HandyAuthorizationsSet handy = new HandyAuthorizationsSet(rule .getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertFalse(handy.getAuthzForDownstreamUsage().getIsAllowed()); Assert.assertEquals(2, handy.getAuthzForPurpose().getPurposes().size()); PEP pep = new PEP(); pep.updatePreferenceGroup(policy, "prefGroup1", null); eu.primelife.ppl.policy.xacml.impl.PolicyType changedPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup1"); rule = (RuleType) changedPolicy .getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems() .get(0).getItemRule(); handy = new HandyAuthorizationsSet(rule.getDataHandlingPreferences() .getAuthorizationsSet()); Assert.assertTrue(handy.getAuthzForDownstreamUsage().getIsAllowed()); Assert.assertEquals(3, handy.getAuthzForPurpose().getPurposes().size()); } @Test public void testUpdatePreferenceGroup2() throws IOException, JAXBException, SyntaxException, WritingException, org.herasaf.xacml.core.SyntaxException, ProcessingException, MissingAttributeException, DatatypeConfigurationException, MissingPreferenceGroupException { // test the case that the policy is null // the preferences must not be updated String policy = readFile("/testUpdatePrefGroups/demo_policy2.xml"); PolicyType preferences = readPolicyPrefs("/testUpdatePrefGroups/demo_preferences.xml"); dao.addAsPreferenceGroup(preferences); PEP pep = new PEP(); pep.updatePreferenceGroup(policy, "prefGroup1", null); eu.primelife.ppl.policy.xacml.impl.PolicyType changedPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup1"); RuleType rule = (RuleType) changedPolicy .getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems() 
.get(0).getItemRule(); HandyAuthorizationsSet handy = new HandyAuthorizationsSet(rule .getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertFalse(handy.isUnknown()); Assert.assertEquals(2, handy.getAuthzForPurpose().getPurposes().size()); } @Test public void testUpdatePreferenceGroup3() throws IOException, JAXBException, SyntaxException, WritingException, org.herasaf.xacml.core.SyntaxException, ProcessingException, MissingAttributeException, DatatypeConfigurationException, MissingPreferenceGroupException { // test the case that the policy is empty String policy = readFile("/testUpdatePrefGroups/demo_policy3.xml"); PolicyType preferences = readPolicyPrefs("/testUpdatePrefGroups/demo_preferences.xml"); dao.addAsPreferenceGroup(preferences); PEP pep = new PEP(); pep.updatePreferenceGroup(policy, "prefGroup1", null); eu.primelife.ppl.policy.xacml.impl.PolicyType changedPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup1"); RuleType rule = (RuleType) changedPolicy .getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems() .get(0).getItemRule(); HandyAuthorizationsSet handy = new HandyAuthorizationsSet(rule .getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertFalse(handy.isUnknown()); Assert.assertEquals(2, handy.getAuthzForPurpose().getPurposes().size()); } @Test public void testUpdatePreferenceGroup4() throws IOException, JAXBException, SyntaxException, WritingException, org.herasaf.xacml.core.SyntaxException, ProcessingException, MissingAttributeException, DatatypeConfigurationException, MissingPreferenceGroupException { // Test the case that the preferences are unknown, but the policy is set String policy = readFile("/testUpdatePrefGroups/demo_policy.xml"); PolicyType preferences = readPolicyPrefs("/testUpdatePrefGroups/demo_preferences2.xml"); dao.addAsPreferenceGroup(preferences); eu.primelife.ppl.policy.xacml.impl.PolicyType newPolicy = 
(eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup2"); RuleType rule = (RuleType) newPolicy.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems().get(0).getItemRule(); HandyAuthorizationsSet handy = new HandyAuthorizationsSet(rule.getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertTrue(handy.isUnknown()); PEP pep = new PEP(); pep.updatePreferenceGroup(policy, "prefGroup2", null); // after updating, the preference should contain the policy values eu.primelife.ppl.policy.xacml.impl.PolicyType changedPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup2"); rule = (RuleType) changedPolicy.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems().get(0).getItemRule(); handy = new HandyAuthorizationsSet(rule.getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertTrue(handy.getAuthzForDownstreamUsage().getIsAllowed()); Assert.assertEquals(3, handy.getAuthzForPurpose().getPurposes().size()); } @Test public void testUpdatePreferenceGroup5() throws IOException, JAXBException, SyntaxException, WritingException, org.herasaf.xacml.core.SyntaxException, ProcessingException, MissingAttributeException, DatatypeConfigurationException, MissingPreferenceGroupException { //Test that the preference has no downstream element and no useForPurpose element String policy = readFile("/testUpdatePrefGroups/demo_policy.xml"); PolicyType preferences = readPolicyPrefs("/testUpdatePrefGroups/demo_preferences3.xml"); dao.addAsPreferenceGroup(preferences); eu.primelife.ppl.policy.xacml.impl.PolicyType newPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup3"); RuleType rule = (RuleType) newPolicy.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems().get(0).getItemRule(); HandyAuthorizationsSet handy = new HandyAuthorizationsSet(rule.getDataHandlingPreferences().getAuthorizationsSet()); 
Assert.assertFalse(handy.getAuthzForDownstreamUsage().isDefined()); Assert.assertFalse(handy.getAuthzForPurpose().isDefined()); PEP pep = new PEP(); pep.updatePreferenceGroup(policy, "prefGroup3", null); eu.primelife.ppl.policy.xacml.impl.PolicyType changedPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup3"); rule = (RuleType) changedPolicy.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems().get(0).getItemRule(); handy = new HandyAuthorizationsSet(rule.getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertTrue(handy.getAuthzForDownstreamUsage().getIsAllowed()); Assert.assertEquals(3, handy.getAuthzForPurpose().getPurposes().size()); } @Test public void testUpdatePreferenceGroup6() throws IOException, JAXBException, SyntaxException, WritingException, org.herasaf.xacml.core.SyntaxException, ProcessingException, MissingAttributeException, DatatypeConfigurationException, MissingPreferenceGroupException { //Test that the preference has no downstream element and the policy does not allow dsu String policy = readFile("/testUpdatePrefGroups/demo_policy4.xml"); PolicyType preferences = readPolicyPrefs("/testUpdatePrefGroups/demo_preferences3.xml"); dao.addAsPreferenceGroup(preferences); eu.primelife.ppl.policy.xacml.impl.PolicyType newPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao .getPreferenceGroupPolicy("prefGroup3"); RuleType rule = (RuleType) newPolicy.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems().get(0).getItemRule(); HandyAuthorizationsSet handy = new HandyAuthorizationsSet(rule.getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertFalse(handy.getAuthzForDownstreamUsage().isDefined()); Assert.assertFalse(handy.getAuthzForPurpose().isDefined()); PEP pep = new PEP(); pep.updatePreferenceGroup(policy, "prefGroup3", null); eu.primelife.ppl.policy.xacml.impl.PolicyType changedPolicy = (eu.primelife.ppl.policy.xacml.impl.PolicyType) dao 
.getPreferenceGroupPolicy("prefGroup3"); rule = (RuleType) changedPolicy.getCombinerParametersOrRuleCombinerParametersOrVariableDefinitionItems().get(0).getItemRule(); handy = new HandyAuthorizationsSet(rule.getDataHandlingPreferences().getAuthorizationsSet()); Assert.assertTrue(handy.getAuthzForDownstreamUsage().isDefined()); Assert.assertFalse(handy.getAuthzForDownstreamUsage().getIsAllowed()); Assert.assertEquals(3, handy.getAuthzForPurpose().getPurposes().size()); } private String readFile(String path) throws IOException { InputStream in = getClass().getResourceAsStream(path); // read it with BufferedReader BufferedReader br = new BufferedReader(new InputStreamReader(in)); StringBuilder sb = new StringBuilder(); String line; while ((line = br.readLine()) != null) { sb.append(line); } br.close(); return sb.toString(); } }
/** * $URL$ * $Id$ * * Copyright (c) 2006-2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.sitestats.tool.wicket.pages; import java.text.Collator; import java.text.ParseException; import java.text.RuleBasedCollator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.wicket.AttributeModifier; import org.apache.wicket.PageParameters; import org.apache.wicket.WicketRuntimeException; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior; import org.apache.wicket.datetime.StyleDateConverter; import org.apache.wicket.datetime.markup.html.form.DateTextField; import org.apache.wicket.extensions.markup.html.form.select.IOptionRenderer; import org.apache.wicket.extensions.markup.html.form.select.Select; import org.apache.wicket.extensions.markup.html.form.select.SelectOption; import org.apache.wicket.extensions.markup.html.form.select.SelectOptions; import org.apache.wicket.extensions.yui.calendar.DateTimeField; import org.apache.wicket.markup.html.IHeaderResponse; import org.apache.wicket.markup.html.WebMarkupContainer; import 
org.apache.wicket.markup.html.WebPage; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.Button; import org.apache.wicket.markup.html.form.CheckBox; import org.apache.wicket.markup.html.form.DropDownChoice; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.IChoiceRenderer; import org.apache.wicket.markup.html.form.TextArea; import org.apache.wicket.markup.html.form.TextField; import org.apache.wicket.markup.html.panel.FeedbackPanel; import org.apache.wicket.markup.repeater.RepeatingView; import org.apache.wicket.model.CompoundPropertyModel; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.PropertyModel; import org.apache.wicket.model.ResourceModel; import org.apache.wicket.model.StringResourceModel; import org.apache.wicket.util.string.Strings; import org.sakaiproject.authz.api.Role; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.site.api.Group; import org.sakaiproject.site.api.Site; import org.sakaiproject.sitestats.api.PrefsData; import org.sakaiproject.sitestats.api.StatsManager; import org.sakaiproject.sitestats.api.event.EventInfo; import org.sakaiproject.sitestats.api.event.ToolInfo; import org.sakaiproject.sitestats.api.parser.EventParserTip; import org.sakaiproject.sitestats.api.report.ReportDef; import org.sakaiproject.sitestats.api.report.ReportManager; import org.sakaiproject.sitestats.api.report.ReportParams; import org.sakaiproject.sitestats.tool.facade.Locator; import org.sakaiproject.sitestats.tool.wicket.components.CSSFeedbackPanel; import org.sakaiproject.sitestats.tool.wicket.components.FileSelectorPanel; import org.sakaiproject.sitestats.tool.wicket.components.IStylableOptionRenderer; import org.sakaiproject.sitestats.tool.wicket.components.IndicatingAjaxDropDownChoice; import org.sakaiproject.sitestats.tool.wicket.components.LastJobRun; import 
org.sakaiproject.sitestats.tool.wicket.components.Menus; import org.sakaiproject.sitestats.tool.wicket.components.StylableSelectOptions; import org.sakaiproject.sitestats.tool.wicket.components.StylableSelectOptionsGroup; import org.sakaiproject.sitestats.tool.wicket.models.EventModel; import org.sakaiproject.sitestats.tool.wicket.models.ReportDefModel; import org.sakaiproject.sitestats.tool.wicket.models.ToolModel; import org.sakaiproject.user.api.User; import org.sakaiproject.user.api.UserNotDefinedException; import org.sakaiproject.util.Web; /** * @author Nuno Fernandes */ public class ReportsEditPage extends BasePage { private static final long serialVersionUID = 1L; private static Log LOG = LogFactory.getLog(ReportsEditPage.class); private static final String REPORT_THISSITE = "this"; private static final String REPORT_ALLSITES = "all"; private String realSiteId; private String siteId; private boolean predefined = false; private String reportSiteOpt = REPORT_THISSITE; private boolean visitsEnabled = true; private FeedbackPanel feedback = null; /** Options visiblity */ private boolean visitsVisible = true; private boolean activityVisible = true; private boolean resourcesVisible = true; private boolean presencesVisible = true; /** Report related */ private ReportDefModel reportDefModel; private PrefsData prefsdata = null; private WebPage returnPage; /** Ajax update lock */ private final ReentrantLock ajaxUpdateLock = new ReentrantLock(); private boolean usersLoaded = false; private static Log log = LogFactory.getLog(ReportsEditPage.class); private transient Collator collator = Collator.getInstance(); { try{ collator= new RuleBasedCollator(((RuleBasedCollator)Collator.getInstance()).getRules().replaceAll("<'\u005f'", "<' '<'\u005f'")); }catch(ParseException e){ log.error("Unable to create RuleBasedCollator"); } } public ReportsEditPage() { this(null, null, null); } public ReportsEditPage(ReportDefModel reportDef) { this(reportDef, null, null); } public 
ReportsEditPage(PageParameters pageParameters) { this(null, pageParameters, null); } public ReportsEditPage(ReportDefModel reportDef, PageParameters pageParameters, final WebPage returnPage) { realSiteId = Locator.getFacade().getToolManager().getCurrentPlacement().getContext(); if(pageParameters != null) { siteId = pageParameters.getString("siteId"); predefined = pageParameters.getBoolean("predefined"); } if(siteId == null) { siteId = realSiteId; } if(reportDef != null) { this.reportDefModel = reportDef; }else{ if(predefined) { this.reportDefModel = new ReportDefModel(null, null); }else{ this.reportDefModel = new ReportDefModel(siteId, siteId); } } if(returnPage == null) { this.returnPage = new ReportsPage(pageParameters); }else{ this.returnPage = returnPage; } boolean allowed = Locator.getFacade().getStatsAuthz().isUserAbleToViewSiteStats(siteId); if(allowed) { // options visibility visitsVisible = Locator.getFacade().getStatsManager().isEnableSiteVisits() && Locator.getFacade().getStatsManager().isVisitsInfoAvailable(); activityVisible = Locator.getFacade().getStatsManager().isEnableSiteActivity(); resourcesVisible = false; try{ resourcesVisible = Locator.getFacade().getStatsManager().isEnableResourceStats() && (Locator.getFacade().getSiteService().getSite(siteId).getToolForCommonId(StatsManager.RESOURCES_TOOLID) != null); }catch(Exception e) { resourcesVisible = false; } presencesVisible = Locator.getFacade().getStatsManager().isEnableSitePresences(); // render body renderBody(); }else{ setResponsePage(NotAuthorizedPage.class); } } @Override public void renderHead(IHeaderResponse response) { super.renderHead(response); response.renderJavascriptReference(JQUERYSCRIPT); response.renderJavascriptReference(StatsManager.SITESTATS_WEBAPP + "/script/reports.js"); StringBuilder onDomReady = new StringBuilder(); onDomReady.append("checkWhatSelection();"); onDomReady.append("checkWhenSelection();"); onDomReady.append("checkWhoSelection();"); 
onDomReady.append("checkHowSelection();"); onDomReady.append("checkReportDetails();"); onDomReady.append("checkHowChartSelection();"); response.renderOnDomReadyJavascript(onDomReady.toString()); } private void renderBody() { StatsManager statsManager = Locator.getFacade().getStatsManager(); // menu add(new Menus("menu", siteId)); // reportAction String action = null; if(getReportDef().isTitleLocalized()) { if(reportDefModel.isNew()) { action = (String) new ResourceModel("report_adding").getObject(); }else{ action = (String) new ResourceModel("report_editing").getObject(); } action = action.replaceAll("\\$\\{title\\}", (String) new ResourceModel(getReportDef().getTitleBundleKey()).getObject()); }else{ if(reportDefModel.isNew()) { action = new StringResourceModel("report_adding", this, reportDefModel).getString(); }else{ action = new StringResourceModel("report_editing", this, reportDefModel).getString(); } } add(new Label("reportAction", action)); // model visitsEnabled = statsManager.isEnableSiteVisits(); if(!visitsEnabled) { getReportParams().setWhat(ReportManager.WHAT_EVENTS_BYTOOL); } setDefaultModel(new CompoundPropertyModel(this)); // last job run add(new LastJobRun("lastJobRun", siteId)); // form Form form = new Form("reportsForm"); form.setOutputMarkupId(true); add(form); // feedback panel (messages) feedback = new CSSFeedbackPanel("messages"); feedback.setOutputMarkupId(true); form.add(feedback); // report details, what, when & who renderReportDetailsUI(form); renderWhatUI(form); renderWhenUI(form); renderWhoUI(form); renderHowUI(form); // buttons final Button generateReport = new Button("generateReport") { @Override public void onSubmit() { if(validReportParameters()) { if(predefined) { getReportParams().setSiteId(siteId); } setResponsePage(new ReportDataPage(reportDefModel, new PageParameters("siteId="+siteId), ReportsEditPage.this)); } super.onSubmit(); } }; form.add(generateReport); final Button saveReport = new Button("saveReport") { @Override public 
void onSubmit() { if(validReportParameters()) { if(getReportDef().getTitle() == null || getReportDef().getTitle().trim().length() == 0) { error((String) new ResourceModel("report_reporttitle_req").getObject()); }else{ if(predefined) { getReportParams().setSiteId(null); } boolean saved = Locator.getFacade().getReportManager().saveReportDefinition(getReportDef()); String titleStr = null; if(saved) { if(getReportDef().isTitleLocalized()) { titleStr = (String) new ResourceModel("report_save_success").getObject(); titleStr = titleStr.replaceAll("\\$\\{title\\}", (String) new ResourceModel(getReportDef().getTitleBundleKey()).getObject()); }else{ titleStr = new StringResourceModel("report_save_success", getPage(), reportDefModel).getString(); } info(titleStr); setResponsePage(returnPage); }else{ if(getReportDef().isTitleLocalized()) { titleStr = (String) new ResourceModel("report_save_error").getObject(); titleStr = titleStr.replaceAll("\\$\\{title\\}", (String) new ResourceModel(getReportDef().getTitleBundleKey()).getObject()); }else{ titleStr = new StringResourceModel("report_save_error", getPage(), reportDefModel).getString(); } error(titleStr); } } } super.onSubmit(); } }; saveReport.setVisible(!predefined || (predefined && Locator.getFacade().getStatsAuthz().isSiteStatsAdminPage() && realSiteId.equals(siteId))); form.add(saveReport); final Button back = new Button("back") { @Override public void onSubmit() { reportDefModel.detach(); setResponsePage(returnPage); super.onSubmit(); } }; back.setDefaultFormProcessing(false); form.add(back); } @SuppressWarnings("serial") private void renderReportDetailsUI(Form form) { // top WebMarkupContainer reportDetailsTop = new WebMarkupContainer("reportDetailsTop"); WebMarkupContainer reportDetailsShow = new WebMarkupContainer("reportDetailsShow"); reportDetailsTop.add(reportDetailsShow); form.add(reportDetailsTop); WebMarkupContainer fakeReportDetails = new WebMarkupContainer("fakeReportDetails"); 
reportDetailsTop.add(fakeReportDetails); // details WebMarkupContainer reportDetails = new WebMarkupContainer("reportDetails"); form.add(reportDetails); // details: title TextField title = new TextField("reportDef.title"); reportDetails.add(title); final WebMarkupContainer titleLocalizedContainer = new WebMarkupContainer("titleLocalizedContainer"); titleLocalizedContainer.setOutputMarkupId(true); titleLocalizedContainer.setOutputMarkupPlaceholderTag(true); titleLocalizedContainer.setVisible(getReportDef().isTitleLocalized()); titleLocalizedContainer.add(new Label("titleLocalized")); reportDetails.add(titleLocalizedContainer); title.add(new AjaxFormComponentUpdatingBehavior("onchange") { @Override protected void onUpdate(AjaxRequestTarget target) { titleLocalizedContainer.setVisible(getReportDef().isTitleLocalized()); target.addComponent(titleLocalizedContainer); target.appendJavascript("setMainFrameHeightNoScroll(window.name);"); } }); // details: description TextArea description = new TextArea("reportDef.description"); reportDetails.add(description); final WebMarkupContainer descriptionLocalizedContainer = new WebMarkupContainer("descriptionLocalizedContainer"); descriptionLocalizedContainer.setOutputMarkupId(true); descriptionLocalizedContainer.setOutputMarkupPlaceholderTag(true); descriptionLocalizedContainer.setVisible(getReportDef().isDescriptionLocalized()); descriptionLocalizedContainer.add(new Label("descriptionLocalized")); reportDetails.add(descriptionLocalizedContainer); description.add(new AjaxFormComponentUpdatingBehavior("onchange") { @Override protected void onUpdate(AjaxRequestTarget target) { descriptionLocalizedContainer.setVisible(getReportDef().isDescriptionLocalized()); target.addComponent(descriptionLocalizedContainer); target.appendJavascript("setMainFrameHeightNoScroll(window.name);"); } }); // set visibility if(predefined) { if(Locator.getFacade().getStatsAuthz().isSiteStatsAdminPage() && realSiteId.equals(siteId)) { 
reportDetailsTop.setVisible(true); reportDetailsShow.setVisible(false); reportDetails.setVisible(true); fakeReportDetails.setVisible(false); }else{ reportDetailsTop.setVisible(false); reportDetailsShow.setVisible(false); reportDetails.setVisible(false); } }else{ reportDetailsTop.setVisible(true); reportDetailsShow.setVisible(true); reportDetails.add(new AttributeModifier("style", true, new Model("display: none"))); } } @SuppressWarnings("serial") private void renderWhatUI(Form form) { // ------------------------------------------------------- // left panel // ------------------------------------------------------- // activity List<String> whatOptions = new ArrayList<String>(); if(visitsVisible) { whatOptions.add(ReportManager.WHAT_VISITS); } if(activityVisible) { whatOptions.add(ReportManager.WHAT_EVENTS); } if(resourcesVisible) { whatOptions.add(ReportManager.WHAT_RESOURCES); } if(presencesVisible) { whatOptions.add(ReportManager.WHAT_PRESENCES); } IChoiceRenderer whatChoiceRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(ReportManager.WHAT_VISITS.equals(object)) { return new ResourceModel("report_what_visits").getObject(); } if(ReportManager.WHAT_EVENTS.equals(object)) { return new ResourceModel("report_what_events").getObject(); } if(ReportManager.WHAT_RESOURCES.equals(object)) { return new ResourceModel("report_what_resources").getObject(); } if(ReportManager.WHAT_PRESENCES.equals(object)) { return new ResourceModel("report_what_presences").getObject(); } return object; } public String getIdValue(Object object, int index) { return (String) object; } }; DropDownChoice what = new DropDownChoice("reportParams.what", whatOptions, whatChoiceRenderer); what.setMarkupId("what"); what.setOutputMarkupId(true); form.add(what); // event selection type List<String> whatEventSelTypeOptions = Arrays.asList(ReportManager.WHAT_EVENTS_BYTOOL, ReportManager.WHAT_EVENTS_BYEVENTS); IChoiceRenderer whatEventSelTypeChoiceRenderer = new 
IChoiceRenderer() { public Object getDisplayValue(Object object) { if(ReportManager.WHAT_EVENTS_BYTOOL.equals(object)) { return new ResourceModel("report_what_events_bytool").getObject(); } if(ReportManager.WHAT_EVENTS_BYEVENTS.equals(object)) { return new ResourceModel("report_what_events_byevent").getObject(); } return object; } public String getIdValue(Object object, int index) { return (String) object; } }; DropDownChoice whatEventSelType = new DropDownChoice("reportParams.whatEventSelType", whatEventSelTypeOptions, whatEventSelTypeChoiceRenderer); whatEventSelType.setEscapeModelStrings(false); whatEventSelType.setMarkupId("whatEventSelType"); whatEventSelType.setOutputMarkupId(true); form.add(whatEventSelType); // tool selection Select whatToolIds = new Select("reportParams.whatToolIds"); RepeatingView selectOptionsRV1 = new RepeatingView("selectOptionsRV1"); whatToolIds.add(selectOptionsRV1); whatToolIds.add(new AttributeModifier("title", true, new ResourceModel("report_multiple_sel_instruction"))); addTools(selectOptionsRV1); form.add(whatToolIds); // event selection Select whatEventIds = new Select("reportParams.whatEventIds"); RepeatingView selectOptionsRV2 = new RepeatingView("selectOptionsRV2"); whatEventIds.add(selectOptionsRV2); whatEventIds.add(new AttributeModifier("title", true, new ResourceModel("report_multiple_sel_instruction"))); addEvents(selectOptionsRV2); form.add(whatEventIds); // resources selection boolean isSiteStatsAdminTool = Locator.getFacade().getStatsAuthz().isSiteStatsAdminPage(); boolean showDefaultBaseFoldersOnly = isSiteStatsAdminTool && predefined && realSiteId.equals(siteId); CheckBox whatLimitedAction = new CheckBox("reportParams.whatLimitedAction"); whatLimitedAction.setMarkupId("whatLimitedAction"); whatLimitedAction.setOutputMarkupId(true); form.add(whatLimitedAction); CheckBox whatLimitedResourceIds = new CheckBox("reportParams.whatLimitedResourceIds"); whatLimitedResourceIds.setMarkupId("whatLimitedResourceIds"); 
whatLimitedResourceIds.setOutputMarkupId(true); form.add(whatLimitedResourceIds); final FileSelectorPanel whatResourceIds = new FileSelectorPanel("reportParams.whatResourceIds", siteId, showDefaultBaseFoldersOnly); whatResourceIds.setMarkupId("whatResourceIds"); whatResourceIds.setOutputMarkupId(true); form.add(whatResourceIds); whatResourceIds.setEnabled(true); // resource actions List<String> resourceActions = new ArrayList<String>(); resourceActions.add(ReportManager.WHAT_RESOURCES_ACTION_NEW); resourceActions.add(ReportManager.WHAT_RESOURCES_ACTION_READ); resourceActions.add(ReportManager.WHAT_RESOURCES_ACTION_REVS); resourceActions.add(ReportManager.WHAT_RESOURCES_ACTION_DEL); DropDownChoice whatResourceAction = new DropDownChoice("reportParams.whatResourceAction", resourceActions, new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(object == null){ return ""; }else{ return (String) new ResourceModel("action_" + ((String) object)).getObject(); } } public String getIdValue(Object object, int index) { return (String) object; } }) { @Override protected CharSequence getDefaultChoice(Object selected) { return ""; } }; whatResourceAction.setMarkupId("whatResourceAction"); whatResourceAction.setOutputMarkupId(true); form.add(whatResourceAction); } @SuppressWarnings("serial") private void renderWhenUI(Form form) { List<String> whenOptions = Arrays.asList( ReportManager.WHEN_ALL, ReportManager.WHEN_LAST7DAYS, ReportManager.WHEN_LAST30DAYS, ReportManager.WHEN_LAST365DAYS, ReportManager.WHEN_CUSTOM ); IChoiceRenderer whenChoiceRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(ReportManager.WHEN_ALL.equals(object)) { return new ResourceModel("report_when_all").getObject(); } if(ReportManager.WHEN_LAST7DAYS.equals(object)) { return new ResourceModel("report_when_last7days").getObject(); } if(ReportManager.WHEN_LAST30DAYS.equals(object)) { return new ResourceModel("report_when_last30days").getObject(); } 
if(ReportManager.WHEN_LAST365DAYS.equals(object)) { return new ResourceModel("report_when_last365days").getObject(); } if(ReportManager.WHEN_CUSTOM.equals(object)) { return new ResourceModel("report_when_custom").getObject(); } return object; } public String getIdValue(Object object, int index) { return (String) object; } }; DropDownChoice when = new DropDownChoice("reportParams.when", whenOptions, whenChoiceRenderer); when.setMarkupId("when"); when.setOutputMarkupId(true); form.add(when); // custom dates form.add(new DateTimeField("reportParams.whenFrom") { @Override protected DateTextField newDateTextField(String id, PropertyModel dateFieldModel) { return new DateTextField(id, dateFieldModel, new StyleDateConverter("S-", true)); } }); form.add(new DateTimeField("reportParams.whenTo") { @Override protected DateTextField newDateTextField(String id, PropertyModel dateFieldModel) { return new DateTextField(id, dateFieldModel, new StyleDateConverter("S-", true)); } }); } @SuppressWarnings("serial") private void renderWhoUI(Form form) { List<String> groups = getGroups(); final RepeatingView selectOptionsRV = new RepeatingView("selectOptionsRV"); final Select whoUserIds = new MultipleSelect("reportParams.whoUserIds"); // who List<String> whoOptions = new ArrayList<String>(); whoOptions.add(ReportManager.WHO_ALL); whoOptions.add(ReportManager.WHO_ROLE); whoOptions.add(ReportManager.WHO_CUSTOM); whoOptions.add(ReportManager.WHO_NONE); if(groups.size() > 0) { whoOptions.add(2, ReportManager.WHO_GROUPS); } IChoiceRenderer whoChoiceRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(ReportManager.WHO_ALL.equals(object)) { return new ResourceModel("report_who_all").getObject(); } if(ReportManager.WHO_ROLE.equals(object)) { return new ResourceModel("report_who_role").getObject(); } if(ReportManager.WHO_GROUPS.equals(object)) { return new ResourceModel("report_who_group").getObject(); } if(ReportManager.WHO_CUSTOM.equals(object)) { return new 
ResourceModel("report_who_custom").getObject(); } if(ReportManager.WHO_NONE.equals(object)) { return new ResourceModel("report_who_none").getObject(); } return object; } public String getIdValue(Object object, int index) { return (String) object; } }; final IndicatingAjaxDropDownChoice who = new IndicatingAjaxDropDownChoice("reportParams.who", whoOptions, whoChoiceRenderer); who.add(new AjaxFormComponentUpdatingBehavior("onchange") { @Override protected void onUpdate(AjaxRequestTarget target) { if(ReportManager.WHO_CUSTOM.equals(getReportParams().getWho())) { addUsers(selectOptionsRV); whoUserIds.add(new AttributeModifier("style", true, new Model("width: 300px"))); who.remove(this); whoUserIds.add(new AttributeModifier("onchange", true, new Model("checkWhoSelection();"))); target.addComponent(who); target.addComponent(whoUserIds); } target.appendJavascript("checkWhoSelection();"); } @Override protected CharSequence generateCallbackScript(CharSequence partialCall) { CharSequence ajaxScript = super.generateCallbackScript(partialCall); StringBuilder b = new StringBuilder(); b.append("checkWhoSelection();"); b.append("if(jQuery('#who').val() == 'who-custom') {;"); b.append(ajaxScript); b.append("}"); return b.toString(); } }); who.setMarkupId("who"); who.setOutputMarkupId(true); form.add(who); // users selectOptionsRV.setRenderBodyOnly(true); selectOptionsRV.setEscapeModelStrings(true); whoUserIds.add(selectOptionsRV); whoUserIds.add(new AttributeModifier("title", true, new ResourceModel("report_multiple_sel_instruction"))); whoUserIds.setOutputMarkupId(true); whoUserIds.setOutputMarkupPlaceholderTag(true); whoUserIds.setEscapeModelStrings(true); form.add(whoUserIds); boolean preloadData = ReportManager.WHO_CUSTOM.equals(getReportParams().getWho()); if(preloadData) { addUsers(selectOptionsRV); } // roles List<String> roles = getRoles(); IChoiceRenderer rolesRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { return (String) object; } 
public String getIdValue(Object object, int index) { return (String) object; } }; Collections.sort(roles, getChoiceRendererComparator(collator, rolesRenderer)); DropDownChoice whoRoleId = new DropDownChoice("reportParams.whoRoleId", roles, rolesRenderer); whoRoleId.setEnabled(roles.size() > 0); if(getReportParams().getWhoRoleId() == null) { if(roles.size() > 0) { getReportParams().setWhoRoleId(roles.get(0)); }else{ getReportParams().setWhoRoleId(""); } } form.add(whoRoleId); // groups WebMarkupContainer whoGroupTr = new WebMarkupContainer("who-groups-tr"); form.add(whoGroupTr); IChoiceRenderer groupsRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { try{ return Locator.getFacade().getSiteService().getSite(siteId).getGroup((String) object).getTitle(); }catch(IdUnusedException e){ return (String) object; } } public String getIdValue(Object object, int index) { return (String) object; } }; Collections.sort(groups, getChoiceRendererComparator(collator, groupsRenderer)); DropDownChoice whoGroupId = new DropDownChoice("reportParams.whoGroupId", groups, groupsRenderer); if(groups.size() == 0) { whoGroupTr.setVisible(false); }else{ if(getReportParams().getWhoGroupId() == null) { if(groups.size() > 0) { getReportParams().setWhoGroupId(groups.get(0)); }else{ getReportParams().setWhoGroupId(""); } } } whoGroupTr.add(whoGroupId); } @SuppressWarnings("serial") private void renderHowUI(Form form) { boolean isSiteStatsAdminTool = Locator.getFacade().getStatsAuthz().isSiteStatsAdminPage(); boolean renderSiteSelectOption = Locator.getFacade().getStatsAuthz().isSiteStatsAdminPage() && !predefined && realSiteId.equals(siteId); boolean renderSiteSortOption = isSiteStatsAdminTool && !predefined && realSiteId.equals(siteId); boolean renderSortAscendingOption = isSiteStatsAdminTool && predefined && realSiteId.equals(siteId); // common IChoiceRenderer allColumnsChoiceRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(object 
!= null) { String id = (String) object; if(ReportManager.HOW_SORT_DEFAULT.equals(id)) { return (String) new ResourceModel("default").getObject(); } if(StatsManager.T_NONE.equals(id)) { return (String) new ResourceModel("none").getObject(); } if(StatsManager.T_SITE.equals(id)) { return (String) new ResourceModel("report_option_site").getObject(); } if(StatsManager.T_USER.equals(id)) { return (String) new ResourceModel("report_option_user").getObject(); } if(StatsManager.T_TOOL.equals(id)) { return (String) new ResourceModel("report_option_tool").getObject(); } if(StatsManager.T_EVENT.equals(id)) { return (String) new ResourceModel("report_option_event").getObject(); } if(StatsManager.T_RESOURCE.equals(id)) { return (String) new ResourceModel("report_option_resource").getObject(); } if(StatsManager.T_RESOURCE_ACTION.equals(id)) { return (String) new ResourceModel("report_option_resourceaction").getObject(); } if(StatsManager.T_DATE.equals(id)) { return (String) new ResourceModel("report_option_date").getObject(); } if(StatsManager.T_TOTAL.equals(id)) { return (String) new ResourceModel("report_option_total").getObject(); } } return (String) new ResourceModel("default").getObject(); } public String getIdValue(Object object, int index) { return (String) object; } }; // site to report WebMarkupContainer siteContainer = new WebMarkupContainer("siteContainer"); siteContainer.setVisible(renderSiteSelectOption); form.add(siteContainer); List<String> reportSiteOptions = Arrays.asList(REPORT_THISSITE, REPORT_ALLSITES); IChoiceRenderer reportSiteRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(REPORT_THISSITE.equals(object)) { return (String) new ResourceModel("report_reportsite_this").getObject(); } if(REPORT_ALLSITES.equals(object)) { return (String) new ResourceModel("report_reportsite_all").getObject(); } return (String) new ResourceModel("report_reportsite_this").getObject(); } public String getIdValue(Object object, int index) { return 
(String) object; } }; DropDownChoice reportSite = new DropDownChoice("reportSite",new PropertyModel(this, "reportSite") , reportSiteOptions, reportSiteRenderer); reportSite.setMarkupId("reportSite"); reportSite.setOutputMarkupId(true); siteContainer.add(reportSite); if(getReportParams().getSiteId() == null) { this.reportSiteOpt = REPORT_ALLSITES; }else { this.reportSiteOpt = REPORT_THISSITE; } // totals by Select howTotalsBy = new Select("reportParams.howTotalsBy"); howTotalsBy.setRequired(true); howTotalsBy.setMarkupId("howTotalsBy"); howTotalsBy.setOutputMarkupId(true); form.add(howTotalsBy); RepeatingView howTotalsByOptions = new RepeatingView("howTotalsByOptions"); howTotalsBy.add(howTotalsByOptions); addGroupOptions(howTotalsByOptions); // sorting WebMarkupContainer trSortBy = new WebMarkupContainer("trSortBy"); trSortBy.setVisible(renderSortAscendingOption); form.add(trSortBy); CheckBox howSortCheck = new CheckBox("reportParams.howSort"); howSortCheck.setMarkupId("howSortCheck"); howSortCheck.setOutputMarkupId(true); trSortBy.add(howSortCheck); // sort options List<String> sortOptions = null; if(renderSiteSortOption) { sortOptions = Arrays.asList(/*StatsManager.T_USER,*/ StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_DATE, StatsManager.T_TOTAL, StatsManager.T_SITE); }else{ sortOptions = Arrays.asList(/*StatsManager.T_USER,*/ StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_DATE, StatsManager.T_TOTAL); } DropDownChoice howSortBy = new DropDownChoice("reportParams.howSortBy", sortOptions, allColumnsChoiceRenderer); howSortBy.setMarkupId("howSortBy"); howSortBy.setOutputMarkupId(true); trSortBy.add(howSortBy); CheckBox howSortAscending = new CheckBox("reportParams.howSortAscending"); howSortAscending.setMarkupId("howSortAscending"); howSortAscending.setOutputMarkupId(true); trSortBy.add(howSortAscending); // max results CheckBox howMaxResultsCheck = new 
CheckBox("reportParams.howLimitedMaxResults"); howMaxResultsCheck.setMarkupId("howMaxResultsCheck"); howMaxResultsCheck.setOutputMarkupId(true); form.add(howMaxResultsCheck); TextField howMaxResults = new TextField("reportParams.howMaxResults",int.class) { @Override public String getInput() { String[] input = getInputAsArray(); if(input == null || input.length == 0){ return "0"; }else{ return trim(input[0]); } } }; howMaxResults.setMarkupId("howMaxResults"); howMaxResults.setOutputMarkupId(true); form.add(howMaxResults); // presentation List<String> howPresentationOptions = Arrays.asList(ReportManager.HOW_PRESENTATION_TABLE, ReportManager.HOW_PRESENTATION_CHART, ReportManager.HOW_PRESENTATION_BOTH); IChoiceRenderer howPresentationChoiceRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(ReportManager.HOW_PRESENTATION_TABLE.equals(object)) { return new ResourceModel("report_howpresentation_table").getObject(); } if(ReportManager.HOW_PRESENTATION_CHART.equals(object)) { return new ResourceModel("report_howpresentation_chart").getObject(); } if(ReportManager.HOW_PRESENTATION_BOTH.equals(object)) { return new ResourceModel("report_howpresentation_both").getObject(); } return object; } public String getIdValue(Object object, int index) { return (String) object; } }; DropDownChoice howPresentation = new DropDownChoice("reportParams.howPresentationMode", howPresentationOptions, howPresentationChoiceRenderer); howPresentation.setMarkupId("howPresentation"); howPresentation.setOutputMarkupId(true); form.add(howPresentation); // chart type List<String> howChartTypeOptions = Arrays.asList( StatsManager.CHARTTYPE_BAR, /*StatsManager.CHARTTYPE_LINE,*/ StatsManager.CHARTTYPE_PIE, StatsManager.CHARTTYPE_TIMESERIES, StatsManager.CHARTTYPE_TIMESERIESBAR); IChoiceRenderer howChartTypeChoiceRenderer = new IChoiceRenderer() { public Object getDisplayValue(Object object) { if(StatsManager.CHARTTYPE_BAR.equals(object)) { return new 
ResourceModel("report_howchart_bar").getObject(); } if(StatsManager.CHARTTYPE_LINE.equals(object)) { return new ResourceModel("report_howchart_line").getObject(); } if(StatsManager.CHARTTYPE_PIE.equals(object)) { return new ResourceModel("report_howchart_pie").getObject(); } if(StatsManager.CHARTTYPE_TIMESERIES.equals(object)) { return new ResourceModel("report_howchart_timeseries").getObject(); } if(StatsManager.CHARTTYPE_TIMESERIESBAR.equals(object)) { return new ResourceModel("report_howchart_timeseries_bar").getObject(); } return object; } public String getIdValue(Object object, int index) { return (String) object; } }; DropDownChoice howChartType = new DropDownChoice("reportParams.howChartType", howChartTypeOptions, howChartTypeChoiceRenderer); howChartType.setMarkupId("howChartType"); howChartType.setOutputMarkupId(true); form.add(howChartType); // chart source, chart series List<String> howChartSourceOptions = null; List<String> howChartCategorySourceOptions = null; List<String> howChartSeriesSourceOptions = null; if(renderSiteSortOption) { howChartSourceOptions = Arrays.asList(StatsManager.T_SITE, StatsManager.T_USER, StatsManager.T_TOOL, StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_DATE); howChartCategorySourceOptions = Arrays.asList(StatsManager.T_NONE, StatsManager.T_SITE, StatsManager.T_USER, StatsManager.T_TOOL, StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_DATE); howChartSeriesSourceOptions = Arrays.asList(StatsManager.T_SITE, StatsManager.T_USER, StatsManager.T_TOOL, StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_TOTAL); }else{ howChartSourceOptions = Arrays.asList(StatsManager.T_USER, StatsManager.T_TOOL, StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_DATE); howChartCategorySourceOptions = Arrays.asList(StatsManager.T_NONE, StatsManager.T_TOOL, 
StatsManager.T_USER, StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_DATE); howChartSeriesSourceOptions = Arrays.asList(StatsManager.T_USER, StatsManager.T_TOOL, StatsManager.T_EVENT, StatsManager.T_RESOURCE, StatsManager.T_RESOURCE_ACTION, StatsManager.T_TOTAL); } DropDownChoice howChartSource = new DropDownChoice("reportParams.howChartSource", howChartSourceOptions, allColumnsChoiceRenderer); howChartSource.setMarkupId("howChartSource"); howChartSource.setOutputMarkupId(true); form.add(howChartSource); DropDownChoice howChartCategorySource = new DropDownChoice("reportParams.howChartCategorySource", howChartCategorySourceOptions, allColumnsChoiceRenderer); howChartCategorySource.setMarkupId("howChartCategorySource"); howChartCategorySource.setOutputMarkupId(true); form.add(howChartCategorySource); DropDownChoice howChartSeriesSource = new DropDownChoice("reportParams.howChartSeriesSource", howChartSeriesSourceOptions, allColumnsChoiceRenderer); howChartSeriesSource.setMarkupId("howChartSeriesSource"); howChartSeriesSource.setOutputMarkupId(true); form.add(howChartSeriesSource); } @SuppressWarnings("serial") private void addTools(final RepeatingView rv) { List<SelectOption> tools = new ArrayList<SelectOption>(); List<ToolInfo> siteTools = Locator.getFacade().getEventRegistryService().getEventRegistry(siteId, getPrefsdata().isListToolEventsOnlyAvailableInSite()); Iterator<ToolInfo> i = siteTools.iterator(); // add tools while(i.hasNext()){ final ToolInfo toolInfo = i.next(); if(isToolSuported(toolInfo)) { tools.add(new SelectOption("option", new ToolModel(toolInfo))); } } WebMarkupContainer optgroupItem = new WebMarkupContainer(rv.newChildId()); optgroupItem.setRenderBodyOnly(true); rv.add(optgroupItem); IStylableOptionRenderer optionRenderer = new IStylableOptionRenderer() { public String getDisplayValue(Object object) { SelectOption opt = (SelectOption) object; return ((ToolModel) opt.getDefaultModel()).getToolName(); 
} public IModel getModel(Object value) { SelectOption opt = (SelectOption) value; return new Model(((ToolModel) opt.getDefaultModel()).getToolId()); } public String getStyle(Object object) { SelectOption opt = (SelectOption) object; ToolModel toolModel = (ToolModel) opt.getDefaultModel(); String toolId = toolModel.getToolId(); if(!ReportManager.WHAT_EVENTS_ALLTOOLS.equals(toolId)) { String toolIconPath = "background-image: url(" + Locator.getFacade().getEventRegistryService().getToolIcon(toolId) + ");"; String style = "background-position:left center; background-repeat:no-repeat; margin-left:3px; padding-left:20px; "+toolIconPath; return style; } return null; } }; Collections.sort(tools, getOptionRendererComparator(collator, optionRenderer)); // "all" tools (insert in position 0 tools.add(0, new SelectOption("option", new ToolModel(ReportManager.WHAT_EVENTS_ALLTOOLS, ReportManager.WHAT_EVENTS_ALLTOOLS))); StylableSelectOptions selectOptions = new StylableSelectOptions("selectOptions", tools, optionRenderer); selectOptions.setRenderBodyOnly(true); optgroupItem.add(selectOptions); } @SuppressWarnings("serial") private void addEvents(final RepeatingView rv) { List<ToolInfo> siteTools = Locator.getFacade().getEventRegistryService().getEventRegistry(siteId, getPrefsdata().isListToolEventsOnlyAvailableInSite()); Collections.sort(siteTools, getToolInfoComparator(collator)); // add events Iterator<ToolInfo> i = siteTools.iterator(); while(i.hasNext()){ ToolInfo toolInfo = i.next(); if(isToolSuported(toolInfo)) { List<EventInfo> eventInfos = toolInfo.getEvents(); List<SelectOption> events = new ArrayList<SelectOption>(); Iterator<EventInfo> iE = eventInfos.iterator(); while(iE.hasNext()){ EventInfo e = iE.next(); SelectOption opt = new SelectOption("option", new EventModel(e)); events.add(opt); } WebMarkupContainer optgroupItem = new WebMarkupContainer(rv.newChildId()); optgroupItem.setRenderBodyOnly(true); rv.add(optgroupItem); String toolIconPath = "background-image: 
url(" + Locator.getFacade().getEventRegistryService().getToolIcon(toolInfo.getToolId()) + ");"; String style = "background-position:left top; background-repeat:no-repeat; margin-left:3px; padding-left:20px; "+toolIconPath; String toolName = Locator.getFacade().getEventRegistryService().getToolName(toolInfo.getToolId()); StylableSelectOptionsGroup group = new StylableSelectOptionsGroup("group", new Model(toolName), new Model(style)); optgroupItem.add(group); SelectOptions selectOptions = new SelectOptions("selectOptions", events, new IOptionRenderer() { public String getDisplayValue(Object object) { SelectOption opt = (SelectOption) object; return ((EventModel) opt.getDefaultModel()).getEventName(); } public IModel getModel(Object value) { SelectOption opt = (SelectOption) value; return new Model(((EventModel) opt.getDefaultModel()).getEventId()); } }); selectOptions.setRenderBodyOnly(true); group.add(selectOptions); } } } @SuppressWarnings("serial") private void addUsers(final RepeatingView rv) { if(usersLoaded) { return; } ajaxUpdateLock.lock(); try{ List<SelectOption> users = new ArrayList<SelectOption>(); // anonymous access if(Locator.getFacade().getStatsManager().isShowAnonymousAccessEvents()) { SelectOption anon = new SelectOption("option", new Model("?")); users.add(anon); } // site users Set<String> siteUsers = null; try{ siteUsers = Locator.getFacade().getSiteService().getSite(siteId).getUsers(); }catch(IdUnusedException e){ LOG.warn("Site does not exist: " + siteId); siteUsers = new HashSet<String>(); } Iterator<String> i = siteUsers.iterator(); while(i.hasNext()){ String userId = i.next(); if(userId != null) { SelectOption opt = new SelectOption("option", new Model(userId)); opt.setEscapeModelStrings(true); users.add(opt); } } WebMarkupContainer optgroupItem = new WebMarkupContainer(rv.newChildId()); optgroupItem.setRenderBodyOnly(true); rv.add(optgroupItem); IOptionRenderer optionRenderer = new IOptionRenderer() { public String getDisplayValue(Object 
object) { SelectOption opt = (SelectOption) object; String userId = (String) opt.getDefaultModel().getObject(); if(("?").equals(userId)) { return Web.escapeHtml( (String) new ResourceModel("user_anonymous_access").getObject() ); }else{ User u = null; try{ u = Locator.getFacade().getUserDirectoryService().getUser(userId); }catch(UserNotDefinedException e){ return Web.escapeHtml(userId); } StringBuilder buff = new StringBuilder(); buff.append(Locator.getFacade().getStatsManager().getUserNameForDisplay(u)); buff.append(" ("); buff.append(u.getDisplayId()); buff.append(")"); return Web.escapeHtml(buff.toString()); } } public IModel getModel(Object value) { SelectOption opt = (SelectOption) value; return new Model( (String) opt.getDefaultModel().getObject() ); } }; Collections.sort(users, getOptionRendererComparator(collator, optionRenderer)); SelectOptions selectOptions = new SelectOptions("selectOptions", users, optionRenderer); selectOptions.setRenderBodyOnly(true); optgroupItem.add(selectOptions); usersLoaded = true; }finally{ ajaxUpdateLock.unlock(); } } @SuppressWarnings("serial") private void addGroupOptions(final RepeatingView rv) { boolean isSiteStatsAdminTool = Locator.getFacade().getStatsAuthz().isSiteStatsAdminPage(); boolean renderAdminOptions = isSiteStatsAdminTool && !predefined && realSiteId.equals(siteId); List<String> totalsOptions = new ArrayList<String>(); totalsOptions.add(StatsManager.T_USER); totalsOptions.add(StatsManager.T_TOOL); totalsOptions.add(StatsManager.T_EVENT); totalsOptions.add(StatsManager.T_RESOURCE); totalsOptions.add(StatsManager.T_RESOURCE_ACTION); totalsOptions.add(StatsManager.T_DATE); if(renderAdminOptions) { totalsOptions.add(StatsManager.T_SITE); } // add grouping options List<SelectOption> selectOptionList = new ArrayList<SelectOption>(); Iterator<String> i = totalsOptions.iterator(); while(i.hasNext()){ String totalOpt = i.next(); SelectOption so = new SelectOption("option", new Model(totalOpt)); 
so.setEscapeModelStrings(false); selectOptionList.add(so); } WebMarkupContainer optgroupItem = new WebMarkupContainer(rv.newChildId()); optgroupItem.setRenderBodyOnly(true); rv.add(optgroupItem); final IOptionRenderer optionRenderer = new IOptionRenderer() { public String getDisplayValue(Object o) { SelectOption opt = (SelectOption) o; Object object = opt.getDefaultModel().getObject(); if(StatsManager.T_USER.equals(object)) { return (String) new ResourceModel("report_option_user").getObject(); } if(StatsManager.T_TOOL.equals(object)) { return (String) new ResourceModel("report_option_tool").getObject(); } if(StatsManager.T_EVENT.equals(object)) { return (String) new ResourceModel("report_option_event").getObject(); } if(StatsManager.T_RESOURCE.equals(object)) { return (String) new ResourceModel("report_option_resource").getObject(); } if(StatsManager.T_RESOURCE_ACTION.equals(object)) { return (String) new ResourceModel("report_option_resourceaction").getObject(); } if(StatsManager.T_DATE.equals(object)) { return (String) new ResourceModel("report_option_date").getObject(); } if(StatsManager.T_LASTDATE.equals(object)) { return (String) new ResourceModel("report_option_lastdate").getObject(); } if(StatsManager.T_SITE.equals(object)) { return (String) new ResourceModel("report_option_site").getObject(); } return (String) object; } public IModel getModel(Object value) { SelectOption opt = (SelectOption) value; return opt.getDefaultModel(); } }; SelectOptions selectOptions = new SelectOptions("selectOptions", selectOptionList, optionRenderer); selectOptions.setRenderBodyOnly(true); selectOptions.setEscapeModelStrings(false); optgroupItem.add(selectOptions); } private List<String> getGroups() { List<String> groups = new ArrayList<String>(); try{ Collection<Group> groupCollection = Locator.getFacade().getSiteService().getSite(siteId).getGroups(); Iterator<Group> i = groupCollection.iterator(); while(i.hasNext()){ Group g = i.next(); groups.add(g.getId()); } 
}catch(IdUnusedException e){ LOG.warn("Site does not exist: " + siteId); } return groups; } private List<String> getRoles() { List<String> roles = new ArrayList<String>(); try{ Set<Role> roleSet = Locator.getFacade().getSiteService().getSite(siteId).getRoles(); Iterator<Role> i = roleSet.iterator(); while(i.hasNext()){ Role r = i.next(); roles.add(r.getId()); } }catch(IdUnusedException e){ LOG.warn("Site does not exist: " + siteId); } return roles; } private boolean isToolSuported(final ToolInfo toolInfo) { if(Locator.getFacade().getStatsManager().isEventContextSupported()){ return true; }else{ List<ToolInfo> siteTools = Locator.getFacade().getEventRegistryService().getEventRegistry(siteId, getPrefsdata().isListToolEventsOnlyAvailableInSite()); Iterator<ToolInfo> i = siteTools.iterator(); while (i.hasNext()){ ToolInfo t = i.next(); if(t.getToolId().equals(toolInfo.getToolId())){ EventParserTip parserTip = t.getEventParserTip(); if(parserTip != null && parserTip.getFor().equals(StatsManager.PARSERTIP_FOR_CONTEXTID)){ return true; } } } } return false; } public static final Comparator<String> getStringComparator(final Collator collator){ return new Comparator<String>(){ public int compare(String o1, String o2) { return collator.compare(o1, o2); } }; } public static final Comparator<ToolInfo> getToolInfoComparator(final Collator collator){ return new Comparator<ToolInfo>(){ public int compare(ToolInfo o1, ToolInfo o2) { String toolName1 = Locator.getFacade().getEventRegistryService().getToolName(o1.getToolId()); String toolName2 = Locator.getFacade().getEventRegistryService().getToolName(o2.getToolId()); return collator.compare(toolName1, toolName2); } }; } public static final Comparator<Object> getOptionRendererComparator(final Collator collator, final IOptionRenderer renderer){ return new Comparator<Object>(){ public int compare(Object o1, Object o2) { return collator.compare( renderer.getDisplayValue(o1), renderer.getDisplayValue(o2) ); } }; } public static final 
Comparator<Object> getChoiceRendererComparator(final Collator collator, final IChoiceRenderer renderer){ return new Comparator<Object>(){ public int compare(Object o1, Object o2) { return collator.compare( renderer.getDisplayValue(o1), renderer.getDisplayValue(o2) ); } }; } private PrefsData getPrefsdata() { if(prefsdata == null) { prefsdata = Locator.getFacade().getStatsManager().getPreferences(siteId, true); } return prefsdata; } private boolean validReportParameters() { Site site = null; try{ site = Locator.getFacade().getSiteService().getSite(siteId); }catch(IdUnusedException e){ LOG.error("No site with id: "+siteId); } // check WHAT if(getReportParams().getWhat().equals(ReportManager.WHAT_EVENTS) && getReportParams().getWhatEventSelType().equals(ReportManager.WHAT_EVENTS_BYTOOL) && (getReportParams().getWhatToolIds() == null || getReportParams().getWhatToolIds().size() == 0)){ error((String) new ResourceModel("report_err_notools").getObject()); } if(getReportParams().getWhat().equals(ReportManager.WHAT_EVENTS) && getReportParams().getWhatEventSelType().equals(ReportManager.WHAT_EVENTS_BYEVENTS) && (getReportParams().getWhatEventIds() == null || getReportParams().getWhatEventIds().size() == 0)) { error((String) new ResourceModel("report_err_noevents").getObject()); } if(getReportParams().getWhat().equals(ReportManager.WHAT_RESOURCES) && getReportParams().isWhatLimitedResourceIds() && (getReportParams().getWhatResourceIds() == null || getReportParams().getWhatResourceIds().size() == 0)){ error((String) new ResourceModel("report_err_noresources").getObject()); } // check WHEN if(getReportParams().getWhen().equals(ReportManager.WHEN_CUSTOM) && (getReportParams().getWhenFrom() == null || getReportParams().getWhenTo() == null)) { error((String) new ResourceModel("report_err_nocustomdates").getObject()); } // check WHO if(getReportParams().getWho().equals(ReportManager.WHO_ROLE)){ if(site.getUsersHasRole(getReportParams().getWhoRoleId()).isEmpty()) error((String) new 
ResourceModel("report_err_emptyrole").getObject()); }else if(getReportParams().getWho().equals(ReportManager.WHO_GROUPS)){ if(getReportParams().getWhoGroupId() == null || getReportParams().getWhoGroupId().equals("")) error((String) new ResourceModel("report_err_nogroup").getObject()); else if(site.getGroup(getReportParams().getWhoGroupId()).getUsers().isEmpty()) error((String) new ResourceModel("report_err_emptygroup").getObject()); }else if(getReportParams().getWho().equals(ReportManager.WHO_CUSTOM) && (getReportParams().getWhoUserIds() == null || getReportParams().getWhoUserIds().size() == 0)){ error((String) new ResourceModel("report_err_nousers").getObject()); } // check HOW if(getReportParams().getHowTotalsBy() != null){ if(getReportParams().getHowSortBy().length() == 0) { error((String) new ResourceModel("report_err_totalsbynone").getObject()); } if(getReportParams().getWhat().equals(ReportManager.WHAT_EVENTS) && (getReportParams().getHowTotalsBy().contains(StatsManager.T_RESOURCE) || getReportParams().getHowTotalsBy().contains(StatsManager.T_RESOURCE_ACTION) )) { error((String) new ResourceModel("report_err_totalsbyevent").getObject()); }else if(getReportParams().getWhat().equals(ReportManager.WHAT_RESOURCES) && getReportParams().getHowTotalsBy().contains(StatsManager.T_EVENT)) { error((String) new ResourceModel("report_err_totalsbyresource").getObject()); } } if(getReportParams().isHowSort() && getReportParams().getHowSortBy() != null && !getReportParams().getHowSortBy().equals(ReportManager.HOW_SORT_DEFAULT)){ if(!StatsManager.T_TOTAL.equals(getReportParams().getHowSortBy()) && !getReportParams().getHowTotalsBy().contains(getReportParams().getHowSortBy()) ){ getReportParams().setHowSort(false); getReportParams().setHowSortBy(null); } } return !hasErrorMessage(); } public String getReportSite() { return reportSiteOpt; } public void setReportSite(String reportSiteOpt) { this.reportSiteOpt = reportSiteOpt; if(REPORT_THISSITE.equals(reportSiteOpt)) { 
getReportParams().setSiteId(siteId); }else if(REPORT_ALLSITES.equals(reportSiteOpt)) { getReportParams().setSiteId(null); } } public String getTitleLocalized() { return (String) new ResourceModel(getReportDef().getTitleBundleKey()).getObject(); } public String getDescriptionLocalized() { return (String) new ResourceModel(getReportDef().getDescriptionBundleKey()).getObject(); } public ReportDef getReportDef() { return (ReportDef) this.reportDefModel.getObject(); } public void setReportParams(ReportParams reportParams) { getReportDef().setReportParams(reportParams); } public ReportParams getReportParams() { return getReportDef().getReportParams(); } /** Subclass of Select that fixes behavior when used with AjaxFormChoiceComponentUpdatingBehavior.*/ static class MultipleSelect extends Select { private static final long serialVersionUID = 1L; public MultipleSelect(String id) { super(id); } @Override public void updateModel() { Object converted = getConvertedInput(); Collection modelCollection = new ArrayList(); modelChanging(); if(converted != null){ modelCollection.addAll((Collection) converted); } modelChanged(); getModel().setObject(modelCollection); } } }
/* *******************************************************************
 *
 * Copyright 2016 Samsung Electronics All Rights Reserved.
 *
 *-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
 */
package org.iotivity.cloud.accountserver.resources;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.iotivity.cloud.accountserver.AccountServerManager;
import org.iotivity.cloud.accountserver.Const;
import org.iotivity.cloud.accountserver.util.CoapMessageBuilder;
import org.iotivity.cloud.accountserver.util.JSONUtil;
import org.iotivity.cloud.base.Resource;
import org.iotivity.cloud.base.protocols.coap.CoapRequest;
import org.iotivity.cloud.base.protocols.coap.CoapResponse;
import org.iotivity.cloud.base.protocols.coap.enums.CoapMethod;
import org.iotivity.cloud.base.protocols.coap.enums.CoapStatus;
import org.iotivity.cloud.util.Logger;

import io.netty.channel.ChannelHandlerContext;

/**
 * This class provides a set of APIs to register account information of
 * authorized user.
 *
 * Only the POST method is supported; the query parameter
 * {@code Const.REQ_TYPE} selects between account registration and login.
 */
public class AuthResource extends Resource {

    public AuthResource() {
        setUri(Const.AUTH_URI);
    }

    /**
     * Entry point for incoming CoAP requests. Dispatches POST requests to
     * {@link #handlePostRequest}; every other method is logged and ignored.
     *
     * @param ctx
     *            ChannelHandlerContext of request message (may be null;
     *            logged and ignored)
     * @param request
     *            CoAP request message (may be null; logged and ignored)
     */
    @Override
    public void onRequestReceived(ChannelHandlerContext ctx,
            CoapRequest request) {

        Logger.d("AuthResource IN");

        if (ctx == null || request == null) {
            Logger.d("ctx or request msg is null");
            return;
        }

        CoapMethod method = request.getRequestMethod();
        switch (method) {
            case POST:
                try {
                    handlePostRequest(ctx, request);
                } catch (Exception e) {
                    // Log through the server logger instead of
                    // e.printStackTrace() so the failure reaches the
                    // configured log output.
                    Logger.w("exception while handling POST request: " + e);
                }
                break;

            default:
                Logger.w("method(" + method + ") is not supported");
                break;
        }
    }

    /**
     * API for handling POST message
     *
     * @param ctx
     *            ChannelHandlerContext of request message
     * @param request
     *            CoAP request message
     * @throws Exception
     *             if the request type query parameter is missing or not one
     *             of the supported values
     */
    private void handlePostRequest(ChannelHandlerContext ctx,
            CoapRequest request) throws Exception {

        String reqType = extractQuery(request, Const.REQ_TYPE);
        if (reqType == null) {
            throw new IllegalArgumentException("request type is null in query!");
        }

        CoapResponse response;
        switch (reqType) {
            case Const.TYPE_REGISTER:
                response = handleRegisterRequest(request);
                break;

            case Const.TYPE_LOGIN:
                response = handleLoginRequest(request);
                break;

            default:
                throw new IllegalArgumentException(
                        "request type is not supported");
        }

        ctx.write(response);
    }

    /**
     * Resolves the session code in the request payload to a user id.
     * Responds CREATED with the user id on success, otherwise
     * INTERNAL_SERVER_ERROR.
     */
    private CoapResponse handleLoginRequest(CoapRequest request) {

        String payload = request.getPayloadString();
        JSONUtil util = new JSONUtil();
        String sessionCode = util.parseJSON(payload,
                Const.REQUEST_SESSION_CODE);
        Logger.d("sessionCode: " + sessionCode);

        AccountServerManager oauthServerManager = new AccountServerManager();
        String userId = oauthServerManager.requestUserId(sessionCode);
        Logger.d("userId: " + userId);

        CoapMessageBuilder responseMessage = new CoapMessageBuilder();
        CoapResponse coapResponse;

        if (userId != null) {
            ResponseObject response = new ResponseObject();
            response.setUserId(userId);

            String responseJson = convertLoginResponseToJson(response);
            Logger.d("responseJson: " + responseJson);

            coapResponse = responseMessage.buildCoapResponse(
                    request.getToken(), responseJson, CoapStatus.CREATED);
        } else {
            coapResponse = responseMessage.buildCoapResponse(
                    request.getToken(), CoapStatus.INTERNAL_SERVER_ERROR);
        }

        return coapResponse;
    }

    /**
     * Exchanges the OAuth auth code in the payload for a user id and
     * registers the account, issuing a session code. Responds CREATED with
     * user id + session code on success, otherwise UNAUTHORIZED.
     */
    private CoapResponse handleRegisterRequest(CoapRequest request) {

        String payload = request.getPayloadString();
        JSONUtil util = new JSONUtil();
        String authCode = util.parseJSON(payload, Const.REQUEST_AUTH_CODE);
        String authServer = util.parseJSON(payload, Const.REQUEST_AUTH_SERVER);
        Logger.d("authCode: " + authCode + ", authServer: " + authServer);

        AccountServerManager oauthServerManager = new AccountServerManager();
        String userId = oauthServerManager.requestUserId(authCode, authServer);
        String sessionCode = oauthServerManager.registerUserAccount(userId);
        Logger.d("userId: " + userId + ", sessionCode: " + sessionCode);

        CoapMessageBuilder responseMessage = new CoapMessageBuilder();
        CoapResponse coapResponse;

        if (userId != null && sessionCode != null) {
            ResponseObject response = new ResponseObject();
            response.setSessionCode(sessionCode);
            response.setUserId(userId);

            String responseJson = convertRegisterResponseToJson(response);
            Logger.d("responseJson: " + responseJson);

            coapResponse = responseMessage.buildCoapResponse(
                    request.getToken(), responseJson, CoapStatus.CREATED);
        } else {
            coapResponse = responseMessage.buildCoapResponse(
                    request.getToken(), CoapStatus.UNAUTHORIZED);
        }

        return coapResponse;
    }

    /** Serializes user id + session code into the register-response JSON. */
    private String convertRegisterResponseToJson(ResponseObject response) {

        Map<Object, Object> responseMap = new HashMap<Object, Object>();

        String sessionCode = response.getSessionCode();
        String userId = response.getUserId();

        if (userId != null)
            responseMap.put(Const.RESPONSE_USER_ID, userId);
        if (sessionCode != null)
            responseMap.put(Const.RESPONSE_SESSION_CODE, sessionCode);

        JSONUtil jsonUtil = new JSONUtil();
        return jsonUtil.writeJSON(responseMap);
    }

    /** Serializes the user id into the login-response JSON. */
    private String convertLoginResponseToJson(ResponseObject response) {

        Map<Object, Object> responseMap = new HashMap<Object, Object>();

        String userId = response.getUserId();
        if (userId != null)
            responseMap.put(Const.RESPONSE_USER_ID, userId);

        JSONUtil jsonUtil = new JSONUtil();
        return jsonUtil.writeJSON(responseMap);
    }

    /**
     * Returns the value of {@code key} from the request's URI query
     * segments, or null when the key is absent. When the key occurs more
     * than once, the last occurrence wins (preserves original behavior).
     */
    private String extractQuery(CoapRequest request, String key) {

        String value = null;
        List<String> segments = request.getUriQuerySegments();
        if (segments == null) {
            return null;
        }

        for (String segment : segments) {
            // limit=2 keeps any '=' inside the value and guarantees a
            // two-element array for "key=" segments; the length check
            // guards against a bare "key" segment with no '=' at all
            // (the original code indexed pair[1] unconditionally and
            // could throw ArrayIndexOutOfBoundsException here).
            String[] pair = segment.split("=", 2);
            if (pair.length == 2 && pair[0].equals(key)) {
                value = pair[1];
            }
        }

        return value;
    }
}
/*
 * reserved comment block
 * DO NOT REMOVE OR ALTER!
 */
/*
 * Copyright 2005 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
/*
 * Copyright (c) 2005, 2008, Oracle and/or its affiliates. All rights reserved.
 */
/*
 * $Id: DOMKeyInfo.java,v 1.2 2008/07/24 15:20:32 mullan Exp $
 */
package org.jcp.xml.dsig.internal.dom;

import javax.xml.crypto.*;
import javax.xml.crypto.dsig.*;
import javax.xml.crypto.dsig.dom.DOMSignContext;
import javax.xml.crypto.dsig.keyinfo.KeyInfo;
import javax.xml.crypto.dom.*;
import java.security.Provider;
import java.util.*;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * DOM-based implementation of KeyInfo.
 *
 * @author Sean Mullan
 */
public final class DOMKeyInfo extends DOMStructure implements KeyInfo {

    private final String id;
    // Unmodifiable list of key information types (XMLStructure entries).
    private final List keyInfoTypes;

    /**
     * Creates a <code>DOMKeyInfo</code>.
     *
     * @param content a list of one or more {@link XMLStructure}s representing
     *    key information types. The list is defensively copied to protect
     *    against subsequent modification.
     * @param id an ID attribute
     * @throws NullPointerException if <code>content</code> is <code>null</code>
     * @throws IllegalArgumentException if <code>content</code> is empty
     * @throws ClassCastException if <code>content</code> contains any entries
     *    that are not of type {@link XMLStructure}
     */
    public DOMKeyInfo(List content, String id) {
        if (content == null) {
            throw new NullPointerException("content cannot be null");
        }
        // Defensive copy so later mutation of the caller's list cannot
        // affect this instance.
        List typesCopy = new ArrayList(content);
        if (typesCopy.isEmpty()) {
            throw new IllegalArgumentException("content cannot be empty");
        }
        for (int i = 0, size = typesCopy.size(); i < size; i++) {
            if (!(typesCopy.get(i) instanceof XMLStructure)) {
                throw new ClassCastException
                    ("content["+i+"] is not a valid KeyInfo type");
            }
        }
        this.keyInfoTypes = Collections.unmodifiableList(typesCopy);
        this.id = id;
    }

    /**
     * Creates a <code>DOMKeyInfo</code> from XML.
     *
     * @param kiElem KeyInfo element
     */
    public DOMKeyInfo(Element kiElem, XMLCryptoContext context,
        Provider provider) throws MarshalException {
        // get Id attribute, if specified
        id = DOMUtils.getAttributeValue(kiElem, "Id");

        // get all children nodes
        NodeList nl = kiElem.getChildNodes();
        int length = nl.getLength();
        if (length < 1) {
            throw new MarshalException
                ("KeyInfo must contain at least one type");
        }
        List content = new ArrayList(length);
        for (int i = 0; i < length; i++) {
            Node child = nl.item(i);
            // ignore all non-Element nodes
            if (child.getNodeType() != Node.ELEMENT_NODE) {
                continue;
            }
            Element childElem = (Element) child;
            String localName = childElem.getLocalName();
            // Dispatch on local name to the matching DOM* wrapper type.
            if (localName.equals("X509Data")) {
                content.add(new DOMX509Data(childElem));
            } else if (localName.equals("KeyName")) {
                content.add(new DOMKeyName(childElem));
            } else if (localName.equals("KeyValue")) {
                content.add(new DOMKeyValue(childElem));
            } else if (localName.equals("RetrievalMethod")) {
                content.add
                    (new DOMRetrievalMethod(childElem, context, provider));
            } else if (localName.equals("PGPData")) {
                content.add(new DOMPGPData(childElem));
            } else { //may be MgmtData, SPKIData or element from other namespace
                content.add(new javax.xml.crypto.dom.DOMStructure((childElem)));
            }
        }
        keyInfoTypes = Collections.unmodifiableList(content);
    }

    public String getId() {
        return id;
    }

    public List getContent() {
        return keyInfoTypes;
    }

    public void marshal(XMLStructure parent, XMLCryptoContext context)
        throws MarshalException {
        if (parent == null) {
            throw new NullPointerException("parent is null");
        }

        Node pNode = ((javax.xml.crypto.dom.DOMStructure) parent).getNode();
        String dsPrefix = DOMUtils.getSignaturePrefix(context);
        Element kiElem = DOMUtils.createElement
            (DOMUtils.getOwnerDocument(pNode), "KeyInfo",
             XMLSignature.XMLNS, dsPrefix);
        // Declare the dsig namespace on the KeyInfo element itself, either
        // as the default namespace or bound to the signature prefix.
        if (dsPrefix == null || dsPrefix.length() == 0) {
            kiElem.setAttributeNS
                ("http://www.w3.org/2000/xmlns/", "xmlns", XMLSignature.XMLNS);
        } else {
            kiElem.setAttributeNS
                ("http://www.w3.org/2000/xmlns/", "xmlns:" + dsPrefix,
                 XMLSignature.XMLNS);
        }
        marshal(pNode, kiElem, null, dsPrefix, (DOMCryptoContext) context);
    }

    public void marshal(Node parent, String dsPrefix,
        DOMCryptoContext context) throws MarshalException {
        marshal(parent, null, dsPrefix, context);
    }

    public void marshal(Node parent, Node nextSibling, String dsPrefix,
        DOMCryptoContext context) throws MarshalException {
        Document ownerDoc = DOMUtils.getOwnerDocument(parent);

        Element kiElem = DOMUtils.createElement
            (ownerDoc, "KeyInfo", XMLSignature.XMLNS, dsPrefix);
        marshal(parent, kiElem, nextSibling, dsPrefix, context);
    }

    // Common marshalling tail: serialize each key info type into kiElem in
    // list order, then attach the Id attribute and insert into the parent.
    private void marshal(Node parent, Element kiElem, Node nextSibling,
        String dsPrefix, DOMCryptoContext context) throws MarshalException {
        // create and append KeyInfoType elements
        for (int i = 0, size = keyInfoTypes.size(); i < size; i++) {
            XMLStructure kiType = (XMLStructure) keyInfoTypes.get(i);
            if (kiType instanceof DOMStructure) {
                ((DOMStructure) kiType).marshal(kiElem, dsPrefix, context);
            } else {
                DOMUtils.appendChild(kiElem,
                    ((javax.xml.crypto.dom.DOMStructure) kiType).getNode());
            }
        }

        // append id attribute
        DOMUtils.setAttributeID(kiElem, "Id", id);
        parent.insertBefore(kiElem, nextSibling);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof KeyInfo)) {
            return false;
        }
        KeyInfo oki = (KeyInfo) o;

        boolean idsEqual = (id == null ? oki.getId() == null
                                       : id.equals(oki.getId()));

        return (keyInfoTypes.equals(oki.getContent()) && idsEqual);
    }

    /**
     * Fix: {@code equals(Object)} was overridden without {@code hashCode()},
     * violating the {@link Object#hashCode()} contract (equal objects must
     * have equal hash codes), which breaks hash-based collections that hold
     * KeyInfo instances. Computed from the same fields equals() compares.
     */
    @Override
    public int hashCode() {
        int result = keyInfoTypes.hashCode();
        result = 31 * result + (id == null ? 0 : id.hashCode());
        return result;
    }
}
/*******************************************************************************
 * Copyright (c) 2006-2010 eBay Inc. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *******************************************************************************/
package org.ebayopensource.turmeric.tools.codegen.util;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.WildcardType;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.jar.JarFile;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import javax.wsdl.xml.WSDLLocator;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.ebayopensource.turmeric.runtime.common.impl.utils.CallTrackingLogger;
import org.ebayopensource.turmeric.runtime.common.impl.utils.LogManager;
import org.ebayopensource.turmeric.tools.codegen.InputOptions;
import org.xml.sax.InputSource;
import org.ebayopensource.turmeric.runtime.codegen.common.PkgNSMappingType;
import org.ebayopensource.turmeric.runtime.codegen.common.PkgToNSMappingList;

/**
 * Provides utility methods for code generation tools.
 *
 * Static helpers for class-name/path conversions, file and directory
 * management, quiet resource closing, and classpath resource loading.
 * Note: most lookup-style methods return {@code null} (not empty values)
 * on bad input — callers are expected to check.
 *
 * @author rmandapati
 */
public class CodeGenUtil {

    // Prefix used when deriving a namespace from a package name.
    public static final String HTTP = "http://";
    public static final char PACKAGE_CLASS_DELIMITER = '.';

    private static CallTrackingLogger logger = LogManager.getInstance(CodeGenUtil.class);

    private static CallTrackingLogger getLogger(){
        return logger;
    }

    /** Returns true when the string is null, empty, or whitespace only. */
    public static boolean isEmptyString(String str) {
        return (str == null || str.trim().length() == 0);
    }

    /**
     * Converts a file path ending in ".java" or ".class" into a
     * dot-separated qualified class name (path separators become dots).
     * Returns null for null input; a path with neither extension is
     * converted as-is.
     */
    public static String toQualifiedClassName(String javaFilePath) {
        String filePathNoExt = null;

        if (javaFilePath == null) {
            return null;
        }

        boolean isJavaExtensionPresent = javaFilePath.endsWith(".java");
        if (isJavaExtensionPresent) {
            // strip the 5-character ".java" suffix
            filePathNoExt = javaFilePath.substring(0, javaFilePath.length() - 5);
        }

        boolean isClassExtensionPresent = javaFilePath.endsWith(".class");
        if (isClassExtensionPresent) {
            // strip the 6-character ".class" suffix
            filePathNoExt = javaFilePath.substring(0, javaFilePath.length() - 6);
        }

        if (filePathNoExt == null)
            filePathNoExt = javaFilePath;

        return filePathNoExt.replace('\\', '.').replace('/', '.');
    }

    /**
     * Strips the given source-root prefix from a java file path, then
     * converts the remainder to a qualified class name.
     */
    public static String getQualifiedClassName(String javaFilePath, String srcLocation) {
        int pkgStartPos = javaFilePath.indexOf(srcLocation);
        String qualifiedJavaFile = null;
        if (pkgStartPos > -1) {
            String normalizedSrcLoc = CodeGenUtil.normalizePath(srcLocation);
            int startPos = pkgStartPos + normalizedSrcLoc.length();
            qualifiedJavaFile = javaFilePath.substring(startPos);
        } else {
            qualifiedJavaFile = javaFilePath;
        }
        return toQualifiedClassName(qualifiedJavaFile);
    }

    /** Returns the package portion of a qualified class name, or "" if none. */
    public static String getPackageName(String className) {
        int idx = className.lastIndexOf(".");
        if (idx <= 0) {
            return "";
        }
        return className.substring(0, idx);
    }

    /*
     * This method is changed to private
     * Since hardcoding of "\\" makes it as OS dependent.
     * Use toOSFilePath() instead of this method.
     */
    private static String normalizePath(String path) {
        if (path == null) {
            return null;
        }
        // Ensure the path ends with a separator (either kind is accepted).
        if (path.endsWith("\\") || path.endsWith("/")) {
            return path;
        } else {
            return path + File.separatorChar;
        }
    }

    /**
     * Converts both '/' and '\\' separators to the platform separator and
     * guarantees a trailing separator. Returns null for null input.
     */
    public static String toOSFilePath(String path) {
        if (path == null) {
            return null;
        }
        String normaliedOSPath = path.replace('\\', File.separatorChar)
                .replace('/', File.separatorChar);
        return normalizePath(normaliedOSPath);
    }

    /** Joins a directory and file name using the platform separator. */
    public static String getFilePath(String dir, String fileName) {
        if (dir == null || fileName == null) {
            return null;
        }
        String filePath = toOSFilePath(dir) + fileName;
        return filePath;
    }

    /** Builds the .java source-file path for a class under the given source dir. */
    public static String toJavaSrcFilePath(String srcDir, Class<?> clazz) {
        if (srcDir == null || clazz == null) {
            return null;
        }
        String filePath = toJavaSrcFilePath(srcDir, clazz.getName());
        return filePath;
    }

    /** Builds the .java source-file path for a qualified name under srcDir. */
    public static String toJavaSrcFilePath(String srcDir, String qualifiedJavaName) {
        if (srcDir == null || qualifiedJavaName == null) {
            return null;
        }
        String filePath = toOSFilePath(srcDir) + convertToJavaSrcFilePath(qualifiedJavaName);
        return filePath;
    }

    // Converts "a.b.C" or "a.b.C.java" into "a/b/C.java" (OS separators).
    private static String convertToJavaSrcFilePath(String qualifiedJavaName) {
        if (isEmptyString(qualifiedJavaName)) {
            return qualifiedJavaName;
        }
        int dotJavaPos = qualifiedJavaName.lastIndexOf(".java");
        if (dotJavaPos > -1) {
            return convertToFilePath(qualifiedJavaName.substring(0, dotJavaPos), ".java");
        } else {
            return convertToFilePath(qualifiedJavaName, ".java");
        }
    }

    /** Replaces dots with path separators and appends the given suffix. */
    public static String convertToFilePath(String qualifiedJavaName, String suffix) {
        return qualifiedJavaName.replace('.', File.separatorChar) + suffix;
    }

    /** Drops a single trailing '.' from a package name, if present. */
    public static String normalizePackageName(String packageName) {
        if (isEmptyString(packageName) || !packageName.endsWith(".")) {
            return packageName;
        } else {
            return packageName.substring(0, packageName.length()-1);
        }
    }

    public static boolean isParameterizedType(Type type) {
        return (type instanceof ParameterizedType);
    }

    public static boolean isWildCardType(Type type) {
        return (type instanceof WildcardType);
    }

    public static boolean isGenericArrayType(Type type) {
        return (type instanceof GenericArrayType);
    }

    /** Upper-cases the first character if it is a lowercase letter. */
    public static String makeFirstLetterUpper(String str) {
        if (isEmptyString(str)) {
            return str;
        }
        char firstChar = str.charAt(0);
        if (Character.isLetter(firstChar) && Character.isLowerCase(firstChar)) {
            char[] chars = str.toCharArray();
            chars[0] = Character.toUpperCase(firstChar);
            return String.valueOf(chars);
        } else {
            return str;
        }
    }

    /** Lower-cases the first character if it is an uppercase letter. */
    public static String makeFirstLetterLower(String str) {
        if (isEmptyString(str)) {
            return str;
        }
        char firstChar = str.charAt(0);
        if (Character.isLetter(firstChar) && Character.isUpperCase(firstChar)) {
            char[] chars = str.toCharArray();
            chars[0] = Character.toLowerCase(firstChar);
            return String.valueOf(chars);
        } else {
            return str;
        }
    }

    /**
     * Returns the directory as a File.
     *
     * @throws IOException if the path does not exist or is not a directory
     */
    public static File getDir(String destDir) throws IOException {
        if (destDir == null) {
            return null;
        }
        File dir = new File(destDir);
        if(!dir.exists() || !dir.isDirectory()) {
            throw new IOException(destDir + ": non-existent directory");
        }
        return dir;
    }

    /** Builds "destLoc/suffixPath/serviceName" with platform separators. */
    public static String genDestFolderPath(
            String destLoc, String serviceName, String suffixPath) {
        StringBuilder destFolderPath = new StringBuilder();
        destFolderPath.append(toOSFilePath(destLoc));
        destFolderPath.append(toOSFilePath(suffixPath));
        destFolderPath.append(serviceName);
        return destFolderPath.toString();
    }

    /** Builds "destLoc/suffixLoc/" (suffix optional) with platform separators. */
    public static String genDestFolderPath(String destLoc, String suffixLoc) {
        if (isEmptyString(destLoc)) {
            return destLoc;
        }
        String destPath = toOSFilePath(destLoc);
        if (!isEmptyString(suffixLoc)) {
            destPath = destPath + toOSFilePath(suffixLoc);
        }
        return destPath;
    }

    /**
     * Derives an http namespace from a package name by reversing its
     * segments, e.g. "com.ebay.svc" -> "http://svc.ebay.com".
     * A single-segment package yields "http://<segment>"; an empty name
     * yields "" (see the branch on currentIndex == -1).
     */
    public static String getNSFromPackageName(String packageName) {
        StringBuffer strBuf = new StringBuffer();
        int prevIndex = packageName.length();
        int currentIndex = packageName.lastIndexOf(PACKAGE_CLASS_DELIMITER);
        if (currentIndex > 0) {
            strBuf.append(HTTP);
        } else if (prevIndex > 0) {
            // No dot at all: single-segment package name.
            strBuf.append(HTTP);
            strBuf.append(packageName);
            return strBuf.toString();
        } else if (currentIndex == -1) {
            return strBuf.toString();
        }
        // Walk the delimiters right-to-left, appending segments in reverse.
        while (currentIndex != -1) {
            strBuf.append(packageName.substring(currentIndex + 1, prevIndex));
            prevIndex = currentIndex;
            currentIndex = packageName.lastIndexOf(PACKAGE_CLASS_DELIMITER, prevIndex - 1);
            strBuf.append(PACKAGE_CLASS_DELIMITER);
            if (currentIndex == -1) {
                strBuf.append(packageName.substring(0, prevIndex));
            }
        }
        return strBuf.toString();
    }

    /** Creates the directory (and parents) if needed; returns it as a File. */
    public static File createDir(String dirPath) throws IOException {
        File dir = new File(dirPath);
        if(dir.exists()) {
            // It exists. all done.
            return dir;
        }
        if(dir.mkdirs() == false) {
            // Unable to create directories.
            throw new IOException("Failed to create dir(s) : " + dirPath);
        }
        return dir;
    }

    /** Returns true when the path names an existing file or directory. */
    public static boolean isFileExists(String filePath) {
        if (isEmptyString(filePath)) {
            return false;
        }
        File file = new File(filePath);
        return file.exists();
    }

    /** Returns true when the path names an existing directory. */
    public static boolean dirExists(String path) {
        if (isEmptyString(path)) {
            return false;
        }
        File dir = new File(path);
        return dir.exists() && dir.isDirectory();
    }

    /**
     * Deletes the file if it exists; no-op for null or missing files.
     *
     * @throws IOException if the deletion fails
     */
    public static void deleteFile(File file) throws IOException {
        if (file == null || !file.exists()) {
            return;
        }
        if (!file.delete()) {
            throw new IOException("Can't delete file : " + file.getPath());
        }
    }

    // Deletes all sub-dir, files under a dir,
    // and also deletes given dir
    public static boolean deleteDir(File dir) {
        if (dir.isDirectory()) {
            // NOTE(review): dir.list() can return null on I/O error, which
            // would NPE here — confirm callers only pass readable dirs.
            String[] children = dir.list();
            for (int i=0; i<children.length; i++) {
                boolean success = deleteDir(new File(dir, children[i]));
                if (!success) {
                    return false;
                }
            }
        }
        // The directory is now empty so delete it
        return dir.delete();
    }

    // Deletes all sub-dir, files under a dir,
    // it does not deletes given dir
    public static boolean deleteContentsOfDir(File dir) {
        if (dir.isDirectory()) {
            String[] children = dir.list();
            for (int i = 0; i < children.length; i++) {
                boolean success = deleteDir(new File(dir, children[i]));
                if (!success) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Opens a buffered writer for destDir/fileName, creating the directory
     * and deleting any previous file first. Uses the platform default
     * charset. Returns null when either argument is empty.
     */
    public static Writer getFileWriter(
            String destDir,
            String fileName) throws IOException {
        if (isEmptyString(destDir) || isEmptyString(fileName)) {
            return null;
        }
        File dir = createDir(destDir);
        File outputFile = new File(dir, fileName);
        // delete previous file, if exists
        deleteFile(outputFile);
        Charset defaultCharset = Charset.defaultCharset();
        FileOutputStream fileOutStream = new FileOutputStream(outputFile);
        OutputStreamWriter bw = new OutputStreamWriter(fileOutStream,defaultCharset);
        Writer buffWriter = new BufferedWriter(bw);
        return buffWriter;
    }

    /**
     * Opens a buffered reader for destDir/fileName using the platform
     * default charset. Returns null when either argument is empty.
     * NOTE(review): calls createDir() on the read path too — presumably
     * intentional, but verify against callers.
     */
    public static BufferedReader getFileReader(
            String destDir,
            String fileName) throws IOException {
        if (isEmptyString(destDir) || isEmptyString(fileName)) {
            return null;
        }
        File dir = createDir(destDir);
        File inFile = new File(dir, fileName);
        Charset defaultCharset = Charset.defaultCharset();
        FileInputStream fileInStream = new FileInputStream(inFile);
        InputStreamReader bw = new InputStreamReader(fileInStream,defaultCharset);
        BufferedReader buffReader = new BufferedReader(bw);
        return buffReader;
    }

    /**
     * Opens a FileOutputStream for destDir/fileName, creating the directory
     * and deleting any previous file first. Returns null on empty arguments.
     */
    public static OutputStream getFileOutputStream(
            String destDir,
            String fileName) throws IOException {
        if (isEmptyString(destDir) || isEmptyString(fileName)) {
            return null;
        }
        File dir = createDir(destDir);
        File outputFile = new File(dir, fileName);
        // delete previous file, if exists
        deleteFile(outputFile);
        FileOutputStream fileOutStream = new FileOutputStream(outputFile);
        return fileOutStream;
    }

    /** Closes both underlying streams of an InputSource, ignoring errors. */
    public static void closeQuietly(InputSource inputSource) {
        if (inputSource == null) {
            return; // nothing to do
        }
        closeQuietly(inputSource.getCharacterStream());
        closeQuietly(inputSource.getByteStream());
    }

    /** Closes a WSDLLocator; null-safe. */
    public static void closeQuietly(WSDLLocator locator) {
        if (locator == null) {
            return; // nothing to do
        }
        locator.close();
    }

    /** Closes an XMLStreamWriter, swallowing any XMLStreamException. */
    public static void closeQuietly(XMLStreamWriter writer) {
        if (writer == null) {
            return; // nothing to do
        }
        try {
            writer.close();
        } catch (XMLStreamException ignore) {
            /* ignore */
        }
    }

    /** Closes a JarFile, swallowing any IOException. */
    public static void closeQuietly(JarFile jarfile) {
        if (jarfile == null) {
            return; // nothing to do
        }
        try {
            jarfile.close();
        } catch (IOException ignore) {
            /* ignore */
        }
    }

    /** Closes a logging FileHandler; null-safe. */
    public static void closeQuietly(FileHandler fileHandler) {
        if (fileHandler == null) {
            return; // nothing to do
        }
        fileHandler.close();
    }

    /** Closes any Closeable, swallowing any IOException. */
    public static void closeQuietly(Closeable closeable) {
        if (closeable == null) {
            return; // nothing to do
        }
        try {
            closeable.close();
        } catch (IOException ignore) {
            /* ignore */
        }
    }

    /** Flushes (when Flushable) then closes, ignoring errors from both. */
    public static void flushAndCloseQuietly(Closeable closeable) {
        if (closeable == null) {
            return; // nothing to do
        }
        try {
            if(closeable instanceof Flushable){
                ((Flushable)closeable).flush();
            }
        } catch (IOException e) {
            /* ignore */
        }
        try {
            closeable.close();
        } catch (IOException ignore) {
            /* ignore */
        }
    }

    /**
     * Moves a file into destLoc (created if missing). When the target exists
     * it is only replaced if override is true; otherwise the move is skipped.
     *
     * @throws IOException if File.renameTo fails (e.g. across filesystems)
     */
    public static void move(
            String srcFilePath,
            String destLoc,
            boolean override) throws IOException {
        File srcFile = new File(srcFilePath);
        File destDir = createDir(destLoc);
        File newFile = new File(destDir, srcFile.getName());
        if (newFile.exists()) {
            if (override == false) {
                return;
            } else {
                deleteFile(newFile);
            }
        }
        // Move file to new directory
        boolean success = srcFile.renameTo(newFile);
        if (success == false) {
            throw new IOException("Failed to move file : "
                    + srcFilePath + " to " + destLoc);
        }
    }

    /** Recursively collects absolute paths of all files under dir into files. */
    public static void addAllFiles(File dir, List<String> files) {
        if (dir.isDirectory()) {
            if (!dir.getName().equals(".")&& !dir.getName().equals("..")) {
                File[] children = dir.listFiles();
                for (int i = 0; i < children.length; i++) {
                    if (children[i].isDirectory()) {
                        addAllFiles(children[i], files);
                    } else {
                        files.add(children[i].getAbsolutePath());
                    }
                }
            }
        }
    }

    /**
     * Loads a template resource from the context classloader and returns its
     * contents as a string (platform default charset).
     *
     * @throws IOException if the resource cannot be found
     */
    public static String getTemplateContent(String templateName) throws IOException {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        InputStream inputStream = classLoader.getResourceAsStream(templateName);
        if (inputStream == null) {
            throw new IOException("Failed to load resource : " + templateName);
        }
        String templateContent = readContent(inputStream);
        return templateContent;
    }

    // Reads the whole stream into a String using the platform default
    // charset; closes the reader (and thus the stream) when done.
    private static String readContent(InputStream input) throws IOException {
        Charset defaultCharset = Charset.defaultCharset();
        InputStreamReader isr = new InputStreamReader(input,defaultCharset);
        BufferedReader reader = new BufferedReader(isr);
        StringBuilder strBuff = new StringBuilder();
        try {
            char[] charBuff = new char[512];
            int charsRead = -1;
            while ((charsRead = reader.read(charBuff)) > -1) {
                strBuff.append(charBuff, 0, charsRead);
            }
        } finally {
            reader.close();
        }
        return strBuff.toString();
    }

    /** Reads the whole file at filePath into a String (default charset). */
    public static String getFileContents(String filePath) throws IOException{
        FileInputStream fileInStream = new FileInputStream(filePath);
        return readContent(fileInStream);
    }

    /**
     * Writes contents to destLoc/fileName, replacing any existing file.
     * NOTE(review): getFileWriter returns null on empty args, which would
     * NPE at fileWriter.write — confirm callers never pass empty values.
     */
    public static void writeToFile(
            String destLoc, String fileName, String contents)
            throws IOException {
        Writer fileWriter = null;
        try {
            fileWriter = getFileWriter(destLoc, fileName);
            fileWriter.write(contents);
        } finally {
            closeQuietly(fileWriter);
        }
    }

    /** Builds a namespace-to-package map from the input options' mappings. */
    public static Map<String, String> createNS2PackageMap(InputOptions inputOptions) {
        Map<String, String> ns2PkgMap = new HashMap<String, String>();
        PkgToNSMappingList pkgNsMapList = inputOptions.getPkgNSMappings();
        if (pkgNsMapList != null && !pkgNsMapList.getPkgNsMap().isEmpty()) {
            for (PkgNSMappingType pkgNsMapType : pkgNsMapList.getPkgNsMap()) {
                ns2PkgMap.put(pkgNsMapType.getNamespace(), pkgNsMapType.getPackage());
            }
        }
        return ns2PkgMap;
    }

    /** Returns the simple class name of a qualified name, or "" if no dot. */
    public static String getJavaClassName(String className) {
        int idx = className.lastIndexOf(".");
        if (idx <= 0) {
            return "";
        }
        return className.substring(idx+1);
    }

    /** Converts a package name to a folder path with a trailing separator. */
    public static String getFolderPathFrompackageName(String packageName) {
        if(packageName==null)
            return null;
        packageName = packageName.replace('.', File.separatorChar);
        return toOSFilePath(packageName);
    }

    /** Converts a URL to a File, falling back to the raw path on bad URIs. */
    public static File urlToFile(URL url){
        File file = null;
        try{
            file = new File(url.toURI());
        }catch(Exception exception){
            file = new File(url.getPath());
        }
        return file;
    }

    /**
     * Tries to get the input stream using the classloader used for the loading the class passed as input param. If not found
     * then it tries to load the file from the current Threads context classloader
     * @param relativeFilePath
     * @param parentClassLoader (optional)
     * @return
     */
    public static InputStream getInputStreamForAFileFromClasspath(
            String relativeFilePath, ClassLoader parentClassLoader ){
        // Classpath resources always use '/' regardless of OS.
        relativeFilePath = relativeFilePath.replace("\\", "/");
        getLogger().log(Level.INFO, "call to getInputStreamForAFileFromClasspath for path : " + relativeFilePath);
        InputStream inStream = null;
        // Lookup order: caller-supplied loader, then the thread context
        // loader, then this class's own loader.
        if(parentClassLoader != null)
            inStream = parentClassLoader.getResourceAsStream(relativeFilePath);
        if(inStream == null){
            ClassLoader myClassLoader = Thread.currentThread().getContextClassLoader();
            inStream = myClassLoader.getResourceAsStream(relativeFilePath);
        }
        if(inStream == null)
            inStream = CodeGenUtil.class.getClassLoader().getResourceAsStream(relativeFilePath);

        if(inStream == null)
            getLogger().log(Level.WARNING, "Could not find the file from classpath : " + relativeFilePath + " in the method getInputStreamForAFileFromClasspath");
        else
            getLogger().log(Level.INFO, "Found the file from classpath : " + relativeFilePath + " in the method getInputStreamForAFileFromClasspath");

        return inStream;
    }

    /**
     * Copies the stream into a temp file ("ebayCodegen" prefix) with the
     * given extension and returns it. I/O errors are logged, not thrown, so
     * the returned file may be partial or empty. The input stream is NOT
     * closed here — that is left to the caller.
     */
    public static File getFileFromInputStream (InputStream inputStream , String fileExtension){
        FileOutputStream fileOutputStream = null;
        File file = null;
        try {
            file = File.createTempFile("ebayCodegen", fileExtension);
            byte[] bytes = new byte[10000];
            fileOutputStream = new FileOutputStream(file);
            int readCount = 0;
            while ( (readCount = inputStream.read(bytes)) > 0 ){
                fileOutputStream.write(bytes,0,readCount);
            }
        } catch (IOException e) {
            getLogger().log(Level.INFO, "exception while trying to create the tekmp file : exception is : " + e.getMessage());
        } finally{
            if(fileOutputStream != null){
                try {
                    fileOutputStream.close();
                } catch (IOException e) {
                    getLogger().log(Level.FINE, "Exception while closing the file outut stream for the file ");
                }
            }
        }
        return file;
    }
}
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Copyright 2006-2012 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.kuali.rice.krad.datadictionary.validation; import org.apache.commons.beanutils.PropertyUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.kuali.rice.core.api.CoreApiServiceLocator; import org.kuali.rice.core.api.config.property.ConfigurationService; import org.kuali.rice.core.api.exception.RiceIllegalArgumentException; import org.kuali.rice.core.api.uif.RemotableAttributeError; import org.kuali.rice.core.api.uif.RemotableAttributeField; import org.kuali.rice.core.api.util.RiceKeyConstants; import org.kuali.rice.core.api.util.Truth; import org.kuali.rice.core.api.util.type.TypeUtils; import org.kuali.rice.core.web.format.Formatter; import org.kuali.rice.krad.bo.BusinessObject; import org.kuali.rice.krad.data.DataObjectWrapper; import org.kuali.rice.krad.data.KradDataServiceLocator; import org.kuali.rice.krad.datadictionary.PrimitiveAttributeDefinition; import org.kuali.rice.krad.datadictionary.RelationshipDefinition; import org.kuali.rice.krad.service.DataDictionaryRemoteFieldService; import org.kuali.rice.krad.service.DataDictionaryService; import org.kuali.rice.krad.service.DictionaryValidationService; import org.kuali.rice.krad.service.KRADServiceLocatorWeb; import org.kuali.rice.krad.util.ErrorMessage; import org.kuali.rice.krad.util.GlobalVariables; import org.kuali.rice.krad.util.KRADUtils; import java.beans.PropertyDescriptor; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; /** * <p>An abstract base class for type service implementations which provides default validation of attributes from the Data * Dictionary. 
It attempts to remain module independent by requiring the translation of the attribute definitions to a * generic format that includes the required {@link RemotableAttributeField}s as an unimplemented template method, * see{@link #getTypeAttributeDefinitions(String)}. * </p> * <p>Note that any {@link RemotableAttributeError}s returned from here should be fully resolved to the messages to be * displayed to the user (in other words, they should not contain error keys). <b>The same approach should be taken by * subclasses since the message resources may not be present on the remote server that is invoking this service</b>. * There is a {@link #createErrorString(String, String...)} utility method that can be used to resolve * errorKeys and format them appropriately.</p> * * @author Kuali Rice Team (rice.collab@kuali.org) */ public abstract class AttributeValidatingTypeServiceBase { private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(AttributeValidatingTypeServiceBase.class); private static final String ANY_CHAR_PATTERN_S = ".*"; private static final Pattern ANY_CHAR_PATTERN = Pattern.compile(ANY_CHAR_PATTERN_S); private DictionaryValidationService dictionaryValidationService; private DataDictionaryService dataDictionaryService; private DataDictionaryRemoteFieldService dataDictionaryRemoteFieldService; /** * Retrieves active type attribute definitions and translates them into a module-independent representation. Note * that they should be returned in the order desired for display. * * @param typeId the identifier for the type * @return a correctly ordered List of active, module-independent type attribute definitions */ protected abstract List<TypeAttributeDefinition> getTypeAttributeDefinitions(String typeId); /** * Validates an attribute that is *not* mapped to a data dictionary component via * {@link TypeAttributeDefinition#componentName} and {@link TypeAttributeDefinition#name}. 
* * @param attr the RemotableAttributeField for which to validate. * @param key the attribute name * @param value the attribute value * @return a List of {@link RemotableAttributeError}s with fully resolved error messages (not error keys). May * return null or an empty List if no errors are encountered. */ protected abstract List<RemotableAttributeError> validateNonDataDictionaryAttribute(RemotableAttributeField attr, String key, String value); /** * <p>This is the default implementation. It calls into the service for each attribute to * validate it there. No combination validation is done. That should be done * by overriding this method.</p> * <p>This implementation calls {@link #getTypeAttributeDefinitions(String)} to retrieve module-agnostic * representations. It then iterates through the entry set of attributes, and calls * {@link #validateNonDataDictionaryAttribute(org.kuali.rice.core.api.uif.RemotableAttributeField, String, String)} * or {@link #validateDataDictionaryAttribute(AttributeValidatingTypeServiceBase.TypeAttributeDefinition, String, String)} * as appropriate. Lastly it calls {@link #validateReferencesExistAndActive(java.util.Map, java.util.Map, java.util.List)}. * </p> * * @param typeId the identifier for the type * @param attributes the Map of attribute names to values * @return the List of errors ({@link RemotableAttributeError}s) encountered during validation. 
*/ public List<RemotableAttributeError> validateAttributes(String typeId, Map<String, String> attributes) { if (StringUtils.isBlank(typeId)) { throw new RiceIllegalArgumentException("typeId was null or blank"); } if (attributes == null) { throw new RiceIllegalArgumentException("attributes was null or blank"); } List<TypeAttributeDefinition> definitions = getTypeAttributeDefinitions(typeId); Map<String, TypeAttributeDefinition> typeAttributeDefinitionMap = buildTypeAttributeDefinitionMapByName(definitions); final List<RemotableAttributeError> validationErrors = new ArrayList<RemotableAttributeError>(); for ( Map.Entry<String, String> entry : attributes.entrySet() ) { TypeAttributeDefinition typeAttributeDefinition = typeAttributeDefinitionMap.get(entry.getKey()); final List<RemotableAttributeError> attributeErrors; if (typeAttributeDefinition != null) { if (typeAttributeDefinition.getComponentName() == null) { attributeErrors = validateNonDataDictionaryAttribute(typeAttributeDefinition.getField(), entry.getKey(), entry.getValue()); } else { attributeErrors = validateDataDictionaryAttribute(typeAttributeDefinition, entry.getKey(), entry.getValue()); } if ( attributeErrors != null ) { validationErrors.addAll(attributeErrors); } } } final List<RemotableAttributeError> referenceCheckErrors = validateReferencesExistAndActive(typeAttributeDefinitionMap, attributes, validationErrors); validationErrors.addAll(referenceCheckErrors); return Collections.unmodifiableList(validationErrors); } private Map<String, TypeAttributeDefinition> buildTypeAttributeDefinitionMapByName( List<TypeAttributeDefinition> definitions) {// throw them into a map by name Map<String, TypeAttributeDefinition> typeAttributeDefinitionMap; if (definitions == null || definitions.size() == 0) { typeAttributeDefinitionMap = Collections.<String, TypeAttributeDefinition>emptyMap(); } else { typeAttributeDefinitionMap = new HashMap<String, TypeAttributeDefinition>(); for (TypeAttributeDefinition definition : 
definitions) { typeAttributeDefinitionMap.put(definition.getName(), definition); } } return typeAttributeDefinitionMap; } /** * <p>Cross-validates referenced components amongst attributes to ensure they refer to existing and active * business objects.</p> * <p>This implementation instantiates any components mapped by attributes, populates them as best it can, and then * uses the {@link DataDictionaryService} to get relationship information. Then, through the * {@link DictionaryValidationService} it attempts to ensure that any referenced business objects mapped by other * attributes exist and are active. It pulls any errors encountered out of the global error map via calls to * {@link #extractErrorsFromGlobalVariablesErrorMap(String)}</p> * <p>TODO: who can explain this? :-)</p> * * @param typeAttributeDefinitionMap a Map from attribute name to {@link TypeAttributeDefinition} containing all of * the attribute definitions for this type. * @param attributes the Map of attribute names to values * @param previousValidationErrors a List of previously encountered errors used to short circuit testing on * attributes that are already known to have errors. * @return the List of errors encountered. Cannot return null. */ protected List<RemotableAttributeError> validateReferencesExistAndActive( Map<String, TypeAttributeDefinition> typeAttributeDefinitionMap, Map<String, String> attributes, List<RemotableAttributeError> previousValidationErrors) { // // Here there be dragons -- adapted from DataDictionaryTypeServiceBase, please excuse X-. 
    //
    Map<String, BusinessObject> componentClassInstances = new HashMap<String, BusinessObject>();
    List<RemotableAttributeError> errors = new ArrayList<RemotableAttributeError>();
    // Phase 1: create an instance of each component class referenced by an attribute definition and
    // shove it into componentClassInstances, keyed by component (class) name.  Instantiation failures
    // are logged and skipped rather than failing the whole validation.
    for ( String attributeName : attributes.keySet() ) {
        TypeAttributeDefinition attr = typeAttributeDefinitionMap.get(attributeName);
        if ((attr != null) && StringUtils.isNotBlank(attr.getComponentName())) {
            if (!componentClassInstances.containsKey(attr.getComponentName())) {
                try {
                    Class<?> componentClass = Class.forName(attr.getComponentName());
                    if (!BusinessObject.class.isAssignableFrom(componentClass)) {
                        LOG.warn("Class " + componentClass.getName() + " does not implement BusinessObject. Unable to perform reference existence and active validation");
                        continue;
                    }
                    BusinessObject componentInstance = (BusinessObject) componentClass.newInstance();
                    componentClassInstances.put(attr.getComponentName(), componentInstance);
                } catch (Exception e) {
                    LOG.error("Unable to instantiate class for attribute: " + attributeName, e);
                }
            }
        }
    }
    // Phase 2: now that we have instances for each component class, try to populate them with any attribute
    // we can, assuming there were no other validation errors associated with it.  Note that every attribute
    // is attempted against EVERY component instance; instances lacking the property just log and move on.
    for ( Map.Entry<String, String> entry : attributes.entrySet() ) {
        if (!RemotableAttributeError.containsAttribute(entry.getKey(), previousValidationErrors)) {
            for (Object componentInstance : componentClassInstances.values()) {
                try {
                    DataObjectWrapper wrapper = KradDataServiceLocator.getDataObjectService().wrap(componentInstance);
                    wrapper.setPropertyValues(Collections.singletonMap(entry.getKey(), entry.getValue()));
                } catch (Exception e) {
                    LOG.error("Unable to set object property class: " + componentInstance.getClass().getName() + " property: " + entry.getKey(), e);
                }
            }
        }
    }
    // Phase 3: for each populated component, walk its data dictionary relationships and ask the
    // DictionaryValidationService to confirm each referenced object exists and is active.
    for (Map.Entry<String, BusinessObject> entry : componentClassInstances.entrySet()) {
        List<RelationshipDefinition> relationships =
                getDataDictionaryService().getDataDictionary().getBusinessObjectEntry(entry.getKey()).getRelationships();
        if (relationships == null) {
            continue;
        }
        for (RelationshipDefinition relationshipDefinition : relationships) {
            List<PrimitiveAttributeDefinition> primitiveAttributes = relationshipDefinition.getPrimitiveAttributes();
            // this code assumes that the last defined primitiveAttribute is the attributeToHighlightOnFail
            String attributeToHighlightOnFail = primitiveAttributes.get(primitiveAttributes.size() - 1).getSourceName();
            // TODO: will this work for user ID attributes?
            if (attributes.containsKey(attributeToHighlightOnFail)) {
                TypeAttributeDefinition attr = typeAttributeDefinitionMap.get(attributeToHighlightOnFail);
                if (attr != null) {
                    final String attributeDisplayLabel;
                    if (StringUtils.isNotBlank(attr.getComponentName())) {
                        attributeDisplayLabel = getDataDictionaryService().getAttributeLabel(attr.getComponentName(), attributeToHighlightOnFail);
                    } else {
                        attributeDisplayLabel = attr.getLabel();
                    }
                    // Reports failures into the global error map (side effect), not a return value.
                    getDictionaryValidationService().validateReferenceExistsAndIsActive(entry.getValue(),
                            relationshipDefinition.getObjectAttributeName(), attributeToHighlightOnFail,
                            attributeDisplayLabel);
                }
                // Drain whatever the validation call put into the global error map and convert it
                // into RemotableAttributeErrors for the caller.
                List<String> extractedErrors = extractErrorsFromGlobalVariablesErrorMap(attributeToHighlightOnFail);
                if (CollectionUtils.isNotEmpty(extractedErrors)) {
                    errors.add(RemotableAttributeError.Builder.create(attributeToHighlightOnFail, extractedErrors).build());
                }
            }
        }
    }
    return errors;
}

/**
 * <p>Returns a String suitable for use in error messages to represent the given attribute.</p>
 * <p>This implementation returns a String of the format "longLabel (shortLabel)" where those fields are pulled
 * from the passed in definition.</p>
 *
 * @param definition the definition for which to create an error label.
 * @return the error label String.
*/ protected static String getAttributeErrorLabel(RemotableAttributeField definition) { String longAttributeLabel = definition.getLongLabel(); String shortAttributeLabel = definition.getShortLabel(); return longAttributeLabel + " (" + shortAttributeLabel + ")"; } /** * <p>creates an error String from the given errorKey and parameters.</p> * <p>This implementation will attempt to resolve the errorKey using the {@link ConfigurationService}, and format it * with the provided params using {@link MessageFormat#format(String, Object...)}. If the errorKey can't be * resolved, it will return a string like the following: errorKey:param1;param2;param3; * </p> * * @param errorKey the errorKey * @param params the error params * @return error string */ protected String createErrorString(String errorKey, String... params) { String errorString = getConfigurationService().getPropertyValueAsString(errorKey); if (StringUtils.isEmpty(errorString)) { final StringBuilder s = new StringBuilder(errorKey).append(':'); if (params != null) { for (String p : params) { if (p != null) { s.append(p); s.append(';'); } } } errorString = s.toString(); } else { errorString = MessageFormat.format(errorString, params); } return errorString; } /** * <p>Validates a data dictionary mapped attribute for a primitive property.</p> * <p>This implementation checks that the attribute is defined using the {@link DataDictionaryService} if it is * from a specific set of types defined in TypeUtils. Then, if the value is not blank, it checks for errors by * calling * {@link #validateAttributeFormat(org.kuali.rice.core.api.uif.RemotableAttributeField, String, String, String, String)}. 
 * If it is blank, it checks for errors by calling
 * {@link #validateAttributeRequired(org.kuali.rice.core.api.uif.RemotableAttributeField, String, String, Object, String)}
 * .</p>
 *
 * @param typeAttributeDefinition the definition for the attribute
 * @param componentName the data dictionary component name
 * @param object the instance of the component
 * @param propertyDescriptor the descriptor for the property that the attribute maps to
 * @return a List of errors ({@link RemotableAttributeError}s) encountered during validation. Cannot return null.
 */
protected List<RemotableAttributeError> validatePrimitiveAttributeFromDescriptor(
        TypeAttributeDefinition typeAttributeDefinition, String componentName, Object object,
        PropertyDescriptor propertyDescriptor) {
    List<RemotableAttributeError> errors = new ArrayList<RemotableAttributeError>();
    // validate the primitive attributes if defined in the dictionary
    if (null != propertyDescriptor && getDataDictionaryService().isAttributeDefined(componentName, propertyDescriptor.getName())) {
        // Read the (already hydrated) value back off the component instance.
        DataObjectWrapper wrapper = KradDataServiceLocator.getDataObjectService().wrap(object);
        Object value = wrapper.getPropertyValue(propertyDescriptor.getName());
        Class<?> propertyType = propertyDescriptor.getPropertyType();
        // Only String / integral / decimal / temporal properties are validated here; other types pass through.
        if (TypeUtils.isStringClass(propertyType) || TypeUtils.isIntegralClass(propertyType) ||
                TypeUtils.isDecimalClass(propertyType) || TypeUtils.isTemporalClass(propertyType)) {
            // check value format against dictionary
            if (value != null && StringUtils.isNotBlank(value.toString())) {
                // Temporal values get no format check — only non-temporal types are matched here.
                if (!TypeUtils.isTemporalClass(propertyType)) {
                    errors.addAll(validateAttributeFormat(typeAttributeDefinition.getField(), componentName,
                            propertyDescriptor.getName(), value.toString(), propertyDescriptor.getName()));
                }
            } else {
                // if it's blank, then we check whether the attribute should be required
                errors.addAll(validateAttributeRequired(typeAttributeDefinition.getField(), componentName,
                        propertyDescriptor.getName(), value,
                        propertyDescriptor.getName()));
            }
        }
    }
    return errors;
}

/**
 * <p>Validates required-ness of an attribute against its corresponding value</p>
 * <p>This implementation checks if an attribute value is null or blank, and if so checks if the
 * {@link RemotableAttributeField} is required. If it is, a {@link RemotableAttributeError} is created
 * with the message populated by a call to {@link #createErrorString(String, String...)}.</p>
 *
 * @param field the field for the attribute being tested
 * @param objectClassName the class name for the component
 * @param attributeName the name of the attribute
 * @param attributeValue the value of the attribute
 * @param errorKey the errorKey used to identify the field
 * @return the List of errors ({@link RemotableAttributeError}s) encountered during validation. Cannot return null.
 */
protected List<RemotableAttributeError> validateAttributeRequired(RemotableAttributeField field,
        String objectClassName, String attributeName, Object attributeValue, String errorKey) {
    List<RemotableAttributeError> errors = new ArrayList<RemotableAttributeError>();
    // check if field is a required field for the business object; only null or blank-String values can fail
    if (attributeValue == null || (attributeValue instanceof String && StringUtils.isBlank((String) attributeValue))) {
        boolean required = field.isRequired();
        if (required) {
            // get label of attribute for message
            String errorLabel = getAttributeErrorLabel(field);
            errors.add(RemotableAttributeError.Builder.create(errorKey, createErrorString(RiceKeyConstants.ERROR_REQUIRED, errorLabel)).build());
        }
    }
    return errors;
}

/**
 * <p>Gets the validation {@link Pattern} for the given {@link RemotableAttributeField}.</p>
 * <p>This implementation checks if there is a regexConstraint set on the field, and if so
 * it compiles a Pattern (with no special flags) using it. Otherwise, it returns a pattern that
 * always matches.</p>
 *
 * @param field the field for which to return a validation {@link Pattern}.
* @return the compiled {@link Pattern} to use in validation the given field. */ protected Pattern getAttributeValidatingExpression(RemotableAttributeField field) { if (field == null || StringUtils.isBlank(field.getRegexConstraint())) { return ANY_CHAR_PATTERN; } return Pattern.compile(field.getRegexConstraint()); } /** * <p>Gets a {@link Formatter} appropriate for the data type of the given field.</p> * <p>This implementation returns null if {@link org.kuali.rice.core.api.uif.RemotableAttributeField#getDataType()} * returns null. Otherwise, it returns the result of calling {@link Formatter#getFormatter(Class)} on the * {@link org.kuali.rice.core.api.data.DataType}'s type</p> * * @param field the field for which to provide a {@link Formatter}. * @return an applicable {@link Formatter}, or null if one can't be found. */ protected Formatter getAttributeFormatter(RemotableAttributeField field) { if (field.getDataType() == null) { return null; } return Formatter.getFormatter(field.getDataType().getType()); } /** * <p>Validates the format of the value for the given attribute field.</p> * <p>This implementation checks if the attribute value is not blank, in which case it checks (as applicable) the * max length, min length, min value, max value, and format (using the {@link Pattern} returned by * {@link #getAttributeValidatingExpression(org.kuali.rice.core.api.uif.RemotableAttributeField)}). If that doesn't * match, it will use the Formatter returned by * {@link #getAttributeFormatter(org.kuali.rice.core.api.uif.RemotableAttributeField)} to format the value and try * matching against it again. For each format error that is found, * {@link #createErrorString(String, String...)} is called to prepare the text for the * {@link RemotableAttributeError} that is generated. 
 *
 * @param field the field for the attribute whose value we are validating
 * @param objectClassName the name of the class to which the attribute belongs
 * @param attributeName the name of the attribute
 * @param attributeValue the String value whose format we are validating
 * @param errorKey the name of the property on the object class that this attribute maps to
 * @return a List containing any errors ({@link RemotableAttributeError}s) that are detected.
 */
protected List<RemotableAttributeError> validateAttributeFormat(RemotableAttributeField field,
        String objectClassName, String attributeName, String attributeValue, String errorKey) {
    List<RemotableAttributeError> errors = new ArrayList<RemotableAttributeError>();
    String errorLabel = getAttributeErrorLabel(field);
    if ( LOG.isDebugEnabled() ) {
        LOG.debug("(bo, attributeName, attributeValue) = (" + objectClassName + "," + attributeName + "," + attributeValue + ")");
    }
    // Checks run in a fixed order (max length, min length, regex, min value, max value) and
    // SHORT-CIRCUIT: the first failing check returns immediately, so at most one error is reported.
    if (StringUtils.isNotBlank(attributeValue)) {
        Integer maxLength = field.getMaxLength();
        if ((maxLength != null) && (maxLength.intValue() < attributeValue.length())) {
            errors.add(RemotableAttributeError.Builder.create(errorKey, createErrorString(RiceKeyConstants.ERROR_MAX_LENGTH,
                    errorLabel, maxLength.toString())).build());
            return errors;
        }
        Integer minLength = field.getMinLength();
        if ((minLength != null) && (minLength.intValue() > attributeValue.length())) {
            errors.add(RemotableAttributeError.Builder.create(errorKey, createErrorString(RiceKeyConstants.ERROR_MIN_LENGTH,
                    errorLabel, minLength.toString())).build());
            return errors;
        }
        Pattern validationExpression = getAttributeValidatingExpression(field);
        // Skip the regex check entirely when the field has no real constraint (match-anything pattern).
        if (!ANY_CHAR_PATTERN_S.equals(validationExpression.pattern())) {
            if ( LOG.isDebugEnabled() ) {
                LOG.debug("(bo, attributeName, validationExpression) = (" + objectClassName + "," + attributeName + "," + validationExpression + ")");
            }
            if (!validationExpression.matcher(attributeValue).matches()) {
                boolean isError = true;
                // Second chance: run the raw value through the type's Formatter and re-match,
                // so e.g. values that normalize into the expected format still pass.
                final Formatter formatter = getAttributeFormatter(field);
                if (formatter != null) {
                    Object o = formatter.format(attributeValue);
                    isError = !validationExpression.matcher(String.valueOf(o)).matches();
                }
                if (isError) {
                    // NOTE: getRegexContraintMsg (sic) is the upstream API's spelling.
                    errors.add(RemotableAttributeError.Builder.create(errorKey,
                            createErrorString(field.getRegexContraintMsg(), errorLabel))
                            .build());
                }
                return errors;
            }
        }
        Double min = field.getMinValue();
        if (min != null) {
            try {
                if (Double.parseDouble(attributeValue) < min) {
                    errors.add(RemotableAttributeError.Builder.create(errorKey, createErrorString(
                            RiceKeyConstants.ERROR_INCLUSIVE_MIN, errorLabel, min.toString())).build());
                    return errors;
                }
            } catch (NumberFormatException e) {
                // quash; this indicates that the DD contained a min for a non-numeric attribute
            }
        }
        Double max = field.getMaxValue();
        if (max != null) {
            try {
                if (Double.parseDouble(attributeValue) > max) {
                    errors.add(RemotableAttributeError.Builder.create(errorKey, createErrorString(
                            RiceKeyConstants.ERROR_INCLUSIVE_MAX, errorLabel, max.toString())).build());
                    return errors;
                }
            } catch (NumberFormatException e) {
                // quash; this indicates that the DD contained a max for a non-numeric attribute
            }
        }
    }
    return errors;
}

/**
 * <p>Removes all errors for the given attributeName from the global error map, transforms them as appropriate and
 * returns them as a List of Strings.</p>
 * <p>This implementation iterates through any errors found in the error map, transforms them by calling
 * {@link #createErrorString(String, String...)} and adds them to the List that is then returned</p>
 *
 * @param attributeName the attribute name for which to extract errors from the global error map.
 * @return a List of error Strings
 */
protected List<String> extractErrorsFromGlobalVariablesErrorMap(String attributeName) {
    // The message map's value for a property is loosely typed: historically it can be a single
    // String key, a List of ErrorMessage, or a String[] of keys — all three shapes are handled below.
    Object results = GlobalVariables.getMessageMap().getErrorMessagesForProperty(attributeName);
    List<String> errors = new ArrayList<String>();
    if (results instanceof String) {
        errors.add(createErrorString((String) results));
    } else if ( results != null) {
        if (results instanceof List) {
            List<?> errorList = (List<?>)results;
            for (Object msg : errorList) {
                // NOTE(review): assumes every list element is an ErrorMessage — a different element
                // type would throw ClassCastException here.
                ErrorMessage errorMessage = (ErrorMessage)msg;
                errors.add(createErrorString(errorMessage.getErrorKey(), errorMessage.getMessageParameters()));
            }
        } else {
            String [] temp = (String []) results;
            for (String string : temp) {
                errors.add(createErrorString(string));
            }
        }
    }
    // Destructive read: the extracted messages are removed from the global map so they are not
    // reported twice.
    GlobalVariables.getMessageMap().removeAllErrorMessagesForProperty(attributeName);
    return errors;
}

/**
 * <p>Validates the attribute value for the given {@link TypeAttributeDefinition} having a componentName.</p>
 * <p>This implementation instantiates a component object using reflection on the class name specified in the
 * {@link TypeAttributeDefinition}s componentName, gets a {@link PropertyDescriptor} for the attribute of the
 * component object, hydrates the attribute's value from it's String form, sets that value on the component object,
 * and then delegates to
 * {@link #validatePrimitiveAttributeFromDescriptor(AttributeValidatingTypeServiceBase.TypeAttributeDefinition, String, Object, java.beans.PropertyDescriptor)}.
 * </p>
 *
 * @param typeAttributeDefinition the definition whose componentName names the class to instantiate. Must not be null.
 * @param attributeName the name of the bean property on the component that the attribute maps to. May be null.
 * @param value the String form of the attribute value to hydrate and validate.
 * @return a List of errors ({@link RemotableAttributeError}s) found during validation; empty (never null) when the
 *         attribute name or property descriptor can't be resolved.
 * @throws TypeAttributeValidationException wrapping any reflection or validation failure.
 */
protected List<RemotableAttributeError> validateDataDictionaryAttribute(TypeAttributeDefinition typeAttributeDefinition,
        String attributeName, String value) {
    try {
        // create an object of the proper type per the component
        Object componentObject = Class.forName( typeAttributeDefinition.getComponentName() ).newInstance();
        if ( attributeName != null ) {
            // get the bean utils descriptor for accessing the attribute on that object
            PropertyDescriptor propertyDescriptor = PropertyUtils.getPropertyDescriptor(componentObject, attributeName);
            if ( propertyDescriptor != null ) {
                // set the value on the object so that it can be checked
                Object attributeValue = getAttributeValue(propertyDescriptor, value);
                propertyDescriptor.getWriteMethod().invoke( componentObject, attributeValue);
                return validatePrimitiveAttributeFromDescriptor(typeAttributeDefinition,
                        typeAttributeDefinition.getComponentName(), componentObject, propertyDescriptor);
            }
        }
    } catch (Exception e) {
        // any reflection/validation failure is surfaced uniformly
        throw new TypeAttributeValidationException(e);
    }
    return Collections.emptyList();
}

/**
 * Hydrates the String attribute value into an object of the property's declared type.
 * Returns the String unchanged for String properties, a Boolean for boolean properties, and
 * otherwise attempts construction via KRADUtils (falling back to the raw String if that fails).
 *
 * @param propertyDescriptor the descriptor of the target property; may be null.
 * @param attributeValue the raw String value; may be null.
 * @return the hydrated value, or null when either argument is null.
 */
private Object getAttributeValue(PropertyDescriptor propertyDescriptor, String attributeValue){
    Object attributeValueObject = null;
    if (propertyDescriptor!=null && attributeValue!=null) {
        Class<?> propertyType = propertyDescriptor.getPropertyType();
        if (String.class.equals(propertyType)) {
            // it's already a String
            attributeValueObject = attributeValue;
        }
        // KULRICE-6808: Kim Role Maintenance - Custom boolean role qualifier values are not being converted properly
        else if (Boolean.class.equals(propertyType) || Boolean.TYPE.equals(propertyType)) {
            attributeValueObject = Truth.strToBooleanIgnoreCase(attributeValue);
        } else {
            // try to create one with KRADUtils for other misc data types
            attributeValueObject = KRADUtils.createObject(propertyType, new Class[]{String.class}, new Object[]{attributeValue});
            // if that didn't work, we'll get a null back
            if (attributeValueObject == null ) {
                // this doesn't seem like a great option, since we know the property isn't of type String
                attributeValueObject = attributeValue;
            }
        }
    }
    return attributeValueObject;
}

// Lazily resolves (and caches) the DictionaryValidationService on first use.
protected DictionaryValidationService getDictionaryValidationService() {
    if (dictionaryValidationService == null) {
        dictionaryValidationService = KRADServiceLocatorWeb.getDictionaryValidationService();
    }
    return dictionaryValidationService;
}

// lazy initialization holder class (initialization-on-demand holder idiom)
private static class DataDictionaryServiceHolder {
    public static DataDictionaryService dataDictionaryService = KRADServiceLocatorWeb.getDataDictionaryService();
}

protected DataDictionaryService getDataDictionaryService() {
    return DataDictionaryServiceHolder.dataDictionaryService;
}

// lazy initialization holder class (initialization-on-demand holder idiom)
private static class DataDictionaryRemoteFieldServiceHolder {
    public static DataDictionaryRemoteFieldService dataDictionaryRemoteFieldService = KRADServiceLocatorWeb.getDataDictionaryRemoteFieldService();
}

protected DataDictionaryRemoteFieldService getDataDictionaryRemoteFieldService() {
    return DataDictionaryRemoteFieldServiceHolder.dataDictionaryRemoteFieldService;
}

// lazy initialization holder class (initialization-on-demand holder idiom)
private static class ConfigurationServiceHolder {
    public static ConfigurationService configurationService = CoreApiServiceLocator.getKualiConfigurationService();
}

protected ConfigurationService getConfigurationService() {
    return ConfigurationServiceHolder.configurationService;
}

/** Unchecked wrapper for any failure encountered while validating a type attribute. */
protected static class TypeAttributeValidationException extends RuntimeException {

    protected TypeAttributeValidationException(String message) {
        super( message );
    }

    protected TypeAttributeValidationException(Throwable cause) {
        super( cause );
    }

    private static final long serialVersionUID = 8220618846321607801L;
}

/**
 * A module-independent representation of a type attribute containing all the information that we need
 * in order to validate data dictionary-based attributes.
*/ protected static class TypeAttributeDefinition { private final RemotableAttributeField field; private final String name; private final String componentName; private final String label; private final Map<String, String> properties; /** * Constructs a {@link TypeAttributeDefinition} * @param field the RemotableAttributeField corresponding to this definition. Must not be null. * @param name the name for this attribute. Must not be empty or null. * @param componentName The name of a data dictionary component that this field refers to. May be null. * @param label The label to use for this attribute. May be null. * @param properties a catch all for properties important to a module's type attrbute definitions * that aren't directly supported by {@link TypeAttributeDefinition}. */ public TypeAttributeDefinition(RemotableAttributeField field, String name, String componentName, String label, Map<String, String> properties) { if (field == null) throw new RiceIllegalArgumentException("field must not be null"); if (StringUtils.isEmpty(name)) throw new RiceIllegalArgumentException("name must not be empty or null"); this.field = field; this.name = name; this.componentName = componentName; this.label = label; if (properties == null || properties.isEmpty()) { this.properties = Collections.emptyMap(); } else { // make our local variable into a copy of the passed in Map properties = new HashMap<String, String>(properties); // assign in in immutable form to our class member variable this.properties = Collections.unmodifiableMap(properties); } } public RemotableAttributeField getField() { return field; } public String getName() { return name; } public String getComponentName() { return componentName; } public String getLabel() { return label; } /** * @return an unmodifiable map of properties for this attribute. Will never be null. */ public Map<String, String> getProperties() { return properties; } } }
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.ui.messages;

import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.MultiLineLabelUI;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.Alarm;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * A simple message dialog: an optional icon, a (possibly scrollable) message, and a row of option
 * buttons, one per entry in {@code myOptions}.  Closing via an option button exits with that
 * option's index as the exit code; cancelling exits with -1.  When mac sheet emulation is enabled
 * ({@link Messages#isMacSheetEmulation()}) the dialog is undecorated and slides open/closed with a
 * short animation.
 */
public class MessageDialog extends DialogWrapper {
  // The message body; null means no message component is created.
  protected @NlsContexts.DialogMessage @Nullable String myMessage;
  // Button captions; the index of the pressed button becomes the dialog's exit code.
  protected String[] myOptions;
  // Index of the default (Enter) option, or -1 for none.
  protected int myDefaultOptionIndex;
  // Index of the initially focused option, or -1 for none.
  protected int myFocusedOptionIndex;
  protected Icon myIcon;
  // Only set when mac sheet emulation is active; drives the open/close animation phase.
  private MessagesBorderLayout myLayout;
  private @NonNls @Nullable String myHelpId;

  public MessageDialog(@Nullable Project project,
                       @NlsContexts.DialogMessage @Nullable String message,
                       @NlsContexts.DialogTitle String title,
                       String @NotNull [] options,
                       int defaultOptionIndex,
                       @Nullable Icon icon,
                       boolean canBeParent) {
    this(project, null, message, title, options, defaultOptionIndex, -1, icon, null, canBeParent);
  }

  public MessageDialog(@Nullable Project project,
                       @Nullable Component parentComponent,
                       @NlsContexts.DialogMessage @Nullable String message,
                       @NlsContexts.DialogTitle String title,
                       String @NotNull [] options,
                       int defaultOptionIndex,
                       int focusedOptionIndex,
                       @Nullable Icon icon,
                       @Nullable com.intellij.openapi.ui.DoNotAskOption doNotAskOption,
                       boolean canBeParent) {
    this(project, parentComponent, message, title, options, defaultOptionIndex, focusedOptionIndex, icon, doNotAskOption, canBeParent,
         null);
  }

  /** Primary constructor: all the other public constructors delegate here. */
  public MessageDialog(@Nullable Project project,
                       @Nullable Component parentComponent,
                       @NlsContexts.DialogMessage @Nullable String message,
                       @NlsContexts.DialogTitle String title,
                       String @NotNull [] options,
                       int defaultOptionIndex,
                       int focusedOptionIndex,
                       @Nullable Icon icon,
                       @Nullable com.intellij.openapi.ui.DoNotAskOption doNotAskOption,
                       boolean canBeParent,
                       @Nullable String helpId) {
    super(project, parentComponent, canBeParent, IdeModalityType.IDE);
    _init(title, message, options, defaultOptionIndex, focusedOptionIndex, icon, doNotAskOption, helpId);
  }

  public MessageDialog(@NlsContexts.DialogMessage @Nullable String message,
                       @NlsContexts.DialogTitle String title,
                       String @NotNull [] options,
                       int defaultOptionIndex,
                       @Nullable Icon icon) {
    this(null, null, message, title, options, defaultOptionIndex, -1, icon, null, false);
  }

  // The bare constructors below do NOT call _init(); subclasses using them must initialize themselves.
  protected MessageDialog() {
    super(false);
  }

  protected MessageDialog(Project project) {
    super(project, false);
  }

  public MessageDialog(Project project, boolean canBeParent) {
    super(project, canBeParent);
  }

  /** Stores the configuration fields and performs {@link DialogWrapper#init()}. */
  protected void _init(@NlsContexts.DialogTitle String title,
                       @NlsContexts.DialogMessage @Nullable String message,
                       String @NotNull [] options,
                       int defaultOptionIndex,
                       int focusedOptionIndex,
                       @Nullable Icon icon,
                       @Nullable com.intellij.openapi.ui.DoNotAskOption doNotAskOption,
                       @Nullable String helpId) {
    setTitle(title);
    if (Messages.isMacSheetEmulation()) {
      // Sheets are drawn without the native window frame.
      setUndecorated(true);
    }
    myMessage = message;
    myOptions = options;
    myDefaultOptionIndex = defaultOptionIndex;
    myFocusedOptionIndex = focusedOptionIndex;
    myIcon = icon;
    myHelpId = helpId;
    setDoNotAskOption(doNotAskOption);
    init();
    if (Messages.isMacSheetEmulation()) {
      MacUtil.adjustFocusTraversal(myDisposable);
    }
  }

  /** Creates one action per option (closing with that option's index), plus Help when a help id is set. */
  @Override
  protected Action @NotNull [] createActions() {
    List<Action> actions = new ArrayList<>();
    for (int i = 0; i < myOptions.length; i++) {
      String option = myOptions[i];
      final int exitCode = i;
      Action action = new AbstractAction(UIUtil.replaceMnemonicAmpersand(option)) {
        @Override
        public void actionPerformed(ActionEvent e) {
          close(exitCode, true);
        }
      };
      if (i == myDefaultOptionIndex) {
        action.putValue(DEFAULT_ACTION, Boolean.TRUE);
      }
      if (i == myFocusedOptionIndex) {
        action.putValue(FOCUSED_ACTION, Boolean.TRUE);
      }
      UIUtil.assignMnemonic(option, action);
      actions.add(action);
    }
    if (getHelpId() != null) {
      actions.add(getHelpAction());
    }
    return actions.toArray(new Action[0]);
  }

  /** Cancel maps to exit code -1 (distinct from any option index). */
  @Override
  public void doCancelAction() {
    close(-1);
  }

  @Override
  protected JComponent createCenterPanel() {
    return doCreateCenterPanel();
  }

  @NotNull
  @Override
  protected LayoutManager createRootLayout() {
    // In sheet-emulation mode install (and remember) the animatable layout; note the assignment
    // to myLayout inside the ternary.
    return Messages.isMacSheetEmulation() ? myLayout = new MessagesBorderLayout() : super.createRootLayout();
  }

  @Override
  protected void dispose() {
    if (Messages.isMacSheetEmulation()) {
      // Defer actual disposal until the close animation finishes (animate() calls super.dispose()).
      animate();
    }
    else {
      super.dispose();
    }
  }

  @Override
  public void show() {
    if (Messages.isMacSheetEmulation()) {
      // Position the sheet horizontally centered against the parent/owner root pane, then slide it open.
      setInitialLocationCallback(() -> {
        JRootPane rootPane = SwingUtilities.getRootPane(getWindow().getParent());
        if (rootPane == null) {
          rootPane = SwingUtilities.getRootPane(getWindow().getOwner());
        }
        Point p = rootPane.getLocationOnScreen();
        p.x += (rootPane.getWidth() - getWindow().getWidth()) / 2;
        return p;
      });
      animate();
      getPeer().getWindow().setOpacity(.8f);
      setAutoAdjustable(false);
      setSize(getPreferredSize().width, 0);//initial state before animation, zero height
    }
    super.show();
  }

  /**
   * Runs the sheet slide animation.  Direction depends on current visibility: a showing dialog is
   * animated closed (and then really disposed), a hidden one is animated open.  Ten frames, ~10 ms
   * apart, with a cosine ease-in/ease-out applied to the linear progress.
   */
  private void animate() {
    final int height = getPreferredSize().height;
    final int frameCount = 10;
    final boolean toClose = isShowing();
    final AtomicInteger i = new AtomicInteger(-1);
    final Alarm animator = new Alarm(myDisposable);
    final Runnable runnable = new Runnable() {
      @Override
      public void run() {
        int state = i.addAndGet(1);
        double linearProgress = (double)state / frameCount;
        if (toClose) {
          // Closing plays the same curve in reverse.
          linearProgress = 1 - linearProgress;
        }
        myLayout.setPhase((1 - Math.cos(Math.PI * linearProgress)) / 2);
        Window window = getPeer().getWindow();
        Rectangle bounds = window.getBounds();
        bounds.height = (int)(height * myLayout.getPhase());
        window.setBounds(bounds);
        if (state == 0 && !toClose && window.getOwner() instanceof IdeFrame) {
          // First frame of an opening sheet: flash the owning IDE frame's taskbar/dock entry.
          WindowManager.getInstance().requestUserAttention((IdeFrame)window.getOwner(), true);
        }
        if (state < frameCount) {
          animator.addRequest(this, 10);
        }
        else if (toClose) {
          // Animation finished; now perform the disposal that dispose() deferred.
          MessageDialog.super.dispose();
        }
      }
    };
    animator.addRequest(runnable, 10, ModalityState.stateForComponent(getRootPane()));
  }

  /** Builds the icon + message center panel; the message is wrapped in a scroll pane when large. */
  protected JComponent doCreateCenterPanel() {
    JPanel panel = createIconPanel();
    if (myMessage != null) {
      JTextPane messageComponent = createMessageComponent(myMessage);
      panel.add(Messages.wrapToScrollPaneIfNeeded(messageComponent, 100, 15), BorderLayout.CENTER);
    }
    return panel;
  }

  /** Creates the base panel with the icon (if any) docked top-left. */
  @NotNull
  protected JPanel createIconPanel() {
    JPanel panel = new JPanel(new BorderLayout(15, 0));
    if (myIcon != null) {
      JLabel iconLabel = new JLabel(myIcon);
      Container container = new Container();
      container.setLayout(new BorderLayout());
      container.add(iconLabel, BorderLayout.NORTH);
      panel.add(container, BorderLayout.WEST);
    }
    return panel;
  }

  /** Creates a plain multi-line label panel for the message (alternative to the JTextPane variant). */
  @NotNull
  protected JPanel createMessagePanel() {
    JPanel messagePanel = new JPanel(new BorderLayout());
    if (myMessage != null) {
      JLabel textLabel = new JLabel(myMessage);
      textLabel.setBorder(BorderFactory.createEmptyBorder(0, 0, 5, 0));
      textLabel.setUI(new MultiLineLabelUI());
      messagePanel.add(textLabel, BorderLayout.NORTH);
    }
    return messagePanel;
  }

  protected JTextPane createMessageComponent(final @NlsContexts.DialogMessage String message) {
    final JTextPane messageComponent = new JTextPane();
    return Messages.configureMessagePaneUi(messageComponent, message);
  }

  @Override
  protected @Nullable String getHelpId() {
    return myHelpId;
  }
}
package com.github.noahdi.ocr;

/**
 * imports
 */
import java.awt.Color;
import java.awt.EventQueue;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.SystemColor;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JTextField;
import javax.swing.SwingConstants;

import com.github.jannled.lib.Print;
import com.github.jannled.lib.math.Matrix;
import com.github.jannled.ocr.ANN;
import com.github.jannled.ocr.Annone;
import com.github.jannled.ocr.debug.Debugger;

/**
 * Swing front end for a small handwriting-recognition demo.
 *
 * <p>The user draws a character on a {@code FreihandZeichnen} panel; the
 * 28x28 ink grid is converted into a {@code double[]} feature vector and fed
 * into an {@code Annone} neural network, either for training ("Lernen") or
 * classification ("Erkennen").
 */
public class Interface_2
{
	/** Global access point used by the drawing panel/debugger. */
	public static Interface_2 intf;

	/**
	 * attributes
	 */
	//Frames
	private JFrame frame;
	public static Debugger wm;

	// zeichen[row][col] is the preview cell; feldB/feldI mirror the ink state.
	JLabel zeichen[][];
	boolean feldB[][];
	int feldI[][];
	int feldF[][];
	// Flattened feature vector handed to the network (row-major, hoehe*breite).
	double[] feld;

	/** All chars the ANN can learn, currently 36 (a-z plus 0-9). */
	char alphabet[] = new char[]{'a','b','c','d','e','f',
			'g','h','i','j','k','l','m',
			'n','o','p','q','r','s','t',
			'u','v','w','x','y','z',
			'0','1','2','3','4','5',
			'6','7','8','9'};
	// One-hot-ish training target: ja for the drawn char, nein everywhere else.
	double[] alpha = new double[alphabet.length];
	double ja = 0.9, nein = 0.001;
	boolean leer;
	FreihandZeichnen panel;
	Annone ann;
	private JTextField letter;
	Graphics graph;
	String s = "Erkannter Buchstabe: ";
	JLabel lblNewLabel = new JLabel(s);
	ComputerZeichnen disp;

	/**
	 * Very very important!!!!
	 */
	/* +++important!!!!+++ +++important!!!!+++ +++important!!!!+++*/
	// Grid resolution of the drawing area (cells).
	int breite = 28, hoehe = 28;
	/* +++important!!!!+++ +++important!!!!+++ +++important!!!!+++*/
	// Pixel size of the drawing/preview area.
	int breitE = 250, hoehE = 300;

	/**
	 * Launch the application.
	 * @param args Command line start arguments.
	 */
	public static void main(String[] args)
	{
		EventQueue.invokeLater(new Runnable()
		{
			public void run()
			{
				try
				{
					Interface_2 window = new Interface_2();
					window.frame.setVisible(true);
				}
				catch (Exception e)
				{
					e.printStackTrace();
				}
			}
		});
		EventQueue.invokeLater(new Runnable()
		{
			public void run()
			{
				try
				{
					wm = new Debugger();
					wm.setVisible(true);
				}
				catch (Exception e)
				{
					e.printStackTrace();
				}
			}
		});
	}

	/**
	 * Create the application.
	 */
	public Interface_2()
	{
		Print.setOutputLevel(Print.ALL);
		intf = this;
		initialize();
	}

	/**
	 * Initialize the contents of the frame: allocates the grid buffers and the
	 * network, then builds all Swing components.
	 */
	private void initialize()
	{
		/**
		 * filling the attributes
		 */
		disp = new ComputerZeichnen(hoehe, breite, hoehE, breitE);
		zeichen = new JLabel[hoehe][breite];
		feldB = new boolean[hoehe][breite];
		feldI = new int[hoehe][breite];
		feldF = new int[hoehe][breite];
		feld = new double[(hoehe*breite)];
		ann = new Annone((hoehe*breite), hoehe*breite, alphabet.length, 0.3f);
		//graph = new Graphics();

		frame = new JFrame();
		frame.getContentPane().setBackground(SystemColor.scrollbar);
		frame.setBounds(100, 100, 1080, 720);
		frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		frame.getContentPane().setLayout(null);

		panel = new FreihandZeichnen(hoehe, breite);
		for(int q=0;q<hoehe;q++)
		{
			for(int w=0;w<breite;w++)
			{
				zeichen[q][w] = new JLabel("");
				zeichen[q][w].setBounds(720+(breitE/breite)*w, 100+(hoehE/hoehe)*q, (breitE/breite), (hoehE/hoehe));
				zeichen[q][w].setOpaque(true);
				zeichen[q][w].setBackground(Color.WHITE);
				frame.getContentPane().add(zeichen[q][w]);
			}
		}

		// FIX: was hard-coded to 26 and left the digit entries (indices 26-35)
		// uninitialized; size the loop to the actual target vector.
		for(int i=0; i<alpha.length; i++)
		{
			alpha[i]=nein;
		}

		panel.setBackground(new Color(51, 102, 102));
		panel.setBounds(100, 100, 250, 300);
		frame.getContentPane().add(panel);

		JButton reset = new JButton("Reset");
		reset.setFont(new Font("Comic Sans MS", Font.PLAIN, 30));
		reset.addActionListener(new ActionListener()
		{
			public void actionPerformed(ActionEvent e)
			{
				try
				{
					reset();
				}
				catch(Exception ee)
				{
				}
			}
		});
		reset.setBounds(700, 460, 250, 145);
		frame.getContentPane().add(reset);

		JButton lern = new JButton("Lernen");
		lern.setFont(new Font("Comic Sans MS", Font.PLAIN, 30));
		lern.addActionListener(new ActionListener()
		{
			public void actionPerformed(ActionEvent e)
			{
				try
				{
					lernen();
					reset();
				}
				catch(Exception ee)
				{
					ee.printStackTrace();
				}
			}
		});
		lern.setBounds(100, 522, 250, 84);
		frame.getContentPane().add(lern);

		letter = new JTextField();
		letter.setHorizontalAlignment(SwingConstants.CENTER);
		letter.setFont(new Font("Comic Sans MS", Font.PLAIN, 30));
		letter.setBounds(100, 460, 250, 50);
		frame.getContentPane().add(letter);
		letter.setColumns(10);

		JButton know = new JButton("Erkennen");
		know.setFont(new Font("Comic Sans MS", Font.PLAIN, 30));
		know.addActionListener(new ActionListener()
		{
			public void actionPerformed(ActionEvent e)
			{
				//Entscheide welcher Buchstabe am wahrscheinlichsten ist.
				// Forward pass; pick the output neuron with the highest activation.
				int highpos = 0;
				Matrix result = ann.forward(new Matrix(feld, 1, feld.length));
				for(int i=0; i<result.getHeight(); i++)
				{
					if(result.getValues()[i] > result.getValues()[highpos])
					{
						highpos = i;
					}
				}
				Print.m("Die Ergebnisse: " + result.transpose().toString() + "Am wahrscheinlichsten: " + alphabet[highpos] + " mit " + result.getValues()[highpos]);
				lblNewLabel.setText(s+ alphabet[highpos]);
				reset();
			}
		});
		know.setBounds(410, 521, 250, 84);
		frame.getContentPane().add(know);

		lblNewLabel.setFont(new Font("Comic Sans MS", Font.PLAIN, 20));
		lblNewLabel.setHorizontalAlignment(SwingConstants.CENTER);
		lblNewLabel.setBounds(410, 460, 250, 50);
		frame.getContentPane().add(lblNewLabel);

		JLabel titel = new JLabel("<html>J&N Handschrifterkennung<sup><FONT SIZE=\"4\">TM</sup></html>");
		titel.setFont(new Font("Comic Sans MS", Font.PLAIN, 30));
		titel.setBounds(100, 11, 560, 90);
		frame.getContentPane().add(titel);

		JLabel lblNewLabel_1 = new JLabel("a product powered by JannLed und NoahDi");
		lblNewLabel_1.setFont(new Font("Arial", Font.PLAIN, 15));
		lblNewLabel_1.setBounds(100, 617, 560, 39);
		frame.getContentPane().add(lblNewLabel_1);

		reset();
	}

	/**
	 * Trains the network on the currently drawn character, using the character
	 * typed into the text field as the label.
	 *
	 * <p>FIX: the target vector is now re-initialized to {@code nein} on every
	 * call — previously the {@code ja} entry of each earlier sample was never
	 * cleared, so later samples were trained against multiple active targets.
	 * If the typed character is not part of {@link #alphabet}, training is
	 * skipped instead of propagating an all-{@code nein} target.
	 */
	public void lernen()
	{
		String tf = letter.getText().toLowerCase();
		int match = -1;
		for(int w=0; w<alphabet.length; w++)
		{
			alpha[w] = nein;
			if(tf.equals("" + alphabet[w]))
			{
				match = w;
			}
		}
		if(match < 0)
		{
			// Unknown label: do not corrupt the weights with a meaningless target.
			Print.m("Unknown character '" + tf + "', training sample skipped.");
			return;
		}
		alpha[match] = ja;
		ann.backpropagate(new Matrix(feld, 1, feld.length),new Matrix(alpha, 1, alpha.length));
		for(int i=0; i<ann.getWeights().length; i++)
		{
			Print.d(ann.getWeights()[i].toString());
		}
		Print.m("Weights updatet with training sample.");
	}

	/**
	 * Copies the ink counters from the drawing panel into the feature buffers:
	 * any cell with ink becomes {@code ja} in {@link #feld}, everything else
	 * {@code nein}.
	 */
	public void holen()
	{
		for(int i=0;i<hoehe;i++)
		{
			for(int j=0;j<breite;j++)
			{
				feldI[i][j]= panel.counter[i][j];
				if(feldI[i][j]>0)
				{
					leer = false;
					feld[i*breite+j] = ja;
					feldB[i][j] = true;
				}
				else
				{
					feld[i*breite+j] = nein;
					feldB[i][j] = false;
				}
			}
		}
	}

	/**
	 * Renders the current feature vector into the preview grid (green intensity
	 * scaled by ink count) and dumps it to the debug log.
	 */
	public void zeigen()
	{
		Print.d("");
		Print.d("");
		Color temp;
		for(int i=0; i<hoehe; i++)
		{
			for(int j=0; j<breite; j++)
			{
				Print.d(" "+feld[i*breite+j]);
				if(feldB[i][j]==true)
				{
					temp = new Color(0,(255-getF(i,j)),0);
					zeichen[i][j].setBackground(temp);
				}
			}
			Print.d("");
		}
		for(int i=0; i<(hoehe*breite); i++)
		{
			Print.d(feld[i]+"");
		}
	}

	/**
	 * Clears the drawing panel, the feature buffers, the preview grid and the
	 * label text field.
	 */
	public void reset()
	{
		panel.back();
		leer = true;
		for(int i =0;i<hoehe;i++)
		{
			for(int j=0; j<breite; j++)
			{
				feldB[i][j]= false;
				zeichen[i][j].setBackground(Color.WHITE);
				feld[i*breite+j] = nein;
			}
		}
		letter.setText("");
	}

	/**
	 * Returns the ink counter at (a, b), clamped to 255 so it fits into a
	 * color channel.
	 */
	public int getF(int a,int b)
	{
		if(feldI[a][b]<255)
		{
			return feldI[a][b];
		}
		else
		{
			return 255;
		}
	}

	/** @return the neural network backing this interface. */
	public ANN getANN()
	{
		return ann;
	}
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.cdn.v2020_04_15;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;

// NOTE(review): AutoRest-generated model. Do not hand-edit behavior here —
// regenerate from the service specification instead; manual changes will be
// lost on the next codegen run. @JsonFlatten maps the "properties.*" JSON
// sub-object onto the flat fields below.
/**
 * Origin properties needed for origin update.
 */
@JsonFlatten
public class OriginUpdateParameters {
    /**
     * The address of the origin. Domain names, IPv4 addresses, and IPv6
     * addresses are supported.This should be unique across all origins in an
     * endpoint.
     */
    @JsonProperty(value = "properties.hostName")
    private String hostName;

    /**
     * The value of the HTTP port. Must be between 1 and 65535.
     */
    @JsonProperty(value = "properties.httpPort")
    private Integer httpPort;

    /**
     * The value of the HTTPS port. Must be between 1 and 65535.
     */
    @JsonProperty(value = "properties.httpsPort")
    private Integer httpsPort;

    /**
     * The host header value sent to the origin with each request. If you leave
     * this blank, the request hostname determines this value. Azure CDN
     * origins, such as Web Apps, Blob Storage, and Cloud Services require this
     * host header value to match the origin hostname by default. This
     * overrides the host header defined at Endpoint.
     */
    @JsonProperty(value = "properties.originHostHeader")
    private String originHostHeader;

    /**
     * Priority of origin in given origin group for load balancing. Higher
     * priorities will not be used for load balancing if any lower priority
     * origin is healthy.Must be between 1 and 5.
     */
    @JsonProperty(value = "properties.priority")
    private Integer priority;

    /**
     * Weight of the origin in given origin group for load balancing. Must be
     * between 1 and 1000.
     */
    @JsonProperty(value = "properties.weight")
    private Integer weight;

    /**
     * Origin is enabled for load balancing or not.
     */
    @JsonProperty(value = "properties.enabled")
    private Boolean enabled;

    /**
     * The Alias of the Private Link resource. Populating this optional field
     * indicates that this origin is 'Private'.
     */
    @JsonProperty(value = "properties.privateLinkAlias")
    private String privateLinkAlias;

    /**
     * The Resource Id of the Private Link resource. Populating this optional
     * field indicates that this backend is 'Private'.
     */
    @JsonProperty(value = "properties.privateLinkResourceId")
    private String privateLinkResourceId;

    /**
     * The location of the Private Link resource. Required only if
     * 'privateLinkResourceId' is populated.
     */
    @JsonProperty(value = "properties.privateLinkLocation")
    private String privateLinkLocation;

    /**
     * A custom message to be included in the approval request to connect to
     * the Private Link.
     */
    @JsonProperty(value = "properties.privateLinkApprovalMessage")
    private String privateLinkApprovalMessage;

    /**
     * Get the address of the origin. Domain names, IPv4 addresses, and IPv6 addresses are supported.This should be unique across all origins in an endpoint.
     *
     * @return the hostName value
     */
    public String hostName() {
        return this.hostName;
    }

    /**
     * Set the address of the origin. Domain names, IPv4 addresses, and IPv6 addresses are supported.This should be unique across all origins in an endpoint.
     *
     * @param hostName the hostName value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withHostName(String hostName) {
        this.hostName = hostName;
        return this;
    }

    /**
     * Get the value of the HTTP port. Must be between 1 and 65535.
     *
     * @return the httpPort value
     */
    public Integer httpPort() {
        return this.httpPort;
    }

    /**
     * Set the value of the HTTP port. Must be between 1 and 65535.
     *
     * @param httpPort the httpPort value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withHttpPort(Integer httpPort) {
        this.httpPort = httpPort;
        return this;
    }

    /**
     * Get the value of the HTTPS port. Must be between 1 and 65535.
     *
     * @return the httpsPort value
     */
    public Integer httpsPort() {
        return this.httpsPort;
    }

    /**
     * Set the value of the HTTPS port. Must be between 1 and 65535.
     *
     * @param httpsPort the httpsPort value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withHttpsPort(Integer httpsPort) {
        this.httpsPort = httpsPort;
        return this;
    }

    /**
     * Get the host header value sent to the origin with each request. If you leave this blank, the request hostname determines this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this host header value to match the origin hostname by default. This overrides the host header defined at Endpoint.
     *
     * @return the originHostHeader value
     */
    public String originHostHeader() {
        return this.originHostHeader;
    }

    /**
     * Set the host header value sent to the origin with each request. If you leave this blank, the request hostname determines this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this host header value to match the origin hostname by default. This overrides the host header defined at Endpoint.
     *
     * @param originHostHeader the originHostHeader value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withOriginHostHeader(String originHostHeader) {
        this.originHostHeader = originHostHeader;
        return this;
    }

    /**
     * Get priority of origin in given origin group for load balancing. Higher priorities will not be used for load balancing if any lower priority origin is healthy.Must be between 1 and 5.
     *
     * @return the priority value
     */
    public Integer priority() {
        return this.priority;
    }

    /**
     * Set priority of origin in given origin group for load balancing. Higher priorities will not be used for load balancing if any lower priority origin is healthy.Must be between 1 and 5.
     *
     * @param priority the priority value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withPriority(Integer priority) {
        this.priority = priority;
        return this;
    }

    /**
     * Get weight of the origin in given origin group for load balancing. Must be between 1 and 1000.
     *
     * @return the weight value
     */
    public Integer weight() {
        return this.weight;
    }

    /**
     * Set weight of the origin in given origin group for load balancing. Must be between 1 and 1000.
     *
     * @param weight the weight value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withWeight(Integer weight) {
        this.weight = weight;
        return this;
    }

    /**
     * Get origin is enabled for load balancing or not.
     *
     * @return the enabled value
     */
    public Boolean enabled() {
        return this.enabled;
    }

    /**
     * Set origin is enabled for load balancing or not.
     *
     * @param enabled the enabled value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withEnabled(Boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    /**
     * Get the Alias of the Private Link resource. Populating this optional field indicates that this origin is 'Private'.
     *
     * @return the privateLinkAlias value
     */
    public String privateLinkAlias() {
        return this.privateLinkAlias;
    }

    /**
     * Set the Alias of the Private Link resource. Populating this optional field indicates that this origin is 'Private'.
     *
     * @param privateLinkAlias the privateLinkAlias value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withPrivateLinkAlias(String privateLinkAlias) {
        this.privateLinkAlias = privateLinkAlias;
        return this;
    }

    /**
     * Get the Resource Id of the Private Link resource. Populating this optional field indicates that this backend is 'Private'.
     *
     * @return the privateLinkResourceId value
     */
    public String privateLinkResourceId() {
        return this.privateLinkResourceId;
    }

    /**
     * Set the Resource Id of the Private Link resource. Populating this optional field indicates that this backend is 'Private'.
     *
     * @param privateLinkResourceId the privateLinkResourceId value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withPrivateLinkResourceId(String privateLinkResourceId) {
        this.privateLinkResourceId = privateLinkResourceId;
        return this;
    }

    /**
     * Get the location of the Private Link resource. Required only if 'privateLinkResourceId' is populated.
     *
     * @return the privateLinkLocation value
     */
    public String privateLinkLocation() {
        return this.privateLinkLocation;
    }

    /**
     * Set the location of the Private Link resource. Required only if 'privateLinkResourceId' is populated.
     *
     * @param privateLinkLocation the privateLinkLocation value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withPrivateLinkLocation(String privateLinkLocation) {
        this.privateLinkLocation = privateLinkLocation;
        return this;
    }

    /**
     * Get a custom message to be included in the approval request to connect to the Private Link.
     *
     * @return the privateLinkApprovalMessage value
     */
    public String privateLinkApprovalMessage() {
        return this.privateLinkApprovalMessage;
    }

    /**
     * Set a custom message to be included in the approval request to connect to the Private Link.
     *
     * @param privateLinkApprovalMessage the privateLinkApprovalMessage value to set
     * @return the OriginUpdateParameters object itself.
     */
    public OriginUpdateParameters withPrivateLinkApprovalMessage(String privateLinkApprovalMessage) {
        this.privateLinkApprovalMessage = privateLinkApprovalMessage;
        return this;
    }

}
/**
 * Copyright 2013 Matija Mazi.
 * Copyright 2014 Kangmo Kim
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.nhnsoft.bitcoin.crypto;

import com.nhnsoft.bitcoin.core.ECKey;
import com.nhnsoft.bitcoin.core.Utils;
import com.google.common.collect.ImmutableList;
import org.spongycastle.crypto.macs.HMac;
import org.spongycastle.math.ec.ECPoint;

import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.security.SecureRandom;
import java.util.Arrays;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;

/**
 * Implementation of the <a href="https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki">BIP 32</a>
 * deterministic wallet child key generation algorithm.
 */
public final class HDKeyDerivation {
    // Some arbitrary random number. Doesn't matter what it is. Used only by the
    // WITH_INVERSION public-derivation mode as a blinding value.
    private static final BigInteger RAND_INT = new BigInteger(256, new SecureRandom());

    private HDKeyDerivation() { }

    /**
     * Child derivation may fail (although with extremely low probability); in such case it is re-attempted.
     * This is the maximum number of re-attempts (to avoid an infinite loop in case of bugs etc.).
     */
    public static final int MAX_CHILD_DERIVATION_ATTEMPTS = 100;

    /** HMAC-SHA512 instance keyed with the BIP 32 master-key string "Bitcoin seed". */
    public static final HMac MASTER_HMAC_SHA512 = HDUtils.createHmacSha512Digest("Bitcoin seed".getBytes());

    /**
     * Generates a new deterministic key from the given seed, which can be any arbitrary byte array. However resist
     * the temptation to use a string as the seed - any key derived from a password is likely to be weak and easily
     * broken by attackers (this is not theoretical, people have had money stolen that way). This method checks
     * that the given seed is at least 64 bits long.
     *
     * @throws HDDerivationException if generated master key is invalid (private key 0 or >= n).
     * @throws IllegalArgumentException if the seed is less than 8 bytes and could be brute forced.
     */
    public static DeterministicKey createMasterPrivateKey(byte[] seed) throws HDDerivationException {
        checkArgument(seed.length > 8, "Seed is too short and could be brute forced");
        // Calculate I = HMAC-SHA512(key="Bitcoin seed", msg=S)
        byte[] i = HDUtils.hmacSha512(MASTER_HMAC_SHA512, seed);
        // Split I into two 32-byte sequences, Il and Ir.
        // Use Il as master secret key, and Ir as master chain code.
        checkState(i.length == 64, i.length);
        byte[] il = Arrays.copyOfRange(i, 0, 32);
        byte[] ir = Arrays.copyOfRange(i, 32, 64);
        // Wipe intermediate key material as soon as it is no longer needed.
        Arrays.fill(i, (byte)0);
        DeterministicKey masterPrivKey = createMasterPrivKeyFromBytes(il, ir);
        Arrays.fill(il, (byte)0);
        Arrays.fill(ir, (byte)0);
        // Child deterministic keys will chain up to their parents to find the keys.
        masterPrivKey.setCreationTimeSeconds(Utils.currentTimeSeconds());
        return masterPrivKey;
    }

    /**
     * Builds a master key from raw private-key bytes and a chain code.
     *
     * @throws HDDerivationException if privKeyBytes is invalid (0 or >= n).
     */
    public static DeterministicKey createMasterPrivKeyFromBytes(byte[] privKeyBytes, byte[] chainCode) throws HDDerivationException {
        BigInteger priv = new BigInteger(1, privKeyBytes);
        assertNonZero(priv, "Generated master key is invalid.");
        assertLessThanN(priv, "Generated master key is invalid.");
        return new DeterministicKey(ImmutableList.<ChildNumber>of(), chainCode, priv, null);
    }

    /** Builds a watching (public-only) master key from an encoded EC point and a chain code. */
    public static DeterministicKey createMasterPubKeyFromBytes(byte[] pubKeyBytes, byte[] chainCode) {
        return new DeterministicKey(ImmutableList.<ChildNumber>of(), chainCode, ECKey.CURVE.getCurve().decodePoint(pubKeyBytes), null, null);
    }

    /**
     * Derives a key given the "extended" child number, ie. with the 0x80000000 bit specifying whether to use hardened
     * derivation or not.
     */
    public static DeterministicKey deriveChildKey(DeterministicKey parent, int childNumber) {
        return deriveChildKey(parent, new ChildNumber(childNumber));
    }

    /**
     * Derives a key of the "extended" child number, ie. with the 0x80000000 bit specifying whether to use
     * hardened derivation or not. If derivation fails, tries a next child.
     */
    public static DeterministicKey deriveThisOrNextChildKey(DeterministicKey parent, int childNumber) {
        ChildNumber requested = new ChildNumber(childNumber);
        boolean isHardened = requested.isHardened();
        // FIX: attempts were previously based on the last *mutated* child number
        // (child.num() + nAttempts), which tried childNumber, +1, +3, +6, ...
        // and skipped children. Base every attempt on the originally requested
        // number so consecutive children are tried, as documented.
        final int base = requested.num();
        int nAttempts = 0;
        while (nAttempts < MAX_CHILD_DERIVATION_ATTEMPTS) {
            try {
                ChildNumber child = new ChildNumber(base + nAttempts, isHardened);
                return deriveChildKey(parent, child);
            } catch (HDDerivationException ignore) {
                // Astronomically unlikely (I_L >= n or derived key == 0); try the next child.
            }
            nAttempts++;
        }
        throw new HDDerivationException("Maximum number of child derivation attempts reached, this is probably an indication of a bug.");
    }

    /**
     * Derives the child key for the given child number, using public derivation when the parent
     * has no private key and private derivation otherwise.
     *
     * @throws HDDerivationException if private derivation is attempted for a public-only parent key, or
     * if the resulting derived key is invalid (eg. private key == 0).
     */
    public static DeterministicKey deriveChildKey(DeterministicKey parent, ChildNumber childNumber) throws HDDerivationException {
        if (parent.isPubKeyOnly()) {
            RawKeyBytes rawKey = deriveChildKeyBytesFromPublic(parent, childNumber, PublicDeriveMode.NORMAL);
            return new DeterministicKey(
                    HDUtils.append(parent.getPath(), childNumber),
                    rawKey.chainCode,
                    ECKey.CURVE.getCurve().decodePoint(rawKey.keyBytes), // c'tor will compress
                    null,
                    parent);
        } else {
            RawKeyBytes rawKey = deriveChildKeyBytesFromPrivate(parent, childNumber);
            return new DeterministicKey(
                    HDUtils.append(parent.getPath(), childNumber),
                    rawKey.chainCode,
                    new BigInteger(1, rawKey.keyBytes),
                    parent);
        }
    }

    /**
     * CKDpriv: computes the child private key bytes and chain code from a parent that has a
     * private key. Hardened derivation feeds the parent private key into the HMAC, normal
     * derivation feeds the compressed parent public key.
     */
    public static RawKeyBytes deriveChildKeyBytesFromPrivate(DeterministicKey parent,
                                                             ChildNumber childNumber) throws HDDerivationException {
        checkArgument(parent.hasPrivKey(), "Parent key must have private key bytes for this method.");
        byte[] parentPublicKey = ECKey.compressPoint(parent.getPubKeyPoint()).getEncoded();
        assert parentPublicKey.length == 33 : parentPublicKey.length;
        // data = (hardened ? 0x00 || ser256(k_par) : serP(K_par)) || ser32(i)
        ByteBuffer data = ByteBuffer.allocate(37);
        if (childNumber.isHardened()) {
            data.put(parent.getPrivKeyBytes33());
        } else {
            data.put(parentPublicKey);
        }
        data.putInt(childNumber.i());
        byte[] i = HDUtils.hmacSha512(parent.getChainCode(), data.array());
        assert i.length == 64 : i.length;
        byte[] il = Arrays.copyOfRange(i, 0, 32);
        byte[] chainCode = Arrays.copyOfRange(i, 32, 64);
        BigInteger ilInt = new BigInteger(1, il);
        assertLessThanN(ilInt, "Illegal derived key: I_L >= n");
        final BigInteger priv = parent.getPrivKey();
        // k_i = (I_L + k_par) mod n
        BigInteger ki = priv.add(ilInt).mod(ECKey.CURVE.getN());
        assertNonZero(ki, "Illegal derived key: derived private key equals 0.");
        return new RawKeyBytes(ki.toByteArray(), chainCode);
    }

    public enum PublicDeriveMode {
        NORMAL,
        WITH_INVERSION
    }

    /**
     * CKDpub: computes the child public key bytes and chain code from the parent public key
     * alone. Only non-hardened derivation is possible without the private key.
     */
    public static RawKeyBytes deriveChildKeyBytesFromPublic(DeterministicKey parent, ChildNumber childNumber, PublicDeriveMode mode) throws HDDerivationException {
        checkArgument(!childNumber.isHardened(), "Can't use private derivation with public keys only.");
        byte[] parentPublicKey = ECKey.compressPoint(parent.getPubKeyPoint()).getEncoded();
        assert parentPublicKey.length == 33 : parentPublicKey.length;
        ByteBuffer data = ByteBuffer.allocate(37);
        data.put(parentPublicKey);
        data.putInt(childNumber.i());
        byte[] i = HDUtils.hmacSha512(parent.getChainCode(), data.array());
        assert i.length == 64 : i.length;
        byte[] il = Arrays.copyOfRange(i, 0, 32);
        byte[] chainCode = Arrays.copyOfRange(i, 32, 64);
        BigInteger ilInt = new BigInteger(1, il);
        assertLessThanN(ilInt, "Illegal derived key: I_L >= n");

        final ECPoint G = ECKey.CURVE.getG();
        final BigInteger N = ECKey.CURVE.getN();
        ECPoint Ki;
        switch (mode) {
            case NORMAL:
                // K_i = I_L * G + K_par
                Ki = G.multiply(ilInt).add(parent.getPubKeyPoint());
                break;
            case WITH_INVERSION:
                // This trick comes from Gregory Maxwell. Check the homomorphic properties of our curve hold. The
                // below calculations should be redundant and give the same result as NORMAL but if the precalculated
                // tables have taken a bit flip will yield a different answer. This mode is used when vending a key
                // to perform a last-ditch sanity check trying to catch bad RAM.
                Ki = G.multiply(ilInt.add(RAND_INT));
                BigInteger additiveInverse = RAND_INT.negate().mod(N);
                Ki = Ki.add(G.multiply(additiveInverse));
                Ki = Ki.add(parent.getPubKeyPoint());
                break;
            default: throw new AssertionError();
        }

        assertNonInfinity(Ki, "Illegal derived key: derived public key equals infinity.");
        return new RawKeyBytes(Ki.getEncoded(true), chainCode);
    }

    private static void assertNonZero(BigInteger integer, String errorMessage) {
        if (integer.equals(BigInteger.ZERO))
            throw new HDDerivationException(errorMessage);
    }

    private static void assertNonInfinity(ECPoint point, String errorMessage) {
        if (point.equals(ECKey.CURVE.getCurve().getInfinity()))
            throw new HDDerivationException(errorMessage);
    }

    private static void assertLessThanN(BigInteger integer, String errorMessage) {
        // FIX: BIP 32 declares a key invalid when parse256(I_L) is 0 or >= n.
        // The previous check (compareTo(...) > 0) wrongly accepted a value
        // exactly equal to n; reject on >= as the spec requires.
        if (integer.compareTo(ECKey.CURVE.getN()) >= 0)
            throw new HDDerivationException(errorMessage);
    }

    /** Raw result of a derivation step: key bytes (private scalar or encoded point) plus chain code. */
    public static class RawKeyBytes {
        public final byte[] keyBytes, chainCode;

        public RawKeyBytes(byte[] keyBytes, byte[] chainCode) {
            this.keyBytes = keyBytes;
            this.chainCode = chainCode;
        }
    }
}
/*
 * Copyright (C) 2017-2019 Dremio Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dremio.exec.planner.observer;

import java.util.List;
import java.util.concurrent.CountDownLatch;

import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlNode;

import com.dremio.common.DeferredException;
import com.dremio.common.SerializedExecutor;
import com.dremio.common.utils.protos.QueryWritableBatch;
import com.dremio.exec.catalog.DremioTable;
import com.dremio.exec.planner.CachedAccelDetails;
import com.dremio.exec.planner.PlannerPhase;
import com.dremio.exec.planner.acceleration.DremioMaterialization;
import com.dremio.exec.planner.acceleration.substitution.SubstitutionInfo;
import com.dremio.exec.planner.fragment.PlanningSet;
import com.dremio.exec.planner.physical.Prel;
import com.dremio.exec.proto.GeneralRPCProtos.Ack;
import com.dremio.exec.proto.UserBitShared.AttemptEvent;
import com.dremio.exec.proto.UserBitShared.FragmentRpcSizeStats;
import com.dremio.exec.proto.UserBitShared.QueryProfile;
import com.dremio.exec.rpc.RpcOutcomeListener;
import com.dremio.exec.work.QueryWorkUnit;
import com.dremio.exec.work.foreman.ExecutionPlan;
import com.dremio.exec.work.protector.UserRequest;
import com.dremio.exec.work.protector.UserResult;
import com.dremio.reflection.hints.ReflectionExplanationsAndQueryDistance;
import com.dremio.resource.ResourceSchedulingDecisionInfo;

/**
 * Does query observations in order but not in the query execution thread. This
 * ensures two things:
 * - any blocking commands don't block the underlying thread
 * - any exceptions don't bleed into the caller.
 *
 * Additionally, the observer will report back all exceptions thrown in the callbacks to the delegate
 * {@link #attemptCompletion(UserResult)} callback
 */
public class OutOfBandAttemptObserver implements AttemptObserver {

  // Runs the queued callbacks one at a time, preserving submission order.
  private final SerializedExecutor<Runnable> serializedExec;
  // The real observer every callback is forwarded to.
  private final AttemptObserver innerObserver;
  // Accumulates exceptions thrown by forwarded callbacks; surfaced once in attemptCompletion().
  private final DeferredException deferred = new DeferredException();

  OutOfBandAttemptObserver(AttemptObserver innerObserver, SerializedExecutor<Runnable> serializedExec) {
    this.serializedExec = serializedExec;
    this.innerObserver = innerObserver;
  }

  // --- Each method below simply forwards its arguments to innerObserver via
  // --- execute(), i.e. asynchronously, in order, with exceptions deferred.

  @Override
  public void beginState(final AttemptEvent event) {
    execute(() -> innerObserver.beginState(event));
  }

  @Override
  public void queryStarted(final UserRequest query, final String user) {
    execute(() -> innerObserver.queryStarted(query, user));
  }

  @Override
  public void commandPoolWait(long waitInMillis) {
    execute(() -> innerObserver.commandPoolWait(waitInMillis));
  }

  @Override
  public void planText(final String text, final long millisTaken) {
    execute(() -> innerObserver.planText(text, millisTaken));
  }

  @Override
  public void finalPrel(final Prel prel) {
    execute(() -> innerObserver.finalPrel(prel));
  }

  @Override
  public void recordExtraInfo(final String name, final byte[] bytes) {
    execute(() -> innerObserver.recordExtraInfo(name, bytes));
  }

  @Override
  public void planRelTransform(final PlannerPhase phase, final RelOptPlanner planner, final RelNode before, final RelNode after, final long millisTaken) {
    execute(() -> innerObserver.planRelTransform(phase, planner, before, after, millisTaken));
  }

  @Override
  public void planParallelStart() {
    execute(innerObserver::planParallelStart);
  }

  @Override
  public void planParallelized(final PlanningSet planningSet) {
    execute(() -> innerObserver.planParallelized(planningSet));
  }

  @Override
  public void planFindMaterializations(final long millisTaken) {
    execute(() -> innerObserver.planFindMaterializations(millisTaken));
  }

  @Override
  public void planNormalized(final long millisTaken, final List<RelNode> normalizedQueryPlans) {
    execute(() -> innerObserver.planNormalized(millisTaken, normalizedQueryPlans));
  }

  @Override
  public void planSubstituted(final DremioMaterialization materialization,
                              final List<RelNode> substitutions,
                              final RelNode target, final long millisTaken, boolean defaultReflection) {
    execute(() -> innerObserver.planSubstituted(materialization, substitutions, target, millisTaken, defaultReflection));
  }

  @Override
  public void substitutionFailures(Iterable<String> errors) {
    execute(() -> innerObserver.substitutionFailures(errors));
  }

  @Override
  public void planAccelerated(final SubstitutionInfo info) {
    execute(() -> innerObserver.planAccelerated(info));
  }

  @Override
  public void applyAccelDetails(final CachedAccelDetails accelDetails) {
    execute(() -> innerObserver.applyAccelDetails(accelDetails));
  }

  @Override
  public void planCompleted(final ExecutionPlan plan) {
    execute(() -> innerObserver.planCompleted(plan));
  }

  @Override
  public void execStarted(final QueryProfile profile) {
    execute(() -> innerObserver.execStarted(profile));
  }

  @Override
  public void execDataArrived(final RpcOutcomeListener<Ack> outcomeListener, final QueryWritableBatch result) {
    execute(() -> innerObserver.execDataArrived(outcomeListener, result));
  }

  @Override
  public void planJsonPlan(final String text) {
    execute(() -> innerObserver.planJsonPlan(text));
  }

  @Override
  public void planStart(final String rawPlan) {
    execute(() -> innerObserver.planStart(rawPlan));
  }

  @Override
  public void planValidated(final RelDataType rowType, final SqlNode node, final long millisTaken) {
    execute(() -> innerObserver.planValidated(rowType, node, millisTaken));
  }

  @Override
  public void planCacheUsed(int count) {
    execute(() -> innerObserver.planCacheUsed(count));
  }

  @Override
  public void planSerializable(final RelNode serializable) {
    execute(() -> innerObserver.planSerializable(serializable));
  }

  @Override
  public void planConvertedToRel(final RelNode converted, final long millisTaken) {
    execute(() -> innerObserver.planConvertedToRel(converted, millisTaken));
  }

  @Override
  public void planConvertedScan(final RelNode converted, final long millisTaken) {
    execute(() -> innerObserver.planConvertedScan(converted, millisTaken));
  }

  @Override
  public void planExpandView(final RelRoot expanded, final List<String> schemaPath, final int nestingLevel, final String sql) {
    execute(() -> innerObserver.planExpandView(expanded, schemaPath, nestingLevel, sql));
  }

  @Override
  public void plansDistributionComplete(final QueryWorkUnit unit) {
    execute(() -> innerObserver.plansDistributionComplete(unit));
  }

  /**
   * Forwards attempt completion synchronously: queues the callback behind all
   * previously submitted observations, attaches any deferred callback
   * exceptions to the result, and blocks until the delegate has run so the
   * caller sees a fully completed attempt.
   */
  @Override
  public void attemptCompletion(final UserResult result) {
    // make sure we have correct ordering (this should come after all previous observations).
    final CountDownLatch cd = new CountDownLatch(1);
    serializedExec.execute(() -> {
      try {
        UserResult finalResult = result;
        if (deferred.hasException()) {
          // Fold every exception captured by execute() into the final result.
          finalResult = finalResult.withException(deferred.getAndClear());
        }
        innerObserver.attemptCompletion(finalResult);
      } finally {
        cd.countDown();
      }
    });

    try{
      cd.await();
    } catch(InterruptedException ex){
      // Preserve the interrupt for the caller rather than swallowing it.
      Thread.currentThread().interrupt();
    }
  }

  @Override
  public void executorsSelected(long millisTaken, int idealNumFragments, int idealNumNodes, int numExecutors, String detailsText) {
    execute(() -> innerObserver.executorsSelected(millisTaken, idealNumFragments, idealNumNodes, numExecutors, detailsText));
  }

  @Override
  public void recordsProcessed(long recordCount) {
    execute(() -> innerObserver.recordsProcessed(recordCount));
  }

  @Override
  public void planGenerationTime(final long millisTaken) {
    execute(() -> innerObserver.planGenerationTime(millisTaken));
  }

  @Override
  public void planAssignmentTime(final long millisTaken) {
    execute(() -> innerObserver.planAssignmentTime(millisTaken));
  }

  @Override
  public void fragmentsStarted(final long millisTaken, FragmentRpcSizeStats stats) {
    execute(() -> innerObserver.fragmentsStarted(millisTaken, stats));
  }

  @Override
  public void fragmentsActivated(final long millisTaken) {
    execute(() -> innerObserver.fragmentsActivated(millisTaken));
  }

  @Override
  public void activateFragmentFailed(Exception ex) {
    execute(() -> innerObserver.activateFragmentFailed(ex));
  }

  @Override
  public void resourcesScheduled(ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo) {
    execute(() -> innerObserver.resourcesScheduled(resourceSchedulingDecisionInfo));
  }

  @Override
  public void updateReflectionsWithHints(ReflectionExplanationsAndQueryDistance reflectionExplanationsAndQueryDistance) {
    execute(() -> innerObserver.updateReflectionsWithHints(reflectionExplanationsAndQueryDistance));
  }

  @Override
  public void tablesCollected(Iterable<DremioTable> tables) {
    execute(() -> innerObserver.tablesCollected(tables));
  }

  /**
   * Wraps the runnable so that any exception thrown will eventually cause the attempt
   * to fail when handling the {@link #attemptCompletion(UserResult)} callback
   */
  private void execute(Runnable runnable) {
    serializedExec.execute(() -> {
      try {
        runnable.run();
      } catch (Throwable ex) {
        // Deliberately swallow here: exceptions are re-surfaced exactly once,
        // attached to the UserResult in attemptCompletion().
        deferred.addThrowable(ex);
      }
    });
  }
}
/*
 * Copyright (c) 2014 Personal-Health-Monitoring-System
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cse3310.phms.ui.activities;

import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v4.app.NavUtils;
import android.view.View;
import android.widget.*;
import com.actionbarsherlock.app.SherlockFragmentActivity;
import com.actionbarsherlock.view.Menu;
import com.cse3310.phms.R;
import com.cse3310.phms.model.Appointment;
import com.cse3310.phms.model.DoctorInfo;
import com.cse3310.phms.model.Reminder;
import com.cse3310.phms.model.utils.MyDateFormatter;
import com.cse3310.phms.ui.services.ReminderAlarm;
import com.cse3310.phms.ui.utils.UserSingleton;
import com.cse3310.phms.ui.views.AppointmentView;
import com.doomonafireball.betterpickers.timepicker.TimePickerBuilder;
import com.doomonafireball.betterpickers.timepicker.TimePickerDialogFragment;
import org.androidannotations.annotations.*;

import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Screen for creating a new {@link Appointment} with one of the current
 * user's doctors: pick a doctor, pick a time of day, optionally enter a
 * purpose, choose an early-reminder lead time, then save.
 *
 * <p>View binding and event wiring are generated by AndroidAnnotations
 * ({@code @EActivity}, {@code @ViewById}, {@code @Click}, {@code @OptionsItem},
 * {@code @AfterViews}); the appointment date arrives via the {@code "date"}
 * serializable intent extra.
 */
@EActivity(R.layout.add_appointment_form)
public class AddAppointmentActivity extends SherlockFragmentActivity
        implements TimePickerDialogFragment.TimePickerDialogHandler {

    @ViewById(R.id.add_appointment_select_btn) TextView mDoctorButtonTextView;
    @ViewById(R.id.add_appointment_time_btn) TextView mTimeButtonTextView;
    @ViewById(R.id.add_appointment_purpose) EditText mPurposeEditText;
    @ViewById(R.id.reminder_spinner) Spinner mReminderSpinner;
    @ViewById(R.id.appointment_view) AppointmentView mAppointmentView;

    // Doctor chosen from the selection dialog; stays null until the user picks one,
    // and saving is a no-op while it is null (see handleSaveIconClick).
    private DoctorInfo mSelectedDoctor;
    // Date supplied by the caller through the "date" intent extra; may be null.
    private Date mSelectedDate;
    // Appointment instant in epoch millis: date part from the intent extra,
    // time-of-day part added by the time picker callback.
    private long appointmentTime;
    // Reminder lead time in millis, selected from the spinner (0 = remind at the time).
    private long earlyMillis;

    // Lead times corresponding, by position, to the R.array.early_reminder_chose entries.
    private static final long[] EARLY_REMINDER_MILLIS = {
            0,
            TimeUnit.MINUTES.toMillis(5),
            TimeUnit.MINUTES.toMillis(10),
            TimeUnit.MINUTES.toMillis(30),
            TimeUnit.HOURS.toMillis(1),
            TimeUnit.HOURS.toMillis(2),
            TimeUnit.HOURS.toMillis(12),
            TimeUnit.HOURS.toMillis(24),
            TimeUnit.DAYS.toMillis(2),
            TimeUnit.DAYS.toMillis(7),
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // enable the up/home button in the actionbar
        getSupportActionBar().setHomeButtonEnabled(true);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);

        mSelectedDate = (Date) getIntent().getSerializableExtra("date");
    }

    /** Called by AndroidAnnotations once the injected views are ready. */
    @AfterViews
    void onSetupViews() {
        if (mSelectedDate != null) {
            appointmentTime = mSelectedDate.getTime();
            // BUG FIX: this label update used to run unconditionally at the end of
            // this method and dereferenced mSelectedDate, throwing an NPE whenever
            // the "date" extra was missing; it now stays inside the null guard.
            mTimeButtonTextView.setText(MyDateFormatter.formatTime(mSelectedDate.getTime()));
        }

        // set spinner to get early reminder time
        ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this,
                R.array.early_reminder_chose, android.R.layout.simple_spinner_item);
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mReminderSpinner.setAdapter(adapter);
        mReminderSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // Positions map 1:1 onto the lead-time table; guard keeps a
                // mismatched resource array from causing an out-of-bounds access.
                if (position >= 0 && position < EARLY_REMINDER_MILLIS.length) {
                    earlyMillis = EARLY_REMINDER_MILLIS[position];
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }
        });

        // Hide the appointment preview until a doctor has been selected.
        mAppointmentView.setVisibility(View.GONE);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getSupportMenuInflater().inflate(R.menu.save_menu, menu);
        return true;
    }

    /** Shows a dialog of the user's doctors and previews the chosen one. */
    @Click(R.id.add_appointment_select_btn)
    void handleSelectButtonClick() {
        final List<DoctorInfo> doctorInfoList = UserSingleton.INSTANCE.getCurrentUser().getDoctors();
        final CharSequence[] items = new CharSequence[doctorInfoList.size()];

        // get all of the user's doctor names
        int i = 0;
        for (DoctorInfo doctorInfo : doctorInfoList) {
            items[i++] = doctorInfo.getFullName();
        }

        // create a dialog with doctor names
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("Choose a Doctor");
        builder.setItems(items, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                // update the views with the info from the selected doctor from the dialog
                mSelectedDoctor = doctorInfoList.get(item);
                mAppointmentView.setVisibility(View.VISIBLE);
                mAppointmentView.setText(
                        mSelectedDoctor.getFullName(),
                        mSelectedDoctor.getPhone(),
                        MyDateFormatter.formatDate(appointmentTime),
                        mSelectedDoctor.getHospital() + " - " + mSelectedDoctor.getAddress()
                );
            }
        });
        AlertDialog alert = builder.create();
        alert.show();
    }

    /** Opens the BetterPickers time picker; the result arrives in {@link #onDialogTimeSet}. */
    @Click(R.id.add_appointment_time_btn)
    void handleTimeButtonClick() {
        TimePickerBuilder tpb = new TimePickerBuilder()
                .setFragmentManager(getSupportFragmentManager())
                .setStyleResId(R.style.BetterPickersDialogFragment_Light);
        tpb.show();
    }

    @OptionsItem(android.R.id.home)
    void handleHomeButtonClick() {
        NavUtils.navigateUpFromSameTask(this); // go back to previous activity when clicking the actionbar home
    }

    /** Persists the appointment plus its reminder and schedules the alarm. */
    @OptionsItem(R.id.save_icon)
    void handleSaveIconClick() {
        if (mSelectedDoctor != null) {
            Appointment appointment = new Appointment(mSelectedDoctor, appointmentTime);

            // use default text if no purpose text is entered
            if (!mPurposeEditText.getText().toString().isEmpty()) {
                appointment.setPurpose(mPurposeEditText.getText().toString());
            }
            appointment.save(); // save to DB

            Reminder reminder = new Reminder(appointment, earlyMillis);
            reminder.save();
            new ReminderAlarm(this, reminder, R.drawable.ic_action_calendar_day); // set alarm for this appointment

            Toast.makeText(this, "Appointment saved", Toast.LENGTH_SHORT).show();
            finish(); // close the activity
        }
    }

    /** BetterPickers callback: combines the picked time-of-day with the selected date. */
    @Override
    public void onDialogTimeSet(int reference, int hourOfDay, int minute) {
        long mills = TimeUnit.HOURS.toMillis(hourOfDay) + TimeUnit.MINUTES.toMillis(minute);
        appointmentTime = mSelectedDate.getTime() + mills;
        mTimeButtonTextView.setText(MyDateFormatter.formatTime(appointmentTime));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.jackrabbit.oak.plugins.document.secondary;

import static java.util.Arrays.asList;
import static org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils.registerMBean;

import java.util.Collections;
import java.util.Hashtable;
import java.util.List;
import java.util.concurrent.Executor;

import javax.annotation.Nonnull;

import com.google.common.collect.Lists;
import org.apache.jackrabbit.oak.osgi.OsgiWhiteboard;
import org.apache.jackrabbit.oak.plugins.document.AbstractDocumentNodeState;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStateCache;
import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
import org.apache.jackrabbit.oak.plugins.document.NodeStateDiffer;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.commit.BackgroundObserver;
import org.apache.jackrabbit.oak.spi.commit.BackgroundObserverMBean;
import org.apache.jackrabbit.oak.spi.commit.Observer;
import org.apache.jackrabbit.oak.spi.filter.PathFilter;
import org.apache.jackrabbit.oak.spi.state.NodeStateDiff;
import org.apache.jackrabbit.oak.spi.state.NodeStoreProvider;
import org.apache.jackrabbit.oak.spi.whiteboard.Registration;
import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
import org.osgi.service.metatype.annotations.AttributeDefinition;
import org.osgi.service.metatype.annotations.Designate;
import org.osgi.service.metatype.annotations.ObjectClassDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * OSGi Declarative Services component that builds a {@link DocumentNodeStateCache}
 * backed by a secondary NodeStore (obtained from a {@link NodeStoreProvider} with
 * {@code role=secondary}) and registers both the cache service and an
 * {@link Observer} that keeps the secondary store up to date.
 *
 * <p>NOTE(review): lifecycle methods ({@code activate}/{@code deactivate}) and the
 * {@code bind*}/{@code unbind*} methods are invoked reflectively by the DS runtime —
 * their names and signatures are part of the component contract.
 */
@Component
@Designate(ocd = SecondaryStoreCacheService.Configuration.class)
public class SecondaryStoreCacheService {
    @ObjectClassDefinition(
            name = "Apache Jackrabbit Oak DocumentNodeStateCache Provider",
            description = "Configures a DocumentNodeStateCache based on a secondary NodeStore"
    )
    @interface Configuration {
        @AttributeDefinition(
                name = "Included Paths",
                description = "List of paths which are to be included in the secondary store"
        )
        String[] includedPaths() default {"/"};

        @AttributeDefinition(
                name = "Async Observation",
                description = "Enable async observation processing"
        )
        boolean enableAsyncObserver() default true;

        @AttributeDefinition(
                name = "Observer queue size",
                description = "Observer queue size. Used if 'enableAsyncObserver' is set to true"
        )
        int observerQueueSize() default BackgroundObserver.DEFAULT_QUEUE_SIZE;
    }

    private final Logger log = LoggerFactory.getLogger(getClass());

    /**
     * Having a reference to BlobStore ensures that DocumentNodeStoreService does register a BlobStore
     */
    @Reference
    private BlobStore blobStore;

    // Secondary NodeStore from which the cache is built; target filter selects
    // the provider registered with role=secondary.
    @Reference(target = "(role=secondary)")
    private NodeStoreProvider secondaryStoreProvider;

    @Reference
    private Executor executor;

    @Reference
    private StatisticsProvider statisticsProvider;

    /*
     * Have an optional dependency on DocumentNodeStore such that we do not have hard dependency
     * on it and DocumentNodeStore can make use of this service even after it has unregistered
     */
    @Reference(cardinality = ReferenceCardinality.OPTIONAL, policy = ReferencePolicy.DYNAMIC)
    private volatile DocumentNodeStore documentNodeStore;

    // Whiteboard registrations (MBeans) undone on deactivate.
    private final List<Registration> oakRegs = Lists.newArrayList();

    // Plain OSGi service registrations undone on deactivate.
    private final List<ServiceRegistration> regs = Lists.newArrayList();

    private Whiteboard whiteboard;

    private BundleContext bundleContext;

    private PathFilter pathFilter;

    // Differ whose delegate is swapped in/out as the DocumentNodeStore comes and goes.
    private final MultiplexingNodeStateDiffer differ = new MultiplexingNodeStateDiffer();

    @Activate
    private void activate(BundleContext context, Configuration config){
        bundleContext = context;
        whiteboard = new OsgiWhiteboard(context);
        String[] includedPaths = config.includedPaths();

        //TODO Support for exclude is not possible as once a NodeState is loaded from secondary
        //store it assumes that complete subtree is in same store. With exclude it would need to
        //check for each child access and route to primary
        pathFilter = new PathFilter(asList(includedPaths), Collections.<String>emptyList());
        SecondaryStoreBuilder builder = new SecondaryStoreBuilder(secondaryStoreProvider.getNodeStore())
                .differ(differ)
                .metaPropNames(DocumentNodeStore.META_PROP_NAMES)
                .statisticsProvider(statisticsProvider)
                .pathFilter(pathFilter);
        SecondaryStoreCache cache = builder.buildCache();
        SecondaryStoreObserver observer = builder.buildObserver(cache);
        registerObserver(observer, config);

        regs.add(bundleContext.registerService(DocumentNodeStateCache.class.getName(), cache, null));

        //TODO Need to see OSGi dynamics. Its possible that DocumentNodeStore works after the cache
        //gets deregistered but the SegmentNodeState instances might still be in use and that would cause
        //failure
    }

    @Deactivate
    private void deactivate(){
        // Undo everything registered in activate/registerObserver.
        for (Registration r : oakRegs){
            r.unregister();
        }

        for (ServiceRegistration r : regs){
            r.unregister();
        }
    }

    // Package-private accessor; presumably used by tests — TODO confirm.
    PathFilter getPathFilter() {
        return pathFilter;
    }

    // DS bind method for the optional, dynamic DocumentNodeStore reference.
    protected void bindDocumentNodeStore(DocumentNodeStore documentNodeStore){
        log.info("Registering DocumentNodeStore as the differ");
        differ.setDelegate(documentNodeStore);
    }

    // DS unbind method: fall back to the default differ when the store goes away.
    protected void unbindDocumentNodeStore(DocumentNodeStore documentNodeStore){
        differ.setDelegate(NodeStateDiffer.DEFAULT_DIFFER);
    }

    //~----------------------------------------------------< internal >

    // Registers the observer, optionally wrapped in a BackgroundObserver (with an
    // MBean exposing its stats) when async observation is enabled in the config.
    private void registerObserver(Observer observer, Configuration config) {
        boolean enableAsyncObserver = config.enableAsyncObserver();
        int queueSize = config.observerQueueSize();
        if (enableAsyncObserver){
            BackgroundObserver bgObserver = new BackgroundObserver(observer, executor, queueSize);
            oakRegs.add(registerMBean(whiteboard,
                    BackgroundObserverMBean.class,
                    bgObserver.getMBean(),
                    BackgroundObserverMBean.TYPE,
                    "Secondary NodeStore observer stats"));
            observer = bgObserver;
            log.info("Configuring the observer for secondary NodeStore as " +
                    "Background Observer with queue size {}", queueSize);
        }

        //Ensure that our observer comes first in processing
        Hashtable<String, Object> props = new Hashtable<>();
        props.put(Constants.SERVICE_RANKING, 10000);
        regs.add(bundleContext.registerService(Observer.class.getName(), observer, props));
    }

    // NodeStateDiffer that forwards to a swappable delegate, so the bound
    // DocumentNodeStore can be attached/detached without re-registering anything.
    private static class MultiplexingNodeStateDiffer implements NodeStateDiffer {
        private volatile NodeStateDiffer delegate = NodeStateDiffer.DEFAULT_DIFFER;

        @Override
        public boolean compare(@Nonnull AbstractDocumentNodeState node, @Nonnull AbstractDocumentNodeState base,
                               @Nonnull NodeStateDiff diff) {
            return delegate.compare(node, base, diff);
        }

        public void setDelegate(NodeStateDiffer delegate) {
            this.delegate = delegate;
        }
    }
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2014 robert.gruendler@dubture.com
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.dubture.jenkins.digitalocean;

import java.io.IOException;
import java.io.PrintStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.myjeeva.digitalocean.exception.RequestUnsuccessfulException;
import com.myjeeva.digitalocean.impl.DigitalOceanClient;
import com.myjeeva.digitalocean.pojo.Droplet;
import com.myjeeva.digitalocean.pojo.Image;
import com.myjeeva.digitalocean.pojo.Key;
import com.myjeeva.digitalocean.pojo.Region;
import com.myjeeva.digitalocean.pojo.Size;

import hudson.Extension;
import hudson.RelativePath;
import hudson.Util;
import hudson.model.Describable;
import hudson.model.Descriptor;
import hudson.model.Label;
import hudson.model.Node;
import hudson.model.labels.LabelAtom;
import hudson.slaves.NodeProperty;
import hudson.util.ListBoxModel;
import hudson.util.StreamTaskListener;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;

import static com.google.common.collect.Lists.newArrayList;

/**
 * A {@link SlaveTemplate} represents the configuration values for creating a new slave via a DigitalOcean droplet.
 *
 * <p>Holds things like Image ID, sizeId and region used for the specific droplet.
 *
 * <p>The {@link SlaveTemplate#provision(DigitalOceanClient, String, String, Integer, StreamTaskListener)} method
 * is the main entry point to create a new droplet via the DigitalOcean API when a new slave needs to be provisioned.
 *
 * @author robert.gruendler@dubture.com
 */
@SuppressWarnings("unused")
public class SlaveTemplate implements Describable<SlaveTemplate> {

    private static final String DROPLET_PREFIX = "jenkins-";

    private final String labelString;

    private final int idleTerminationInMinutes;

    /**
     * The maximum number of executors that this slave will run.
     */
    private final int numExecutors;

    private final String labels;

    /**
     * The Image to be used for the droplet.
     */
    private final String imageId;

    /**
     * The specified droplet sizeId.
     */
    private final String sizeId;

    /**
     * The region for the droplet.
     */
    private final String regionId;

    /**
     * User-supplied data for configuring a droplet
     */
    private final String userData;

    /**
     * Setup script for preparing the new slave. Differs from userData in that Jenkins runs this script,
     * as opposed to the DigitalOcean provisioning process.
     */
    private final String initScript;

    // Derived from `labels` in readResolve(); transient so it is rebuilt after deserialization.
    private transient Set<LabelAtom> labelSet;

    // Back-reference to the owning cloud; assigned externally (hence protected).
    protected transient Cloud parent;

    private static final Logger LOGGER = Logger.getLogger(SlaveTemplate.class.getName());

    /**
     * Data is injected from the global Jenkins configuration via jelly.
     * @param imageId an image slug e.g. "debian-8-x64", or an integer e.g. of a backup, such as "12345678"
     * @param sizeId the image size e.g. "512mb" or "1gb"
     * @param regionId the region e.g. "nyc1"
     * @param idleTerminationInMinutes how long to wait before destroying a slave (defaults to 10 if unparseable)
     * @param numExecutors the number of executors that this slave supports (defaults to 1 if unparseable)
     * @param labelString the label for this slave
     * @param userData user data for DigitalOcean to apply when building the slave
     * @param initScript setup script to configure the slave
     */
    @DataBoundConstructor
    public SlaveTemplate(String imageId, String sizeId, String regionId, String idleTerminationInMinutes,
                         String numExecutors, String labelString, String userData, String initScript) {

        LOGGER.log(Level.INFO, "Creating SlaveTemplate with imageId = {0}, sizeId = {1}, regionId = {2}",
                new Object[] { imageId, sizeId, regionId});

        this.imageId = imageId;
        this.sizeId = sizeId;
        this.regionId = regionId;
        this.idleTerminationInMinutes = tryParseInteger(idleTerminationInMinutes, 10);
        this.numExecutors = tryParseInteger(numExecutors, 1);
        this.labelString = labelString;
        this.labels = Util.fixNull(labelString);
        this.userData = userData;
        this.initScript = initScript;

        readResolve();
    }

    /**
     * Creates a new droplet on DigitalOcean to be used as a Jenkins slave.
     *
     * @param apiClient the v2 API client to use
     * @param dropletName the name to assign to the new droplet
     * @param privateKey the RSA private key to use
     * @param sshKeyId the SSH key name name to use
     * @param listener the listener on which to report progress
     * @return the provisioned {@link Slave}
     * @throws IOException
     * @throws RequestUnsuccessfulException
     * @throws Descriptor.FormException
     */
    public Slave provision(DigitalOceanClient apiClient, String dropletName, String privateKey, Integer sshKeyId, StreamTaskListener listener)
            throws IOException, RequestUnsuccessfulException, Descriptor.FormException {

        LOGGER.log(Level.INFO, "Provisioning slave...");

        PrintStream logger = listener.getLogger();
        try {
            logger.printf("Starting to provision digital ocean droplet using image: %s, region: %s, sizeId: %s%n",
                    imageId, regionId, sizeId);

            // create a new droplet
            Droplet droplet = new Droplet();
            droplet.setName(dropletName);
            droplet.setSize(sizeId);
            droplet.setRegion(new Region(regionId));
            droplet.setImage(DigitalOcean.newImage(imageId));
            droplet.setKeys(newArrayList(new Key(sshKeyId)));

            if (!(userData == null || userData.trim().isEmpty())) {
                droplet.setUserData(userData);
            }

            logger.println("Creating slave with new droplet " + dropletName);

            Droplet createdDroplet = apiClient.createDroplet(droplet);
            return newSlave(createdDroplet, privateKey);
        } catch (Exception e) {
            // BUG FIX: this used to throw a bare `new AssertionError()`, discarding
            // the underlying exception entirely. AssertionError(Object) initializes
            // the cause when given a Throwable, so callers now see both the stack
            // trace in the listener log AND the original cause on the thrown error.
            e.printStackTrace(logger);
            throw new AssertionError(e);
        }
    }

    /**
     * Create a new {@link Slave} from the given {@link Droplet}
     * @param droplet the droplet being created
     * @param privateKey the RSA private key being used
     * @return the provisioned {@link Slave}
     * @throws IOException
     * @throws Descriptor.FormException
     */
    private Slave newSlave(Droplet droplet, String privateKey) throws IOException, Descriptor.FormException {
        LOGGER.log(Level.INFO, "Creating new slave...");
        return new Slave(
                getParent().getName(),
                droplet.getName(),
                "Computer running on DigitalOcean with name: " + droplet.getName(),
                droplet.getId(),
                privateKey,
                "/jenkins",
                "root",
                numExecutors,
                idleTerminationInMinutes,
                userData,
                Node.Mode.NORMAL,
                labels,
                new ComputerLauncher(),
                new RetentionStrategy(),
                Collections.<NodeProperty<?>>emptyList(),
                Util.fixNull(initScript),
                ""
        );
    }

    /**
     * Descriptor supplying the drop-down contents (sizes, images, regions) for the
     * template configuration form; each doFill* method queries the DigitalOcean API
     * with the cloud-level auth token.
     */
    @Extension
    public static final class DescriptorImpl extends Descriptor<SlaveTemplate> {

        @Override
        public String getDisplayName() {
            return null;
        }

        public ListBoxModel doFillSizeIdItems(@RelativePath("..") @QueryParameter String authToken) throws Exception {

            List<Size> availableSizes = DigitalOcean.getAvailableSizes(authToken);
            ListBoxModel model = new ListBoxModel();

            for (Size size : availableSizes) {
                model.add(DigitalOcean.buildSizeLabel(size), size.getSlug());
            }

            return model;
        }

        public ListBoxModel doFillImageIdItems(@RelativePath("..") @QueryParameter String authToken) throws Exception {

            SortedMap<String, Image> availableImages = DigitalOcean.getAvailableImages(authToken);
            ListBoxModel model = new ListBoxModel();

            for (Map.Entry<String, Image> entry : availableImages.entrySet()) {
                final Image image = entry.getValue();

                // For non-snapshots, use the image ID instead of the slug (which isn't available anyway)
                // so that we can build images based upon backups.
                final String value = DigitalOcean.getImageIdentifier(image);

                model.add(entry.getKey(), value);
            }

            return model;
        }

        public ListBoxModel doFillRegionIdItems(@RelativePath("..") @QueryParameter String authToken) throws Exception {

            // (renamed from the misleading `availableSizes` — these are regions)
            List<Region> availableRegions = DigitalOcean.getAvailableRegions(authToken);
            ListBoxModel model = new ListBoxModel();

            for (Region region : availableRegions) {
                model.add(region.getName(), region.getSlug());
            }

            return model;
        }
    }

    @SuppressWarnings("unchecked")
    public Descriptor<SlaveTemplate> getDescriptor() {
        return Jenkins.getInstance().getDescriptor(getClass());
    }

    /** Generates a unique droplet name with the {@code jenkins-} prefix. */
    public String createDropletName() {
        return DROPLET_PREFIX + UUID.randomUUID().toString();
    }

    public String getSizeId() {
        return sizeId;
    }

    public String getRegionId() {
        return regionId;
    }

    public String getLabels() {
        return labels;
    }

    public String getLabelString() {
        return labelString;
    }

    public Set<LabelAtom> getLabelSet() {
        return labelSet;
    }

    public Cloud getParent() {
        return parent;
    }

    public String getImageId() {
        return imageId;
    }

    public int getNumExecutors() {
        return numExecutors;
    }

    public int getIdleTerminationInMinutes() {
        return idleTerminationInMinutes;
    }

    public String getUserData() {
        return userData;
    }

    public String getInitScript() {
        return initScript;
    }

    /**
     * Parses {@code integerString} as an int, falling back to {@code defaultValue}
     * (with an INFO log) when the input is not a valid integer.
     */
    private static int tryParseInteger(final String integerString, final int defaultValue) {
        try {
            return Integer.parseInt(integerString);
        } catch (NumberFormatException e) {
            LOGGER.log(Level.INFO, "Invalid integer {0}, defaulting to {1}",
                    new Object[] {integerString, defaultValue});
            return defaultValue;
        }
    }

    /** Rebuilds the transient label set after construction or deserialization. */
    protected Object readResolve() {
        labelSet = Label.parse(labels);
        return this;
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.test.api.task; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.flowable.engine.impl.test.PluggableFlowableTestCase; import org.flowable.engine.task.Task; import org.flowable.engine.task.TaskQuery; import org.flowable.engine.test.Deployment; /** * @author Tijs Rademakers */ public class TaskAndVariablesQueryTest extends PluggableFlowableTestCase { private List<String> taskIds; private List<String> multipleTaskIds; public void setUp() throws Exception { identityService.saveUser(identityService.newUser("kermit")); identityService.saveUser(identityService.newUser("gonzo")); identityService.saveUser(identityService.newUser("fozzie")); identityService.saveGroup(identityService.newGroup("management")); identityService.saveGroup(identityService.newGroup("accountancy")); identityService.createMembership("kermit", "management"); identityService.createMembership("kermit", "accountancy"); identityService.createMembership("fozzie", "management"); taskIds = generateTestTasks(); } public void tearDown() throws Exception { identityService.deleteGroup("accountancy"); identityService.deleteGroup("management"); identityService.deleteUser("fozzie"); identityService.deleteUser("gonzo"); identityService.deleteUser("kermit"); taskService.deleteTasks(taskIds, true); } @Deployment public void testQuery() { Task task = 
taskService.createTaskQuery().includeTaskLocalVariables().taskAssignee("gonzo").singleResult();
        // gonzo's task carries exactly the three local variables set up in generateTestTasks()
        Map<String, Object> variableMap = task.getTaskLocalVariables();
        assertEquals(3, variableMap.size());
        assertEquals(0, task.getProcessVariables().size());
        assertNotNull(variableMap.get("testVar"));
        assertEquals("someVariable", variableMap.get("testVar"));
        assertNotNull(variableMap.get("testVar2"));
        assertEquals(123, variableMap.get("testVar2"));
        assertNotNull(variableMap.get("testVarBinary"));
        assertEquals("This is a binary variable", new String((byte[]) variableMap.get("testVarBinary")));

        List<Task> tasks = taskService.createTaskQuery().list();
        assertEquals(3, tasks.size());

        // includeProcessVariables() alone must not pull in task-local variables (and vice versa)
        task = taskService.createTaskQuery().includeProcessVariables().taskAssignee("gonzo").singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());

        Map<String, Object> startMap = new HashMap<>();
        startMap.put("processVar", true);
        startMap.put("binaryVariable", "This is a binary process variable".getBytes());
        runtimeService.startProcessInstanceByKey("oneTaskProcess", startMap);

        task = taskService.createTaskQuery().includeProcessVariables().taskAssignee("kermit").singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertTrue((Boolean) task.getProcessVariables().get("processVar"));
        assertEquals("This is a binary process variable",
            new String((byte[]) task.getProcessVariables().get("binaryVariable")));

        taskService.setVariable(task.getId(), "anotherProcessVar", 123);
        taskService.setVariableLocal(task.getId(), "localVar", "test");

        task = taskService.createTaskQuery().includeTaskLocalVariables().taskAssignee("kermit").singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));

        task = taskService.createTaskQuery().includeProcessVariables().taskAssignee("kermit").singleResult();
        assertEquals(3, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        assertEquals("This is a binary process variable",
            new String((byte[]) task.getProcessVariables().get("binaryVariable")));

        tasks = taskService.createTaskQuery().includeTaskLocalVariables().taskCandidateUser("kermit").list();
        assertEquals(2, tasks.size());
        assertEquals(2, tasks.get(0).getTaskLocalVariables().size());
        assertEquals("test", tasks.get(0).getTaskLocalVariables().get("test"));
        assertEquals(0, tasks.get(0).getProcessVariables().size());

        tasks = taskService.createTaskQuery().includeProcessVariables().taskCandidateUser("kermit").list();
        assertEquals(2, tasks.size());
        assertEquals(0, tasks.get(0).getProcessVariables().size());
        assertEquals(0, tasks.get(0).getTaskLocalVariables().size());

        task = taskService.createTaskQuery().includeTaskLocalVariables().taskAssignee("kermit").taskVariableValueEquals("localVar", "test").singleResult();
        assertEquals(0, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));

        task = taskService.createTaskQuery().includeProcessVariables().taskAssignee("kermit").taskVariableValueEquals("localVar", "test").singleResult();
        assertEquals(3, task.getProcessVariables().size());
        assertEquals(0, task.getTaskLocalVariables().size());
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));

        // both includes together must populate both variable maps on the same result
        task = taskService.createTaskQuery().includeTaskLocalVariables().includeProcessVariables().taskAssignee("kermit").singleResult();
        assertEquals(3, task.getProcessVariables().size());
        assertEquals(1, task.getTaskLocalVariables().size());
        assertEquals("test", task.getTaskLocalVariables().get("localVar"));
        assertEquals(true, task.getProcessVariables().get("processVar"));
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
        assertEquals("This is a binary process variable",
            new String((byte[]) task.getProcessVariables().get("binaryVariable")));
    }

    /**
     * Paging combined with includeProcessVariables()/includeTaskLocalVariables():
     * variables must still be fetched for every task on the returned page.
     */
    public void testQueryWithPagingAndVariables() {
        List<Task> tasks = taskService.createTaskQuery().includeProcessVariables().includeTaskLocalVariables().orderByTaskPriority().desc().listPage(0, 1);
        assertEquals(1, tasks.size());
        Task task = tasks.get(0);
        Map<String, Object> variableMap = task.getTaskLocalVariables();
        assertEquals(3, variableMap.size());
        assertEquals("someVariable", variableMap.get("testVar"));
        assertEquals(123, variableMap.get("testVar2"));
        assertEquals("This is a binary variable", new String((byte[]) variableMap.get("testVarBinary")));

        tasks = taskService.createTaskQuery().includeProcessVariables().includeTaskLocalVariables().orderByTaskPriority().asc().listPage(1, 2);
        assertEquals(2, tasks.size());
        task = tasks.get(1);
        variableMap = task.getTaskLocalVariables();
        assertEquals(3, variableMap.size());
        assertEquals("someVariable", variableMap.get("testVar"));
        assertEquals(123, variableMap.get("testVar2"));
        assertEquals("This is a binary variable", new String((byte[]) variableMap.get("testVarBinary")));

        tasks = taskService.createTaskQuery().includeProcessVariables().includeTaskLocalVariables().orderByTaskPriority().asc().listPage(2, 4);
        assertEquals(1, tasks.size());
        task = tasks.get(0);
        variableMap = task.getTaskLocalVariables();
        assertEquals(3, variableMap.size());
        assertEquals("someVariable", variableMap.get("testVar"));
        assertEquals(123, variableMap.get("testVar2"));
        assertEquals("This is a binary variable", new String((byte[]) variableMap.get("testVarBinary")));

        // a page starting beyond the last row yields an empty result
        tasks = taskService.createTaskQuery().includeProcessVariables().includeTaskLocalVariables().orderByTaskPriority().asc().listPage(4, 2);
        assertEquals(0, tasks.size());
    }

    // Unit test for https://activiti.atlassian.net/browse/ACT-4152
    // (task category must survive the include*Variables() query variants)
    public void testQueryWithIncludeTaskVariableAndTaskCategory() {
        List<Task> tasks = taskService.createTaskQuery().taskAssignee("gonzo").list();
        for (Task task : tasks) {
            assertNotNull(task.getCategory());
            assertEquals("testCategory", task.getCategory());
        }

        tasks = taskService.createTaskQuery().taskAssignee("gonzo").includeTaskLocalVariables().list();
        for (Task task : tasks) {
            assertNotNull(task.getCategory());
            assertEquals("testCategory", task.getCategory());
        }

        tasks = taskService.createTaskQuery().taskAssignee("gonzo").includeProcessVariables().list();
        for (Task task : tasks) {
            assertNotNull(task.getCategory());
            assertEquals("testCategory", task.getCategory());
        }
    }

    /**
     * limitTaskVariables() must cap variable fetching without changing the number
     * of tasks a paged query returns.
     */
    public void testQueryWithLimitAndVariables() throws Exception {
        int taskVariablesLimit = 2000;
        int expectedNumberOfTasks = 103;
        try {
            // setup - create 100 tasks
            multipleTaskIds = generateMultipleTestTasks();

            // limit results to 2000 and set maxResults for paging to 200
            // please see MNT-16040
            List<Task> tasks = taskService.createTaskQuery()
                .includeProcessVariables()
                .includeTaskLocalVariables()
                .limitTaskVariables(taskVariablesLimit)
                .orderByTaskPriority()
                .asc()
                .listPage(0, 200);
            // 100 tasks created by generateMultipleTestTasks and 3 created previously at setUp
            assertEquals(expectedNumberOfTasks, tasks.size());

            tasks = taskService.createTaskQuery()
                .includeProcessVariables()
                .includeTaskLocalVariables()
                .orderByTaskPriority()
                .limitTaskVariables(taskVariablesLimit)
                .asc()
                .listPage(50, 100);
            assertEquals(53, tasks.size());
        } finally {
            taskService.deleteTasks(multipleTaskIds, true);
        }
    }

    /**
     * or()/endOr() with process-variable predicates: a single matching predicate
     * inside the OR group is enough, regardless of predicate order.
     */
    @Deployment
    public void testOrQuery() {
        Map<String, Object> startMap = new HashMap<>();
        startMap.put("anotherProcessVar", 123);
        runtimeService.startProcessInstanceByKey("oneTaskProcess", startMap);

        // non-matching predicate first, matching second
        Task task = taskService.createTaskQuery().includeProcessVariables().or().processVariableValueEquals("undefined", 999).processVariableValueEquals("anotherProcessVar", 123).endOr().singleResult();
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));

        // only a non-matching predicate: no result
        task = taskService.createTaskQuery().includeProcessVariables().or().processVariableValueEquals("undefined", 999).endOr().singleResult();
        assertNull(task);

        // matching predicate first, non-matching second
        task = taskService.createTaskQuery().includeProcessVariables().or().processVariableValueEquals("anotherProcessVar", 123).processVariableValueEquals("undefined", 999).endOr().singleResult();
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));

        task = taskService.createTaskQuery().includeProcessVariables().or().processVariableValueEquals("anotherProcessVar", 123).endOr().singleResult();
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));

        // right variable, wrong value: no result
        task = taskService.createTaskQuery().includeProcessVariables().or().processVariableValueEquals("anotherProcessVar", 999).endOr().singleResult();
        assertNull(task);

        task = taskService.createTaskQuery().includeProcessVariables().or().processVariableValueEquals("anotherProcessVar", 999).processVariableValueEquals("anotherProcessVar", 123).endOr().singleResult();
        assertEquals(1, task.getProcessVariables().size());
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
    }

    /**
     * Many or()-ed predicates on the same variable: only the presence of one
     * matching value decides the outcome.
     */
    @Deployment
    public void testOrQueryMultipleVariableValues() {
        Map<String, Object> startMap = new HashMap<>();
        startMap.put("aProcessVar", 1);
        startMap.put("anotherProcessVar", 123);
        runtimeService.startProcessInstanceByKey("oneTaskProcess", startMap);

        // 20 or-ed predicates (values 0..19), none of which matches the actual value 123
        TaskQuery query0 = taskService.createTaskQuery().includeProcessVariables().or();
        for (int i = 0; i < 20; i++) {
            query0 = query0.processVariableValueEquals("anotherProcessVar", i);
        }
        query0 = query0.endOr();
        assertNull(query0.singleResult());

        // same predicates plus one that does match
        TaskQuery query1 = taskService.createTaskQuery().includeProcessVariables().or().processVariableValueEquals("anotherProcessVar", 123);
        for (int i = 0; i < 20; i++) {
            query1 = query1.processVariableValueEquals("anotherProcessVar", i);
        }
        query1 = query1.endOr();
        Task task = query1.singleResult();
        assertEquals(2, task.getProcessVariables().size());
        assertEquals(123, task.getProcessVariables().get("anotherProcessVar"));
    }

    /**
     * Generates some test tasks. - 2 tasks where kermit is a candidate and 1 task where gonzo is assignee
     */
    private List<String> generateTestTasks() throws Exception {
        List<String> ids = new ArrayList<>();

        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss.SSS");
        // 2 tasks for kermit
        processEngineConfiguration.getClock().setCurrentTime(sdf.parse("01/01/2001 01:01:01.000"));
        for (int i = 0; i < 2; i++) {
            Task task = taskService.newTask();
            task.setName("testTask");
            task.setDescription("testTask description");
            task.setPriority(3);
            taskService.saveTask(task);
            ids.add(task.getId());
            taskService.setVariableLocal(task.getId(), "test", "test");
            taskService.setVariableLocal(task.getId(), "testBinary", "This is a binary variable".getBytes());
            taskService.addCandidateUser(task.getId(), "kermit");
        }

        processEngineConfiguration.getClock().setCurrentTime(sdf.parse("02/02/2002 02:02:02.000"));
        // 1 task for gonzo
        Task task = taskService.newTask();
        task.setName("gonzoTask");
        task.setDescription("gonzo description");
        task.setPriority(4);
        task.setCategory("testCategory");
        taskService.saveTask(task);
        taskService.setAssignee(task.getId(), "gonzo");
        taskService.setVariableLocal(task.getId(), "testVar", "someVariable");
        taskService.setVariableLocal(task.getId(), "testVarBinary", "This is a binary variable".getBytes());
        taskService.setVariableLocal(task.getId(), "testVar2", 123);
        ids.add(task.getId());

        return ids;
    }

    /**
     * Generates 100 test tasks.
     */
    private List<String> generateMultipleTestTasks() throws Exception {
        List<String> ids = new ArrayList<>();

        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss.SSS");
        processEngineConfiguration.getClock().setCurrentTime(sdf.parse("01/01/2001 01:01:01.000"));
        for (int i = 0; i < 100; i++) {
            Task task = taskService.newTask();
            task.setName("testTask");
            task.setDescription("testTask description");
            task.setPriority(3);
            taskService.saveTask(task);
            ids.add(task.getId());
            taskService.setVariableLocal(task.getId(), "test", "test");
            taskService.setVariableLocal(task.getId(), "testBinary", "This is a binary variable".getBytes());
            taskService.addCandidateUser(task.getId(), "kermit");
        }
        return ids;
    }
}
package io.fabric8.maven.docker.service;
/*
 *
 * Copyright 2014 Roland Huss
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;

import io.fabric8.maven.docker.access.*;
import io.fabric8.maven.docker.config.*;
import io.fabric8.maven.docker.log.LogOutputSpecFactory;
import io.fabric8.maven.docker.model.Container;
import io.fabric8.maven.docker.model.Network;
import io.fabric8.maven.docker.util.*;
import io.fabric8.maven.docker.wait.WaitTimeoutException;
import io.fabric8.maven.docker.wait.WaitUtil;

import static io.fabric8.maven.docker.util.VolumeBindingUtil.resolveRelativeVolumeBindings;

/**
 * Service class for helping in running containers.
 *
 * @author roland
 * @since 16/06/15
 */
public class RunService {

    // logger delegated from top
    private Logger log;

    // Action to be used when doing a shutdown
    final private ContainerTracker tracker;

    // DAO for accessing the docker daemon
    private DockerAccess docker;

    private QueryService queryService;

    private final LogOutputSpecFactory logConfig;

    public RunService(DockerAccess docker,
                      QueryService queryService,
                      ContainerTracker tracker,
                      LogOutputSpecFactory logConfig,
                      Logger log) {
        this.docker = docker;
        this.queryService = queryService;
        this.tracker = tracker;
        this.log = log;
        this.logConfig = logConfig;
    }

    /**
     * Create and start an exec container with the given image configuration.
     *
     * @param containerId container id to run exec command against
     * @param command command to execute
     * @param imageConfiguration configuration of the container's image
     * @return the exec container id
     *
     * @throws DockerAccessException if access to the docker backend fails
     */
    public String execInContainer(String containerId, String command, ImageConfiguration imageConfiguration)
        throws DockerAccessException {
        Arguments arguments = new Arguments();
        arguments.setExec(Arrays.asList(EnvUtil.splitOnSpaceWithEscape(command)));
        String execContainerId = docker.createExecContainer(containerId, arguments);
        docker.startExecContainer(execContainerId, logConfig.createSpec(containerId, imageConfiguration));
        return execContainerId;
    }

    /**
     * Create and start a container with the given image configuration.
     *
     * @param imageConfig image configuration holding the run information and the image name
     * @param portMapping container port mapping
     * @param pomLabel label to tag the started container with
     * @param mavenProps properties to fill in with dynamically assigned ports
     * @param baseDir base directory against which relative volume bindings are resolved
     *
     * @return the container id
     *
     * @throws DockerAccessException if access to the docker backend fails
     */
    public String createAndStartContainer(ImageConfiguration imageConfig,
                                          PortMapping portMapping,
                                          PomLabel pomLabel,
                                          Properties mavenProps,
                                          File baseDir) throws DockerAccessException {
        RunImageConfiguration runConfig = imageConfig.getRunConfiguration();
        String imageName = imageConfig.getName();
        String containerName = calculateContainerName(imageConfig.getAlias(), runConfig.getNamingStrategy());
        ContainerCreateConfig config = createContainerConfig(imageName, runConfig, portMapping, pomLabel, mavenProps, baseDir);

        String id = docker.createContainer(config, containerName);
        startContainer(imageConfig, id, pomLabel);

        // dynamically assigned host ports are only known after the container is up
        if (portMapping.needsPropertiesUpdate()) {
            updateMappedPortsAndAddresses(id, portMapping);
        }

        return id;
    }

    /**
     * Stop a container immediately by id.
     *
     * @param containerId the container to stop
     * @param imageConfig image configuration for this container
     * @param keepContainer whether to keep container or to remove them after stoppings
     * @param removeVolumes whether to remove volumes after stopping
     */
    public void stopContainer(String containerId,
                              ImageConfiguration imageConfig,
                              boolean keepContainer,
                              boolean removeVolumes)
        throws DockerAccessException {
        ContainerTracker.ContainerShutdownDescriptor descriptor =
            new ContainerTracker.ContainerShutdownDescriptor(imageConfig, containerId);
        shutdown(descriptor, keepContainer, removeVolumes);
    }

    /**
     * Look up whether a certain container has already been started and registered. If so, stop it.
     *
     * @param containerId the container to stop
     * @param keepContainer whether to keep container or to remove them after stoppings
     * @param removeVolumes whether to remove volumes after stopping
     *
     * @throws DockerAccessException
     */
    public void stopPreviouslyStartedContainer(String containerId,
                                               boolean keepContainer,
                                               boolean removeVolumes)
        throws DockerAccessException {
        ContainerTracker.ContainerShutdownDescriptor descriptor = tracker.removeContainer(containerId);
        if (descriptor != null) {
            shutdown(descriptor, keepContainer, removeVolumes);
        }
    }

    /**
     * Stop all registered container
     *
     * @param keepContainer whether to keep container or to remove them after stoppings
     * @param removeVolumes whether to remove volumes after stopping
     * @param removeCustomNetworks whether to also remove custom networks used by the stopped containers
     * @param pomLabel limit shutdown to containers tagged with this label (may be null for all)
     *
     * @throws DockerAccessException if during stopping of a container sth fails
     */
    public void stopStartedContainers(boolean keepContainer,
                                      boolean removeVolumes,
                                      boolean removeCustomNetworks,
                                      PomLabel pomLabel)
        throws DockerAccessException {
        Set<Network> networksToRemove = new HashSet<>();
        for (ContainerTracker.ContainerShutdownDescriptor descriptor : tracker.removeShutdownDescriptors(pomLabel)) {
            collectCustomNetworks(networksToRemove, descriptor, removeCustomNetworks);
            shutdown(descriptor, keepContainer, removeVolumes);
        }
        // networks can only be removed after all their containers are gone
        removeCustomNetworks(networksToRemove);
    }

    // Collect the custom network (if any) of a container scheduled for shutdown
    private void collectCustomNetworks(Set<Network> networksToRemove,
                                       ContainerTracker.ContainerShutdownDescriptor descriptor,
                                       boolean removeCustomNetworks) throws DockerAccessException {
        final NetworkConfig config = descriptor.getImageConfiguration().getRunConfiguration().getNetworkingConfig();
        if (removeCustomNetworks && config.isCustomNetwork()) {
            networksToRemove.add(queryService.getNetworkByName(config.getCustomNetwork()));
        }
    }

    /**
     * Lookup a container that has been started
     *
     * @param lookup a container by id or alias
     * @return the container id if the container exists, <code>null</code> otherwise.
     */
    public String lookupContainer(String lookup) {
        return tracker.lookupContainer(lookup);
    }

    /**
     * Get the proper order for images to start
     *
     * @param queryService service for querying the docker daemon
     * @param images list of images for which the order should be created
     * @return list of images in the right startup order
     */
    public List<StartOrderResolver.Resolvable> getImagesConfigsInOrder(QueryService queryService, List<ImageConfiguration> images) {
        return StartOrderResolver.resolve(queryService, convertToResolvables(images));
    }

    /**
     * Create port mapping for a specific configuration as it can be used when creating containers
     *
     * @param runConfig the run configuration
     * @param properties properties to lookup variables
     * @return the portmapping
     */
    public PortMapping createPortMapping(RunImageConfiguration runConfig, Properties properties) {
        try {
            return new PortMapping(runConfig.getPorts(), properties);
        } catch (IllegalArgumentException exp) {
            throw new IllegalArgumentException("Cannot parse port mapping", exp);
        }
    }

    /**
     * Add a shutdown hook in order to stop all registered containers
     */
    public void addShutdownHookForStoppingContainers(final boolean keepContainer,
                                                     final boolean removeVolumes,
                                                     final boolean removeCustomNetworks) {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    stopStartedContainers(keepContainer, removeVolumes, removeCustomNetworks, null);
                } catch (DockerAccessException e) {
                    log.error("Error while stopping containers: %s", e.getMessage());
                }
            }
        });
    }

    // Filter out images whose run configuration is flagged as skipped
    private List<StartOrderResolver.Resolvable> convertToResolvables(List<ImageConfiguration> images) {
        List<StartOrderResolver.Resolvable> ret = new ArrayList<>();
        for (ImageConfiguration config : images) {
            if (config.getRunConfiguration().skip()) {
                log.info("%s: Skipped running", config.getDescription());
            } else {
                ret.add(config);
            }
        }
        return ret;
    }

    // visible for testing
    // Assemble the full container create configuration from the run configuration
    ContainerCreateConfig createContainerConfig(String imageName, RunImageConfiguration runConfig, PortMapping mappedPorts,
                                                PomLabel pomLabel, Properties mavenProps, File baseDir)
        throws DockerAccessException {
        try {
            ContainerCreateConfig config = new ContainerCreateConfig(imageName)
                    .hostname(runConfig.getHostname())
                    .domainname(runConfig.getDomainname())
                    .user(runConfig.getUser())
                    .workingDir(runConfig.getWorkingDir())
                    .entrypoint(runConfig.getEntrypoint())
                    .exposedPorts(mappedPorts.getContainerPorts())
                    .environment(runConfig.getEnvPropertyFile(), runConfig.getEnv(), mavenProps)
                    .labels(mergeLabels(runConfig.getLabels(), pomLabel))
                    .command(runConfig.getCmd())
                    .hostConfig(createContainerHostConfig(runConfig, mappedPorts, baseDir));
            RunVolumeConfiguration volumeConfig = runConfig.getVolumeConfiguration();
            if (volumeConfig != null) {
                resolveRelativeVolumeBindings(baseDir, volumeConfig);
                config.binds(volumeConfig.getBind());
            }

            NetworkConfig networkConfig = runConfig.getNetworkingConfig();
            if (networkConfig.isCustomNetwork() && networkConfig.hasAliases()) {
                ContainerNetworkingConfig networkingConfig =
                    new ContainerNetworkingConfig().aliases(networkConfig);
                config.networkingConfig(networkingConfig);
            }

            return config;
        } catch (IllegalArgumentException e) {
            // Fixed message typo: "contained" -> "container"
            throw new IllegalArgumentException(String.format("Failed to create container configuration for [%s]", imageName), e);
        }
    }

    // Merge the user-supplied labels with the run-id label used for tracking started containers
    private Map<String, String> mergeLabels(Map<String, String> labels, PomLabel runIdLabel) {
        Map<String, String> ret = new HashMap<>();
        if (labels != null) {
            ret.putAll(labels);
        }
        if (runIdLabel != null) {
            ret.put(runIdLabel.getKey(), runIdLabel.getValue());
        }
        return ret;
    }

    // Build the host-level part of the container configuration (ports, links, volumes, networking, ...)
    ContainerHostConfig createContainerHostConfig(RunImageConfiguration runConfig, PortMapping mappedPorts, File baseDir)
        throws DockerAccessException {
        RestartPolicy restartPolicy = runConfig.getRestartPolicy();

        List<String> links = findContainerIdsForLinks(runConfig.getLinks(),
                                                      runConfig.getNetworkingConfig().isCustomNetwork());

        ContainerHostConfig config = new ContainerHostConfig()
                .extraHosts(runConfig.getExtraHosts())
                .links(links)
                .portBindings(mappedPorts)
                .privileged(runConfig.getPrivileged())
                .shmSize(runConfig.getShmSize())
                .dns(runConfig.getDns())
                .dnsSearch(runConfig.getDnsSearch())
                .capAdd(runConfig.getCapAdd())
                .capDrop(runConfig.getCapDrop())
                .securityOpts(runConfig.getSecurityOpts())
                .memory(runConfig.getMemory())
                .memorySwap(runConfig.getMemorySwap())
                .restartPolicy(restartPolicy.getName(), restartPolicy.getRetry())
                .logConfig(runConfig.getLogConfiguration())
                .tmpfs(runConfig.getTmpfs())
                .ulimits(runConfig.getUlimits());
        addVolumeConfig(config, runConfig, baseDir);
        addNetworkingConfig(config, runConfig);
        return config;
    }

    // Set the network mode: either a standard mode (possibly resolving a container alias) or a custom network
    private void addNetworkingConfig(ContainerHostConfig config, RunImageConfiguration runConfig)
        throws DockerAccessException {
        NetworkConfig networkConfig = runConfig.getNetworkingConfig();
        if (networkConfig.isStandardNetwork()) {
            String alias = networkConfig.getContainerAlias();
            String containerId = alias != null ? findContainerId(alias, false) : null;
            config.networkMode(networkConfig.getStandardMode(containerId));
        } else if (networkConfig.isCustomNetwork()) {
            config.networkMode(networkConfig.getCustomNetwork());
        }
    }

    // Attach volume bindings and volumes-from containers, resolving relative host paths against baseDir
    private void addVolumeConfig(ContainerHostConfig config, RunImageConfiguration runConfig, File baseDir)
        throws DockerAccessException {
        RunVolumeConfiguration volConfig = runConfig.getVolumeConfiguration();
        if (volConfig != null) {
            resolveRelativeVolumeBindings(baseDir, volConfig);
            config.binds(volConfig.getBind())
                  .volumesFrom(findVolumesFromContainers(volConfig.getFrom()));
        }
    }

    // Resolve "image/alias:linkAlias" pairs to "containerName:linkAlias" entries
    private List<String> findContainerIdsForLinks(List<String> links, boolean leaveUnresolvedIfNotFound)
        throws DockerAccessException {
        List<String> ret = new ArrayList<>();
        for (String[] link : EnvUtil.splitOnLastColon(links)) {
            String id = findContainerId(link[0], false);
            if (id != null) {
                ret.add(queryService.getContainerName(id) + ":" + link[1]);
            } else if (leaveUnresolvedIfNotFound) {
                // on custom networks docker resolves link names itself, so keep the raw entry
                ret.add(link[0] + ":" + link[1]);
            } else {
                throw new DockerAccessException("No container found for image/alias '%s', unable to link", link[0]);
            }
        }
        // NOTE(review): returns null (not an empty list) when there are no links;
        // downstream ContainerHostConfig.links() presumably treats null as "absent" -- confirm before changing
        return ret.size() != 0 ? ret : null;
    }

    // visible for testing
    // Resolve the container names whose volumes should be mounted via --volumes-from
    private List<String> findVolumesFromContainers(List<String> images) throws DockerAccessException {
        List<String> list = new ArrayList<>();
        if (images != null) {
            for (String image : images) {
                String id = findContainerId(image, true);
                if (id == null) {
                    throw new DockerAccessException("No container found for image/alias '%s', unable to mount volumes", image);
                }
                list.add(queryService.getContainerName(id));
            }
        }
        return list;
    }

    // Derive the container name from the alias according to the naming strategy
    private String calculateContainerName(String alias, RunImageConfiguration.NamingStrategy namingStrategy) {
        if (namingStrategy == RunImageConfiguration.NamingStrategy.none) {
            return null;
        }
        if (alias == null) {
            throw new IllegalArgumentException("A naming scheme 'alias' requires an image alias to be set");
        }
        return alias;
    }

    // checkAllContainers: false = only running containers are considered
    private String findContainerId(String imageNameOrAlias, boolean checkAllContainers) throws DockerAccessException {
        String id = lookupContainer(imageNameOrAlias);

        // check for external container. The image name is interpreted as a *container name* for that case ...
        if (id == null) {
            Container container = queryService.getContainer(imageNameOrAlias);
            if (container != null && (checkAllContainers || container.isRunning())) {
                id = container.getId();
            }
        }
        return id;
    }

    // Start the created container and register it with the tracker for later shutdown
    private void startContainer(ImageConfiguration imageConfig, String id, PomLabel pomLabel) throws DockerAccessException {
        log.info("%s: Start container %s", imageConfig.getDescription(), id);
        docker.startContainer(id);
        tracker.registerContainer(id, imageConfig, pomLabel);
    }

    // Copy dynamically assigned host ports/addresses back into the given port mapping
    private void updateMappedPortsAndAddresses(String containerId, PortMapping mappedPorts) throws DockerAccessException {
        Container container = queryService.getMandatoryContainer(containerId);
        if (container.isRunning()) {
            mappedPorts.updateProperties(container.getPortBindings());
        } else {
            log.warn("Container %s is not running anymore, can not extract dynamic ports", containerId);
        }
    }

    // Run the pre-stop command (best effort), stop the container within the grace period
    // and optionally remove it afterwards
    private void shutdown(ContainerTracker.ContainerShutdownDescriptor descriptor, boolean keepContainer, boolean removeVolumes)
        throws DockerAccessException {

        String containerId = descriptor.getContainerId();
        if (descriptor.getPreStop() != null) {
            try {
                execInContainer(containerId, descriptor.getPreStop(), descriptor.getImageConfiguration());
            } catch (DockerAccessException e) {
                // best effort: a failing pre-stop command must not prevent the stop itself
                log.error("%s", e.getMessage());
            }
        }

        int killGracePeriod = adjustGracePeriod(descriptor.getKillGracePeriod());
        log.debug("shutdown will wait max of %d seconds before removing container", killGracePeriod);

        long waited;
        if (killGracePeriod == 0) {
            docker.stopContainer(containerId, 0);
            waited = 0;
        } else {
            waited = shutdownAndWait(containerId, killGracePeriod);
        }
        if (!keepContainer) {
            removeContainer(descriptor, removeVolumes, containerId);
        }

        log.info("%s: Stop%s container %s after %s ms",
                 descriptor.getDescription(),
                 (keepContainer ? "" : " and removed"),
                 containerId.substring(0, 12), waited);
    }

    /**
     * Create the given custom network unless it already exists.
     *
     * @param customNetwork name of the custom network
     * @throws DockerAccessException if access to the docker backend fails
     */
    public void createCustomNetworkIfNotExistant(String customNetwork) throws DockerAccessException {
        if (!queryService.hasNetwork(customNetwork)) {
            docker.createNetwork(new NetworkCreateConfig(customNetwork));
        } else {
            // use the format-argument logging convention like every other log call in this class
            log.debug("Custom Network %s found", customNetwork);
        }
    }

    /**
     * Remove the given custom networks.
     *
     * @param networks networks to remove
     * @throws DockerAccessException if access to the docker backend fails
     */
    public void removeCustomNetworks(Collection<Network> networks) throws DockerAccessException {
        for (Network network : networks) {
            docker.removeNetwork(network.getId());
        }
    }

    // Convert the grace period from milliseconds to whole seconds (rounded), warning when
    // a non-zero period rounds down to zero
    private int adjustGracePeriod(int gracePeriod) {
        int killGracePeriodInSeconds = (gracePeriod + 500) / 1000;
        if (gracePeriod != 0 && killGracePeriodInSeconds == 0) {
            log.warn("A kill grace period of %d ms leads to no wait at all since its rounded to seconds. " +
                     "Please use at least 500 as value for wait.kill", gracePeriod);
        }
        return killGracePeriodInSeconds;
    }

    // Wait out the configured shutdown grace period, then remove the container
    private void removeContainer(ContainerTracker.ContainerShutdownDescriptor descriptor, boolean removeVolumes, String containerId)
        throws DockerAccessException {
        int shutdownGracePeriod = descriptor.getShutdownGracePeriod();
        if (shutdownGracePeriod != 0) {
            log.debug("Shutdown: Wait %d ms before removing container", shutdownGracePeriod);
            WaitUtil.sleep(shutdownGracePeriod);
        }
        // Remove the container
        docker.removeContainer(containerId, removeVolumes);
    }

    // Stop the container and wait up to the grace period for it to go down; returns the time waited in ms
    private long shutdownAndWait(final String containerId, final int killGracePeriodInSeconds) throws DockerAccessException {
        long waited;
        try {
            waited = WaitUtil.wait(killGracePeriodInSeconds, new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    docker.stopContainer(containerId, killGracePeriodInSeconds);
                    return null;
                }
            });
        } catch (ExecutionException e) {
            if (e.getCause() instanceof DockerAccessException) {
                throw (DockerAccessException) e.getCause();
            } else {
                throw new DockerAccessException(e, "failed to stop container id [%s]", containerId);
            }
        } catch (WaitTimeoutException e) {
            waited = e.getWaited();
            log.warn("Stop container id [%s] timed out after %s ms", containerId, waited);
        }
        return waited;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.affinity;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.nio.ByteBuffer;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;

/**
 * Pair of (major, minor) topology version.
 * Ordered first by {@code topVer}, then by {@code minorTopVer}; equality and
 * hash code use both components. Serializable both via {@link Externalizable}
 * and via the {@link Message} buffer-based protocol.
 */
public class AffinityTopologyVersion implements Comparable<AffinityTopologyVersion>, Externalizable, Message {
    /** */
    private static final long serialVersionUID = 0L;

    /** Sentinel for "no version": major version {@code -1}. */
    public static final AffinityTopologyVersion NONE = new AffinityTopologyVersion(-1, 0);

    /** Zero version (major and minor both {@code 0}). */
    public static final AffinityTopologyVersion ZERO = new AffinityTopologyVersion(0, 0);

    /** Major topology version. */
    private long topVer;

    /** Minor topology version. */
    private int minorTopVer;

    /**
     * Empty constructor required by {@link Externalizable}.
     */
    public AffinityTopologyVersion() {
        // No-op.
    }

    /**
     * @param topVer Topology version.
     */
    public AffinityTopologyVersion(long topVer) {
        this.topVer = topVer;
    }

    /**
     * @param topVer Topology version.
     * @param minorTopVer Minor topology version.
     */
    public AffinityTopologyVersion(
        long topVer,
        int minorTopVer
    ) {
        this.topVer = topVer;
        this.minorTopVer = minorTopVer;
    }

    /**
     * @return Topology version.
     */
    public long topologyVersion() {
        return topVer;
    }

    /**
     * @return Minor topology version.
     */
    public int minorTopologyVersion() {
        return minorTopVer;
    }

    /** {@inheritDoc} */
    @Override public int compareTo(AffinityTopologyVersion o) {
        // Major version dominates; minor version breaks ties.
        int cmp = Long.compare(topVer, o.topVer);

        if (cmp == 0)
            return Integer.compare(minorTopVer, o.minorTopVer);

        return cmp;
    }

    /** {@inheritDoc} */
    @Override public void onAckReceived() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (this == o)
            return true;

        if (!(o instanceof AffinityTopologyVersion))
            return false;

        AffinityTopologyVersion that = (AffinityTopologyVersion)o;

        return minorTopVer == that.minorTopVer && topVer == that.topVer;
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        // Uses only the low 32 bits of topVer; consistent with equals() above.
        return 31 * (int)topVer + minorTopVer;
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        out.writeLong(topVer);
        out.writeInt(minorTopVer);
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        topVer = in.readLong();
        minorTopVer = in.readInt();
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // Resumable state machine: when the buffer fills, 'false' is returned and a later
        // call resumes from writer.state(). Fall-through between cases is intentional --
        // each case writes one field and advances the state.
        switch (writer.state()) {
            case 0:
                if (!writer.writeInt("minorTopVer", minorTopVer))
                    return false;

                writer.incrementState();

            case 1:
                if (!writer.writeLong("topVer", topVer))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        // Mirror of writeTo(): intentional fall-through, resuming from reader.state()
        // when a previous call ran out of buffered bytes.
        switch (reader.state()) {
            case 0:
                minorTopVer = reader.readInt("minorTopVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 1:
                topVer = reader.readLong("topVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(AffinityTopologyVersion.class);
    }

    /** {@inheritDoc} */
    @Override public byte directType() {
        return 111;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 2;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(AffinityTopologyVersion.class, this);
    }
}
package biovis.sierra.client.GUI.GUIHelper; import java.text.NumberFormat; import java.util.ArrayList; import java.util.List; import javafx.animation.KeyFrame; import javafx.animation.KeyValue; import javafx.animation.Timeline; import javafx.beans.binding.DoubleBinding; import javafx.beans.property.DoubleProperty; import javafx.beans.property.SimpleDoubleProperty; import javafx.scene.chart.ValueAxis; import javafx.util.Duration; /** * A logarithmic axis implementation for JavaFX 2 charts<br> * <br> * * @author Daniel Gerighausen * */ public class LogarithmicAxis extends ValueAxis<Number> { /** * The time of animation in ms */ private static final double ANIMATION_TIME = 2000; private final Timeline lowerRangeTimeline = new Timeline(); private final Timeline upperRangeTimeline = new Timeline(); private final DoubleProperty logUpperBound = new SimpleDoubleProperty(); private final DoubleProperty logLowerBound = new SimpleDoubleProperty(); public LogarithmicAxis() { super(1, 100); bindLogBoundsToDefaultBounds(); } public LogarithmicAxis(double lowerBound, double upperBound) { super(lowerBound, upperBound); try { validateBounds(lowerBound, upperBound); bindLogBoundsToDefaultBounds(); } catch (IllegalLogarithmicRangeException e) { e.printStackTrace(); } } /** * Bind our logarithmic bounds with the super class bounds, consider the base 10 logarithmic scale. 
*/ private void bindLogBoundsToDefaultBounds() { logLowerBound.bind(new DoubleBinding() { { super.bind(lowerBoundProperty()); } @Override protected double computeValue() { return Math.log10(lowerBoundProperty().get()); } }); logUpperBound.bind(new DoubleBinding() { { super.bind(upperBoundProperty()); } @Override protected double computeValue() { return Math.log10(upperBoundProperty().get()); } }); } /** * Validate the bounds by throwing an exception if the values are not conform to the mathematics log interval: * ]0,Double.MAX_VALUE] * * @param lowerBound * @param upperBound * @throws IllegalLogarithmicRangeException */ private void validateBounds(double lowerBound, double upperBound) throws IllegalLogarithmicRangeException { if (lowerBound < 0 || upperBound < 0 || lowerBound > upperBound) { throw new IllegalLogarithmicRangeException( "The logarithmic range should be include to ]0,Double.MAX_VALUE] and the lowerBound should be less than the upperBound"); } } /** * {@inheritDoc} */ @Override protected List<Number> calculateMinorTickMarks() { Number[] range = getRange(); List<Number> minorTickMarksPositions = new ArrayList<Number>(); if (range != null) { Number upperBound = range[1]; double logUpperBound = Math.log10(upperBound.doubleValue()); // System.err.println(logUpperBound); int minorTickMarkCount = getMinorTickCount(); // System.err.println(minorTickMarkCount); for (double i = 0; i <= logUpperBound; i += 0.5) { //for (double j = 0; j <= 9; j += (1. 
/ 1)) { //double value = j * Math.pow(10, i); double value = Math.pow(10, i); minorTickMarksPositions.add(value); //} } } return minorTickMarksPositions; } /** * {@inheritDoc} */ @Override protected List<Number> calculateTickValues(double length, Object range) { List<Number> tickPositions = new ArrayList<Number>(); if (range != null) { // Number lowerBound = ((Number[]) range)[0]; Number upperBound = ((Number[]) range)[1]; // double logLowerBound = Math.log10(lowerBound.doubleValue()); double logUpperBound = Math.log10(upperBound.doubleValue()); for (double i = 0; i <= logUpperBound; i += 0.5) { // for (double j = 1; j <= 9; j++) { // double value = j * Math.pow(10, i); double value = Math.pow(10, i); tickPositions.add(value); // } } } return tickPositions; } @Override protected Number[] getRange() { return new Number[] { lowerBoundProperty().get(), upperBoundProperty().get() }; } @Override protected String getTickMarkLabel(Number value) { NumberFormat formatter = NumberFormat.getInstance(); formatter.setMaximumIntegerDigits(10); formatter.setMinimumIntegerDigits(1); return formatter.format(value); } /** * {@inheritDoc} */ @Override protected void setRange(Object range, boolean animate) { if (range != null) { Number lowerBound = ((Number[]) range)[0]; Number upperBound = ((Number[]) range)[1]; try { validateBounds(lowerBound.doubleValue(), upperBound.doubleValue()); } catch (IllegalLogarithmicRangeException e) { e.printStackTrace(); } if (animate) { try { lowerRangeTimeline.getKeyFrames().clear(); upperRangeTimeline.getKeyFrames().clear(); lowerRangeTimeline.getKeyFrames() .addAll(new KeyFrame(Duration.ZERO, new KeyValue(lowerBoundProperty(), lowerBoundProperty() .get())), new KeyFrame(new Duration(ANIMATION_TIME), new KeyValue(lowerBoundProperty(), lowerBound.doubleValue()))); upperRangeTimeline.getKeyFrames() .addAll(new KeyFrame(Duration.ZERO, new KeyValue(upperBoundProperty(), upperBoundProperty() .get())), new KeyFrame(new Duration(ANIMATION_TIME), new 
KeyValue(upperBoundProperty(), upperBound.doubleValue()))); lowerRangeTimeline.play(); upperRangeTimeline.play(); } catch (Exception e) { lowerBoundProperty().set(lowerBound.doubleValue()); upperBoundProperty().set(upperBound.doubleValue()); } } lowerBoundProperty().set(lowerBound.doubleValue()); upperBoundProperty().set(upperBound.doubleValue()); } } @Override public Number getValueForDisplay(double displayPosition) { double delta = logUpperBound.get() - logLowerBound.get(); if (getSide().isVertical()) { return Math.pow(10, (((displayPosition - getHeight()) / -getHeight()) * delta) + logLowerBound.get()); } else { return Math.pow(10, (((displayPosition / getWidth()) * delta) + logLowerBound.get())); } } @Override public double getDisplayPosition(Number value) { double delta = logUpperBound.get() - logLowerBound.get(); double deltaV = Math.log10(value.doubleValue()) - logLowerBound.get(); if (getSide().isVertical()) { return (1. - ((deltaV) / delta)) * getHeight(); } else { return ((deltaV) / delta) * getWidth(); } } public class IllegalLogarithmicRangeException extends Exception { /** * */ private static final long serialVersionUID = 7973825992270560204L; /** * @param message */ public IllegalLogarithmicRangeException(String message) { super(message); } } }
/* ======================================================================== * PlantUML : a free UML diagram generator * ======================================================================== * * (C) Copyright 2009-2020, Arnaud Roques * * Project Info: https://plantuml.com * * If you like this project or if you find it useful, you can support us at: * * https://plantuml.com/patreon (only 1$ per month!) * https://plantuml.com/paypal * * This file is part of PlantUML. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 *
 *
 * Original Author:  Arnaud Roques
 */
package net.sourceforge.plantuml.cucadiagram.dot;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import net.sourceforge.plantuml.OptionFlags;
import net.sourceforge.plantuml.api.MyRunnable;
import net.sourceforge.plantuml.api.TimeoutExecutor;
import net.sourceforge.plantuml.security.SFile;

/**
 * Runs an external process (typically Graphviz dot) with a timeout, feeding it
 * optional stdin bytes and capturing stdout/stderr on dedicated reader threads.
 * <p>
 * A runner is single-use: {@link #run} may only be called once per instance
 * (enforced by the INIT-state check). The shared {@code state} field is the
 * coordination point between the caller thread, the worker ({@code MainThread})
 * and the stream-draining threads; every transition is guarded by
 * {@code changeState}.
 */
public class ProcessRunner {

	private final String[] cmd;

	// Captured stderr/stdout text, populated only on TERMINATED_OK.
	private String error;
	private String out;

	// Lifecycle: INIT -> RUNNING -> {TERMINATED_OK | TIMEOUT | IO_EXCEPTION1/2}.
	// volatile: read/written from the caller, worker and stream threads.
	private volatile ProcessState state = ProcessState.INIT();

	// Guards every write to `state` after the initial RUNNING transition.
	private final Lock changeState = new ReentrantLock();

	/** @param cmd the command line (program + arguments) to execute */
	public ProcessRunner(String[] cmd) {
		this.cmd = cmd;
	}

	/** Convenience overload: run with no working directory. */
	public ProcessState run(byte in[], OutputStream redirection) {
		return run(in, redirection, null);
	}

	/**
	 * Executes the command, waiting at most the configured global timeout.
	 *
	 * @param in          bytes to write to the process's stdin, or null for none
	 * @param redirection if non-null, stdout bytes are streamed here instead of
	 *                    being buffered into {@link #getOut()}
	 * @param dir         working directory, or null for the JVM's current one
	 * @return the final state (TERMINATED_OK, TIMEOUT or an IO_EXCEPTION state)
	 * @throws IllegalStateException if this runner was already used
	 */
	public ProcessState run(byte in[], OutputStream redirection, SFile dir) {
		if (this.state.differs(ProcessState.INIT())) {
			throw new IllegalStateException();
		}
		this.state = ProcessState.RUNNING();
		final MainThread mainThread = new MainThread(cmd, dir, redirection, in);
		try {
			// http://steveliles.github.io/invoking_processes_from_java.html
			// NOTE(review): `done` is intentionally unused — completion is
			// signalled through `state`, not through the return value.
			final long timeoutMs = OptionFlags.getInstance().getTimeoutMs();
			final boolean done = new TimeoutExecutor(timeoutMs).executeNow(mainThread);
		} finally {
			changeState.lock();
			try {
				// Still RUNNING after executeNow returned => the worker did not
				// finish in time; mark the whole run as timed out.
				if (state.equals(ProcessState.RUNNING())) {
					state = ProcessState.TIMEOUT();
					// mainThread.cancel();
				}
			} finally {
				changeState.unlock();
			}
		}
		if (state.equals(ProcessState.TERMINATED_OK())) {
			assert mainThread != null;
			// Safe to read the gobbler buffers: both stream threads were joined
			// before TERMINATED_OK was set in runJob().
			this.error = mainThread.getError();
			this.out = mainThread.getOut();
		}
		return state;
	}

	/**
	 * The worker that actually launches the process, pumps stdin and waits for
	 * the output-draining threads. Executed under {@link TimeoutExecutor}.
	 */
	class MainThread implements MyRunnable {

		private final String[] cmd;
		private final SFile dir;
		private final OutputStream redirection;
		private final byte[] in;

		// volatile: written by runJob()/startThreads(), read by cancelJob()
		// from the timeout-watcher thread.
		private volatile Process process;
		private volatile ThreadStream errorStream;
		private volatile ThreadStream outStream;

		public MainThread(String[] cmd, SFile dir, OutputStream redirection, byte[] in) {
			this.cmd = cmd;
			this.dir = dir;
			this.redirection = redirection;
			this.in = in;
		}

		/** Buffered stdout text (empty if the gobbler was cancelled). */
		public String getOut() {
			return outStream.getString();
		}

		/** Buffered stderr text (empty if the gobbler was cancelled). */
		public String getError() {
			return errorStream.getString();
		}

		/**
		 * Launches the process and blocks until it exits, then transitions the
		 * state to TERMINATED_OK unless an earlier error/timeout already moved
		 * it elsewhere. Always destroys the process and closes its streams.
		 */
		public void runJob() throws InterruptedException {
			try {
				startThreads();
				// Only wait if startThreads() didn't already record an I/O error.
				if (state.equals(ProcessState.RUNNING())) {
					final int result = joinInternal();
				}
			} finally {
				changeState.lock();
				try {
					if (state.equals(ProcessState.RUNNING())) {
						state = ProcessState.TERMINATED_OK();
					}
				} finally {
					changeState.unlock();
				}
				if (process != null) {
					process.destroy();
					close(process.getErrorStream());
					close(process.getOutputStream());
					close(process.getInputStream());
				}
			}
		}

		/**
		 * Invoked by the timeout watcher when the run overstays its budget:
		 * cancels both gobblers, kills the process and closes its streams.
		 */
		public void cancelJob() {
			// The changeState lock is ok
			// assert changeState.tryLock();
			// assert state == ProcessState.TIMEOUT;
			if (process != null) {
				errorStream.cancel();
				outStream.cancel();
				process.destroy();
				// interrupt();
				close(process.getErrorStream());
				close(process.getOutputStream());
				close(process.getInputStream());
			}
		}

		/**
		 * Starts the process and the two stream-draining threads, then writes
		 * the stdin payload (if any). I/O failures are recorded in `state`
		 * (IO_EXCEPTION1 for launch, IO_EXCEPTION2 for stdin) rather than thrown.
		 */
		private void startThreads() {
			try {
				process = Runtime.getRuntime().exec(cmd, null, dir == null ? null : dir.conv());
			} catch (IOException e) {
				e.printStackTrace();
				changeState.lock();
				try {
					state = ProcessState.IO_EXCEPTION1(e);
				} finally {
					changeState.unlock();
				}
				e.printStackTrace();
				return;
			}
			// stderr is always buffered; stdout goes to `redirection` when set.
			errorStream = new ThreadStream(process.getErrorStream(), null);
			outStream = new ThreadStream(process.getInputStream(), redirection);
			errorStream.start();
			outStream.start();
			if (in != null) {
				final OutputStream os = process.getOutputStream();
				try {
					try {
						os.write(in);
					} finally {
						// Closing stdin signals EOF to the child process.
						os.close();
					}
				} catch (IOException e) {
					changeState.lock();
					try {
						state = ProcessState.IO_EXCEPTION2(e);
					} finally {
						changeState.unlock();
					}
					e.printStackTrace();
				}
			}
		}

		/**
		 * Waits for both gobblers to drain, then for the process to exit.
		 *
		 * @return the process exit code
		 */
		public int joinInternal() throws InterruptedException {
			errorStream.join();
			outStream.join();
			final int result = process.waitFor();
			return result;
		}
	}

	/**
	 * Drains one process stream byte-by-byte, either into an in-memory buffer
	 * or into a redirection target. Designed to be abandoned mid-read on
	 * cancel: cancel() nulls the fields and tolerates the resulting NPEs.
	 */
	class ThreadStream extends Thread {

		// volatile + nullable: cancel() clears these from another thread.
		private volatile InputStream streamToRead;
		private volatile OutputStream redirection;
		private volatile StringBuffer sb = new StringBuffer();

		ThreadStream(InputStream streamToRead, OutputStream redirection) {
			this.streamToRead = streamToRead;
			this.redirection = redirection;
		}

		/** @return the buffered text, or "" if this gobbler was cancelled */
		public String getString() {
			if (sb == null) {
				return "";
			}
			return sb.toString();
		}

		/**
		 * Aborts the drain on timeout: interrupts the thread and releases the
		 * stream/buffer references.
		 */
		public void cancel() {
			assert state.equals(ProcessState.TIMEOUT()) || state.equals(ProcessState.RUNNING()) : state;
			this.interrupt();
			sb = null;
			streamToRead = null;
			redirection = null;
			// Because of this, some NPE may occurs in run() method, but we do not care
		}

		@Override
		public void run() {
			int read = 0;
			try {
				while ((read = streamToRead.read()) != -1) {
					// Stop consuming once the run has been declared timed out.
					if (state.equals(ProcessState.TIMEOUT())) {
						return;
					}
					if (redirection == null) {
						sb.append((char) read);
					} else {
						redirection.write(read);
					}
				}
			} catch (Throwable e) {
				// Catches NPEs caused by cancel() as well as genuine I/O errors;
				// the error text is appended so callers can see what happened.
				System.err.println("ProcessRunnerA " + e);
				e.printStackTrace();
				sb.append('\n');
				sb.append(e.toString());
			}
		}
	}

	/** @return captured stderr, or null unless the run ended TERMINATED_OK */
	public final String getError() {
		return error;
	}

	/** @return captured stdout, or null unless the run ended TERMINATED_OK */
	public final String getOut() {
		return out;
	}

	// Quiet close helpers: failures while tearing down are only logged.
	private void close(InputStream is) {
		try {
			if (is != null) {
				is.close();
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	private void close(OutputStream os) {
		try {
			if (os != null) {
				os.close();
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
package lodVader.mongodb.queries; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.mongodb.AggregationOptions; import com.mongodb.AggregationOutput; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; import com.mongodb.Cursor; import com.mongodb.DBCollection; import com.mongodb.DBCursor; import com.mongodb.DBObject; import lodVader.bloomfilters.BloomFilterI; import lodVader.bloomfilters.impl.BloomFilterFactory; import lodVader.bloomfilters.models.LoadedBloomFiltersCache; import lodVader.configuration.LODVaderProperties; import lodVader.enumerators.TuplePart; import lodVader.linksets.DistributionBloomFilterContainer; import lodVader.mongodb.DBSuperClass2; import lodVader.mongodb.collections.DatasetDB; import lodVader.mongodb.collections.DistributionDB; import lodVader.mongodb.collections.RDFResources.allPredicates.AllPredicatesDB; import lodVader.mongodb.collections.RDFResources.allPredicates.AllPredicatesRelationDB; import lodVader.mongodb.collections.namespaces.DistributionObjectNS0DB; import lodVader.mongodb.collections.namespaces.DistributionObjectNSDB; import lodVader.mongodb.collections.namespaces.DistributionSubjectNS0DB; import lodVader.mongodb.collections.namespaces.DistributionSubjectNSDB; import lodVader.utils.NSUtils; @Component public class DistributionQueries { @Autowired DBSuperClass2 db; final static Logger logger = LoggerFactory.getLogger(DistributionQueries.class); public int distributionQuerySize; private NSUtils nsUtils = new NSUtils(); AggregationOptions aggregationOptions = AggregationOptions.builder().batchSize(100) .outputMode(AggregationOptions.OutputMode.CURSOR).allowDiskUse(true).build(); public 
HashSet<Integer> getDistributionsBySubjectNS(String nsToSearch) { BasicDBObject query = new BasicDBObject(DistributionSubjectNS0DB.NS, nsToSearch); DBCollection collection = db.getDBInstance().getCollection(DistributionSubjectNS0DB.COLLECTION_NAME); HashSet<Integer> hash = new HashSet<Integer>(); Cursor cursor = collection.find(query); while (cursor.hasNext()) { DBObject instance = cursor.next(); // DistributionDB distribution = new DistributionDB( // ((Number) instance.get(DistributionSubjectNS0DB.DISTRIBUTION_ID)).intValue()); // // if (distribution.getTriples() >= 1000) hash.add(((Number) instance.get(DistributionSubjectNS0DB.DISTRIBUTION_ID)).intValue()); } return hash; } public HashSet<Integer> getDistributionsByObjectNS(String nsToSearch) { BasicDBObject query = new BasicDBObject(DistributionObjectNS0DB.NS, nsToSearch); DBCollection collection = db.getDBInstance().getCollection(DistributionObjectNS0DB.COLLECTION_NAME); HashSet<Integer> hash = new HashSet<Integer>(); Cursor cursor = collection.find(query); while (cursor.hasNext()) { DBObject instance = cursor.next(); // DistributionDB distribution = new DistributionDB( // ((Number) instance.get(DistributionSubjectNS0DB.DISTRIBUTION_ID)).intValue()); // // if (distribution.getTriples() >= 1000) hash.add(((Number) instance.get(DistributionObjectNS0DB.DISTRIBUTION_ID)).intValue()); } return hash; } public ArrayList<DistributionDB> getDistributionsByOutdegree(ArrayList<String> nsToSearch, ConcurrentHashMap<Integer, DistributionBloomFilterContainer> distributionFilter) { ArrayList<DistributionDB> list = new ArrayList<DistributionDB>(); try { // query all NS BasicDBObject query = new BasicDBObject(DistributionSubjectNS0DB.NS, new BasicDBObject("$in", nsToSearch)); DBCollection collection = db.getDBInstance() .getCollection(DistributionSubjectNS0DB.COLLECTION_NAME); // group by Cursor cursor = collection .aggregate(Arrays.asList( new BasicDBObject("$match", new BasicDBObject(DistributionSubjectNS0DB.NS, new 
BasicDBObject("$in", nsToSearch))), new BasicDBObject("$group", new BasicDBObject("_id", "$" + DistributionSubjectNS0DB.DISTRIBUTION_ID)) ), aggregationOptions); // save a list with distribution and fqdn while (cursor.hasNext()) { DBObject instance = cursor.next(); DistributionDB distribution = new DistributionDB(); distribution.setLodVaderID(((Number) instance.get("_id")).intValue()); if (distribution.find(true)) { list.add(distribution); if (!distributionFilter.containsKey(distribution.getLODVaderID())) { distributionFilter.put(distribution.getLODVaderID(), new DistributionBloomFilterContainer(distribution.getLODVaderID())); } } } } catch (Exception e) { e.printStackTrace(); } return list; } public BloomFilterI getDescribedNS0(String resourceType) { List<String> cursor; if (resourceType.equals(TuplePart.SUBJECT)) { DBCollection collection = db.getDBInstance() .getCollection(DistributionSubjectNS0DB.COLLECTION_NAME); cursor = collection.distinct(DistributionSubjectNS0DB.NS); } else { DBCollection collection = db.getDBInstance() .getCollection(DistributionObjectNS0DB.COLLECTION_NAME); cursor = collection.distinct(DistributionObjectNS0DB.NS); } int size = cursor.size(); if (size < 5000) size = 5000; BloomFilterI g = BloomFilterFactory.newBloomFilter(); g.create(cursor.size(), 0.00001); for (String s : cursor) { g.add(s); } return g; } public BloomFilterI getDescribedNS(TuplePart resourceType) { DBObject groupIdFields = null; if (resourceType.equals(TuplePart.OBJECT)) groupIdFields = new BasicDBObject("_id", "$" + DistributionObjectNSDB.NS); else if (resourceType.equals(TuplePart.SUBJECT)) groupIdFields = new BasicDBObject("_id", "$" + DistributionSubjectNSDB.NS); // groupIdFields.put("count", new BasicDBObject("$sum", 1)); DBObject group = new BasicDBObject("$group", groupIdFields); DBObject projectFields = new BasicDBObject("_id", 0); if (resourceType.equals(TuplePart.OBJECT)) projectFields.put(DistributionObjectNSDB.NS, "$_id"); else if 
(resourceType.equals(TuplePart.SUBJECT)) projectFields.put(DistributionSubjectNSDB.NS, "$_id"); // projectFields.put("count", new BasicDBObject("$sum", 1)); DBObject project = new BasicDBObject("$project", projectFields); ArrayList<DBObject> ag = new ArrayList<DBObject>(); ag.add(group); ag.add(project); AggregationOptions options = AggregationOptions.builder().outputMode(AggregationOptions.OutputMode.CURSOR) .allowDiskUse(true).build(); BloomFilterI g = null; if (resourceType.equals(TuplePart.OBJECT)) { DBCollection collection = db.getDBInstance() .getCollection(DistributionObjectNSDB.COLLECTION_NAME); g = BloomFilterFactory.newBloomFilter(); g.create(collection.find().size() + LODVaderProperties.BF_BUFFER_RANGE, 0.0001); int size = 0; Cursor aggregate = collection.aggregate(ag, options); while (aggregate.hasNext()) { DBObject d = aggregate.next(); g.add(d.get(DistributionObjectNSDB.NS).toString()); size++; } logger.info("Loaded " + size + " object namespaces."); } else if (resourceType.equals(TuplePart.SUBJECT)) { DBCollection collection = db.getDBInstance() .getCollection(DistributionSubjectNSDB.COLLECTION_NAME); g = BloomFilterFactory.newBloomFilter(); g.create(collection.find().size() + LODVaderProperties.BF_BUFFER_RANGE, 0.0001); Cursor aggregate = collection.aggregate(ag, options); int size = 0; while (aggregate.hasNext()) { DBObject d = aggregate.next(); g.add(d.get(DistributionSubjectNSDB.NS).toString()); size++; } logger.info("Loaded " + size + " subject namespaces."); } return g; } public ArrayList<DistributionDB> getDistributionsByIndegree(ArrayList<String> fqdnToSearch, ConcurrentHashMap<Integer, DistributionBloomFilterContainer> fqdnPerDistribution) { ArrayList<DistributionDB> list = new ArrayList<DistributionDB>(); HashSet<Integer> map = new HashSet<Integer>(); try { BasicDBObject query = new BasicDBObject(DistributionObjectNS0DB.NS, new BasicDBObject("$in", fqdnToSearch)); DBCollection collection = db.getDBInstance() 
.getCollection(DistributionObjectNS0DB.COLLECTION_NAME); // fileds to be projected BasicDBObject project = new BasicDBObject(DistributionSubjectNS0DB.DISTRIBUTION_ID, 1); DBCursor cursor = collection.find(query, project); while (cursor.hasNext()) { DBObject instance = cursor.next(); if (!map.contains(((Integer) instance.get(DistributionSubjectNS0DB.DISTRIBUTION_ID)).intValue())) { // DistributionDB distribution = new DistributionDB( // ((Number) // instance.get(DistributionObjectNS0DB.DISTRIBUTION_ID)).intValue()); DistributionDB distribution = new DistributionDB(); distribution .setLodVaderID(((Number) instance.get(DistributionObjectNS0DB.DISTRIBUTION_ID)).intValue()); if (distribution.find(true)) { list.add(distribution); if (!fqdnPerDistribution.containsKey(distribution.getUri())) { fqdnPerDistribution.put(distribution.getLODVaderID(), new DistributionBloomFilterContainer(distribution.getLODVaderID())); } } map.add(((Integer) instance.get(DistributionSubjectNS0DB.DISTRIBUTION_ID)).intValue()); } } } catch (Exception e) { e.printStackTrace(); } return list; } /** * * @return number of total triples read */ public Double getNumberOfTriples() { Double numberOfTriples = 0.0; try { DBCollection collection = db.getDBInstance().getCollection(DistributionDB.COLLECTION_NAME); BasicDBObject select = new BasicDBObject("$match", new BasicDBObject(DistributionDB.SUCCESSFULLY_DOWNLOADED, true)); BasicDBObject groupFields = new BasicDBObject("_id", null); groupFields.append("sum", new BasicDBObject("$sum", "$triples")); DBObject group = new BasicDBObject("$group", groupFields); // run aggregation List<DBObject> pipeline = Arrays.asList(select, group); AggregationOutput output = collection.aggregate(pipeline); for (DBObject result : output.results()) { numberOfTriples = Double.valueOf(result.get("sum").toString()); } } catch (Exception e) { e.printStackTrace(); } return numberOfTriples; } /** * * @return number of total triples by vocab */ public long getNumberOfTriples(Boolean 
isVocab) { long totalTriples = 0; try { DBCollection collection = db.getDBInstance().getCollection(DistributionDB.COLLECTION_NAME); BasicDBObject query; if (isVocab != null) query = new BasicDBObject(new BasicDBObject(DistributionDB.IS_VOCABULARY, isVocab)); else query = new BasicDBObject(); DBCursor instances = collection.find(query); Iterator<DBObject> it = instances.iterator(); while (it.hasNext()) { totalTriples = totalTriples + Long.parseLong(it.next().get(DistributionDB.TRIPLES).toString()); } } catch (Exception e) { e.printStackTrace(); } return totalTriples; } /** * Get all distributions * * @param vocabularies * specifies whether should vocabularies be added in the return * list. If the value is null, vocabularies ans distrubitions * will be returned * @return a ArrayList of DistributionMongoDBObject */ public ArrayList<DistributionDB> getDistributions(Boolean vocabularies, String status, Integer datasetID) { ArrayList<DistributionDB> list = new ArrayList<DistributionDB>(); DBCursor instances; try { DBCollection collection = db.getDBInstance().getCollection(DistributionDB.COLLECTION_NAME); BasicDBList and = new BasicDBList(); if (vocabularies != null) { if (vocabularies) and.add(new BasicDBObject(DistributionDB.IS_VOCABULARY, true)); else and.add(new BasicDBObject(DistributionDB.IS_VOCABULARY, false)); } if (status != null && status != "") and.add(new BasicDBObject(DistributionDB.STATUS, status)); if (datasetID != null) and.add(new BasicDBObject(DistributionDB.TOP_DATASET, datasetID)); if (and.size() > 0) instances = collection.find(new BasicDBObject("$and", and)); else instances = collection.find(); for (DBObject instance : instances) { list.add(new DistributionDB(instance)); } } catch (Exception e) { e.printStackTrace(); } return list; } /** * Get distributions using filters * * @param skip * how many distribution to skip * @param limit * size of the range * @param searchVocabularies * true only for vocabularies, false only for datasets and null * for 
vocabularies and datasets * @param seach * string to compare with distribution name or downloadurl * @param searchStatus * search status: DONE, ERROR, WAITING_TO_STREAM or STREAMING. * @return a ArrayList of DistributionMongoDBObject */ public ArrayList<DistributionDB> getDistributions(int skip, int limit, Boolean searchVocabularies, String searchNameOrURL, List<Integer> in, String searchStatus) { ArrayList<DistributionDB> list = new ArrayList<DistributionDB>(); try { DBCollection collection = db.getDBInstance().getCollection(DistributionDB.COLLECTION_NAME); DBObject query = null; if (searchNameOrURL != "") { DBObject query2; DBObject query3; query2 = new BasicDBObject(DistributionDB.DOWNLOAD_URL, java.util.regex.Pattern.compile(searchNameOrURL)); query3 = new BasicDBObject(DistributionDB.TITLE, java.util.regex.Pattern.compile(searchNameOrURL)); BasicDBList or = new BasicDBList(); or.add(query3); or.add(query2); query = new BasicDBObject("$or", or); } if (in.size() > 0) { BasicDBList and = new BasicDBList(); if (query != null) and.add(query); and.add(new BasicDBObject(DistributionDB.LOD_VADER_ID, new BasicDBObject("$in", in))); query = new BasicDBObject("$and", and); } if (searchVocabularies != null) { BasicDBList and = new BasicDBList(); if (query != null) and.add(query); and.add(new BasicDBObject(DistributionDB.IS_VOCABULARY, searchVocabularies)); query = new BasicDBObject("$and", and); } if (!searchStatus.equals("")) { BasicDBList and = new BasicDBList(); if (query != null) and.add(query); and.add(new BasicDBObject(DistributionDB.STATUS, searchStatus)); query = new BasicDBObject("$and", and); } logger.debug("MongoDB query: " + query); DBCursor instances = collection.find(query); distributionQuerySize = instances.size(); BasicDBObject sort = new BasicDBObject(DistributionDB.TRIPLES, -1); instances = collection.find(query).skip(skip).limit(limit).sort(sort); for (DBObject instance : instances) { list.add(new DistributionDB(instance)); } } catch (Exception e) { 
e.printStackTrace(); } return list; } // return all distributions public ArrayList<DistributionDB> getDistributionsByTopDatasetURL(DatasetDB topDataset) { ArrayList<DistributionDB> distributionList = new ArrayList<DistributionDB>(); DBCollection collection; try { collection = db.getDBInstance().getCollection(DistributionDB.COLLECTION_NAME); DBCursor instances = collection .find(new BasicDBObject(DistributionDB.DEFAULT_DATASETS, topDataset.getLODVaderID())); for (DBObject instance : instances) { distributionList.add(new DistributionDB(instance)); } } catch (Exception e) { e.printStackTrace(); } return distributionList; } // return all distributions public ArrayList<DistributionDB> getDistributionsByCohesion() { ArrayList<DistributionDB> distributionList = new ArrayList<DistributionDB>(); DBCollection collection; try { collection = db.getDBInstance().getCollection(DistributionDB.COLLECTION_NAME); DBCursor instances = collection.find().sort(new BasicDBObject(DistributionDB.OBJECTS_COHESION, 1)); for (DBObject instance : instances) { distributionList.add(new DistributionDB(instance)); } } catch (Exception e) { e.printStackTrace(); } return distributionList; } // return all distributions public ArrayList<DistributionDB> getSetOfDistributions(Set<Integer> set) { ArrayList<DistributionDB> distributionList = new ArrayList<DistributionDB>(); DBCollection collection = db.getDBInstance().getCollection(DatasetDB.COLLECTION_NAME); try { collection = db.getDBInstance().getCollection(DistributionDB.COLLECTION_NAME); DBCursor instances = collection .find(new BasicDBObject(DistributionDB.LOD_VADER_ID, new BasicDBObject("$in", set))); for (DBObject instance : instances) { distributionList.add(new DistributionDB(instance)); } } catch (Exception e) { e.printStackTrace(); } return distributionList; } // @Test // public void queryDistribution(){ public HashSet<DistributionDB> queryDistribution(String resource, TuplePart type) { // String resource = 
"http://www.w3.org/2005/11/its/rdf#taSource"; // String type = LODVaderProperties.TYPE_SUBJECT; HashSet<DistributionDB> setOfDistributionNS = new HashSet<DistributionDB>(); // get resource fqdn String ns = nsUtils.getNSFromString(resource); if (type.equals(TuplePart.SUBJECT)) { DBCollection collection = db.getCollection(DistributionSubjectNSDB.COLLECTION_NAME); DBObject query; query = new BasicDBObject(DistributionSubjectNSDB.NS, ns); DBCursor instances = collection.find(query); ArrayList<LoadedBloomFiltersCache> cache = new ArrayList<LoadedBloomFiltersCache>(); for (DBObject instance : instances) { DistributionDB d = new DistributionDB( Integer.valueOf(instance.get(DistributionSubjectNSDB.DISTRIBUTION_ID).toString())); LoadedBloomFiltersCache l = new LoadedBloomFiltersCache(d, resource, TuplePart.SUBJECT); l.start(); cache.add(l); } for (LoadedBloomFiltersCache l : cache) { try { l.join(); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } for (LoadedBloomFiltersCache l : cache) { if (l.found) { setOfDistributionNS.add(l.getDistribution()); } } } else if (type.equals(TuplePart.OBJECT)) { DBCollection collection = db.getDBInstance() .getCollection(DistributionObjectNSDB.COLLECTION_NAME); DBObject query; query = new BasicDBObject(DistributionObjectNSDB.NS, ns); DBCursor instances = collection.find(query); ArrayList<LoadedBloomFiltersCache> cache = new ArrayList<LoadedBloomFiltersCache>(); for (DBObject instance : instances) { DistributionDB d = new DistributionDB( Integer.valueOf(instance.get(DistributionSubjectNSDB.DISTRIBUTION_ID).toString())); LoadedBloomFiltersCache l = new LoadedBloomFiltersCache(d, resource, TuplePart.OBJECT); l.start(); cache.add(l); } for (LoadedBloomFiltersCache l : cache) { try { l.join(); } catch (InterruptedException e) { e.printStackTrace(); } } for (LoadedBloomFiltersCache l : cache) { if (l.found) setOfDistributionNS.add(l.getDistribution()); } } else if (type.equals(TuplePart.PROPERTY)) { 
DBCollection collection = db.getDBInstance().getCollection(AllPredicatesDB.COLLECTION_NAME); DBObject query; query = new BasicDBObject(AllPredicatesDB.URI, resource); DBCursor instances = collection.find(query); int predicateID; if (instances.iterator().hasNext()) predicateID = Integer .parseInt(instances.iterator().next().get(AllPredicatesDB.LOD_VADER_ID).toString()); else return setOfDistributionNS; collection = db.getDBInstance().getCollection(AllPredicatesRelationDB.COLLECTION_NAME); query = new BasicDBObject(AllPredicatesRelationDB.PREDICATE_ID, predicateID); instances = collection.find(query); for (DBObject instance : instances) { DistributionDB d = new DistributionDB( Integer.valueOf(instance.get(AllPredicatesRelationDB.DISTRIBUTION_ID).toString())); setOfDistributionNS.add(d); } } return setOfDistributionNS; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package brooklyn.location.basic; import static brooklyn.util.GroovyJavaMethods.truth; import groovy.lang.Closure; import java.io.Closeable; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PipedInputStream; import java.io.PipedOutputStream; import java.io.Reader; import java.io.StringReader; import java.net.InetAddress; import java.net.Socket; import java.security.KeyPair; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import brooklyn.config.BrooklynLogging; import brooklyn.config.ConfigKey; import brooklyn.config.ConfigKey.HasConfigKey; import brooklyn.config.ConfigUtils; import brooklyn.entity.basic.BrooklynConfigKeys; import brooklyn.entity.basic.ConfigKeys; import brooklyn.event.basic.BasicConfigKey; import brooklyn.event.basic.MapConfigKey; import brooklyn.location.MachineDetails; import brooklyn.location.MachineLocation; import 
brooklyn.location.OsDetails; import brooklyn.location.PortRange; import brooklyn.location.PortSupplier; import brooklyn.management.Task; import brooklyn.util.ResourceUtils; import brooklyn.util.collections.MutableMap; import brooklyn.util.config.ConfigBag; import brooklyn.util.crypto.SecureKeys; import brooklyn.util.exceptions.Exceptions; import brooklyn.util.file.ArchiveUtils; import brooklyn.util.flags.SetFromFlag; import brooklyn.util.flags.TypeCoercions; import brooklyn.util.guava.KeyTransformingLoadingCache.KeyTransformingSameTypeLoadingCache; import brooklyn.util.internal.ssh.ShellTool; import brooklyn.util.internal.ssh.SshException; import brooklyn.util.internal.ssh.SshTool; import brooklyn.util.internal.ssh.sshj.SshjTool; import brooklyn.util.mutex.MutexSupport; import brooklyn.util.mutex.WithMutexes; import brooklyn.util.net.Urls; import brooklyn.util.pool.BasicPool; import brooklyn.util.pool.Pool; import brooklyn.util.ssh.BashCommands; import brooklyn.util.stream.KnownSizeInputStream; import brooklyn.util.stream.ReaderInputStream; import brooklyn.util.stream.StreamGobbler; import brooklyn.util.task.BasicTask; import brooklyn.util.task.ScheduledTask; import brooklyn.util.task.Tasks; import brooklyn.util.task.system.internal.ExecWithLoggingHelpers; import brooklyn.util.task.system.internal.ExecWithLoggingHelpers.ExecRunner; import brooklyn.util.text.Strings; import brooklyn.util.time.Duration; import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.cache.RemovalListener; import com.google.common.cache.RemovalNotification; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.net.HostAndPort;

/**
 * Operations on a machine that is accessible via ssh.
 * <p>
 * We expose two ways of running scripts.
 * The execCommands method passes lines to bash and is lightweight but fragile.
 * The execScript method creates a script on the remote machine. It is portable but heavier.
 * <p>
 * Additionally there are routines to copyTo, copyFrom; and installTo (which tries a curl, and falls back to copyTo
 * in event the source is accessible by the caller only).
 */
public class SshMachineLocation extends AbstractLocation implements MachineLocation, PortSupplier, WithMutexes, Closeable {

    /** @deprecated since 0.7.0 shouldn't be public */
    public static final Logger LOG = LoggerFactory.getLogger(SshMachineLocation.class);

    /** @deprecated since 0.7.0 shouldn't be public */
    public static final Logger logSsh = LoggerFactory.getLogger(BrooklynLogging.SSH_IO);

    public static final ConfigKey<Duration> SSH_CACHE_EXPIRY_DURATION =
            ConfigKeys.newConfigKey(Duration.class, "sshCacheExpiryDuration", "Expiry time for unused cached ssh connections", Duration.FIVE_MINUTES);

    @SetFromFlag
    protected String user;

    @SetFromFlag(nullable = false)
    protected InetAddress address;

    @SetFromFlag
    protected transient WithMutexes mutexSupport;

    @SetFromFlag
    private Set<Integer> usedPorts;

    // lazily computed by getMachineDetails(), guarded by machineDetailsLock (double-checked)
    private volatile MachineDetails machineDetails;
    private final Object machineDetailsLock = new Object();

    public static final ConfigKey<String> SSH_HOST = BrooklynConfigKeys.SSH_CONFIG_HOST;
    public static final ConfigKey<Integer> SSH_PORT = BrooklynConfigKeys.SSH_CONFIG_PORT;

    public static final ConfigKey<String> SSH_EXECUTABLE = ConfigKeys.newStringConfigKey("sshExecutable",
            "Allows an `ssh` executable file to be specified, to be used in place of the default (programmatic) java ssh client");
    public static final ConfigKey<String> SCP_EXECUTABLE = ConfigKeys.newStringConfigKey("scpExecutable",
            "Allows an `scp` executable file to be specified, to be used in place of the default (programmatic) java ssh client");

    // TODO remove
    public static final ConfigKey<String> PASSWORD = SshTool.PROP_PASSWORD;
    public static final ConfigKey<String> PRIVATE_KEY_FILE = SshTool.PROP_PRIVATE_KEY_FILE;
    public static final ConfigKey<String> PRIVATE_KEY_DATA = SshTool.PROP_PRIVATE_KEY_DATA;
    public static final ConfigKey<String> PRIVATE_KEY_PASSPHRASE = SshTool.PROP_PRIVATE_KEY_PASSPHRASE;

    public static final ConfigKey<String> SCRIPT_DIR = ConfigKeys.newStringConfigKey(
            "scriptDir", "directory where scripts should be placed and executed on the SSH target machine");
    public static final ConfigKey<Map<String,Object>> SSH_ENV_MAP = new MapConfigKey<Object>(
            Object.class, "env", "environment variables to pass to the remote SSH shell session");

    public static final ConfigKey<Boolean> ALLOCATE_PTY = SshTool.PROP_ALLOCATE_PTY;

    public static final ConfigKey<OutputStream> STDOUT = new BasicConfigKey<OutputStream>(OutputStream.class, "out");
    public static final ConfigKey<OutputStream> STDERR = new BasicConfigKey<OutputStream>(OutputStream.class, "err");
    public static final ConfigKey<Boolean> NO_STDOUT_LOGGING = ConfigKeys.newBooleanConfigKey(
            "noStdoutLogging", "whether to disable logging of stdout from SSH commands (e.g. for verbose commands)", false);
    public static final ConfigKey<Boolean> NO_STDERR_LOGGING = ConfigKeys.newBooleanConfigKey(
            "noStderrLogging", "whether to disable logging of stderr from SSH commands (e.g. for verbose commands)", false);

    public static final ConfigKey<String> LOG_PREFIX = ConfigKeys.newStringConfigKey("logPrefix");
    public static final ConfigKey<String> LOCAL_TEMP_DIR = SshTool.PROP_LOCAL_TEMP_DIR;

    public static final ConfigKey<Boolean> CLOSE_CONNECTION = ConfigKeys.newBooleanConfigKey("close", "Close the SSH connection after use", false);
    public static final ConfigKey<String> UNIQUE_ID = ConfigKeys.newStringConfigKey("unique", "Unique ID for the SSH connection");

    /**
     * Specifies config keys where a change in the value does not require a new SshTool instance,
     * i.e. they can be specified per command on the tool
     */
    // TODO: Fully specify.
    public static final Set<ConfigKey<?>> REUSABLE_SSH_PROPS = ImmutableSet.of(
            STDOUT, STDERR, SCRIPT_DIR, CLOSE_CONNECTION,
            SshTool.PROP_SCRIPT_HEADER, SshTool.PROP_PERMISSIONS, SshTool.PROP_LAST_MODIFICATION_DATE,
            SshTool.PROP_LAST_ACCESS_DATE, SshTool.PROP_OWNER_UID, SshTool.PROP_SSH_RETRY_DELAY);

    public static final Set<HasConfigKey<?>> ALL_SSH_CONFIG_KEYS =
            ImmutableSet.<HasConfigKey<?>>builder()
                    .addAll(ConfigUtils.getStaticKeysOnClass(SshMachineLocation.class))
                    .addAll(ConfigUtils.getStaticKeysOnClass(SshTool.class))
                    .build();

    public static final Set<String> ALL_SSH_CONFIG_KEY_NAMES =
            ImmutableSet.copyOf(Iterables.transform(ALL_SSH_CONFIG_KEYS, new Function<HasConfigKey<?>,String>() {
                @Override
                public String apply(HasConfigKey<?> input) {
                    return input.getConfigKey().getName();
                }
            }));

    private Task<?> cleanupTask;
    // not serialized; rebuilt on demand (see readObject and execSsh)
    private transient LoadingCache<Map<String, ?>, Pool<SshTool>> sshPoolCache;

    public SshMachineLocation() {
        this(MutableMap.of());
    }

    public SshMachineLocation(Map properties) {
        super(properties);
        usedPorts = (usedPorts != null) ? Sets.newLinkedHashSet(usedPorts) : Sets.<Integer>newLinkedHashSet();
    }

    /** Builds the cache of ssh-tool pools, keyed by the per-call properties (minus the reusable ones). */
    private LoadingCache<Map<String, ?>, Pool<SshTool>> buildSshToolPoolCacheLoader() {
        // TODO: Appropriate numbers for maximum size and expire after access
        // At the moment every SshMachineLocation instance creates its own pool.
        // It might make more sense to create one pool and inject it into all SshMachineLocations.
        Duration expiryDuration = getConfig(SSH_CACHE_EXPIRY_DURATION);

        LoadingCache<Map<String, ?>, Pool<SshTool>> delegate = CacheBuilder.newBuilder()
                .maximumSize(10)
                .expireAfterAccess(expiryDuration.toMilliseconds(), TimeUnit.MILLISECONDS)
                .recordStats()
                .removalListener(new RemovalListener<Map<String, ?>, Pool<SshTool>>() {
                    // TODO: Does it matter that this is synchronous? - Can closing pools cause long delays?
                    @Override
                    public void onRemoval(RemovalNotification<Map<String, ?>, Pool<SshTool>> notification) {
                        Pool<SshTool> removed = notification.getValue();
                        if (removed == null) {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("Pool evicted from SshTool cache is null so we can't call pool.close(). " +
                                        "It's probably already been garbage collected. Eviction cause: {} ",
                                        notification.getCause().name());
                            }
                        } else {
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("{} evicted from SshTool cache. Eviction cause: {}",
                                        removed, notification.getCause().name());
                            }
                            try {
                                removed.close();
                            } catch (IOException e) {
                                if (LOG.isDebugEnabled()) {
                                    LOG.debug("Exception closing "+removed, e);
                                }
                            }
                        }
                    }
                })
                .build(new CacheLoader<Map<String, ?>, Pool<SshTool>>() {
                    public Pool<SshTool> load(Map<String, ?> properties) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} building ssh pool for {} with properties: {}",
                                    new Object[] {this, getSshHostAndPort(), properties});
                        }
                        return buildPool(properties);
                    }
                });

        final Set<String> reusableSshProperties = ImmutableSet.copyOf(
                Iterables.transform(REUSABLE_SSH_PROPS, new Function<ConfigKey<?>, String>() {
                    @Override public String apply(ConfigKey<?> input) {
                        return input.getName();
                    }
                }));
        // Groovy-eclipse compiler refused to compile `KeyTransformingSameTypeLoadingCache.from(...)`
        return new KeyTransformingSameTypeLoadingCache<Map<String, ?>, Pool<SshTool>>(
                delegate,
                new Function<Map<String, ?>, Map<String, ?>>() {
                    @Override
                    public Map<String, ?> apply(@Nullable Map<String, ?> input) {
                        // strip out the per-command keys so equivalent configs share one pool
                        Map<String, Object> copy = new HashMap<String, Object>(input);
                        copy.keySet().removeAll(reusableSshProperties);
                        return copy;
                    }
                });
    }

    /** Creates a pool of ssh tools for the given (pool-defining) properties. */
    private BasicPool<SshTool> buildPool(final Map<String, ?> properties) {
        return BasicPool.<SshTool>builder()
                .name(getDisplayName()+"@"+address+
                        (hasConfig(SSH_HOST, true) ? "("+getConfig(SSH_HOST)+":"+getConfig(SSH_PORT)+")" : "")+
                        ":"+
                        System.identityHashCode(this))
                .supplier(new Supplier<SshTool>() {
                    @Override public SshTool get() {
                        return connectSsh(properties);
                    }})
                .viabilityChecker(new Predicate<SshTool>() {
                    @Override public boolean apply(SshTool input) {
                        return input != null && input.isConnected();
                    }})
                .closer(new Function<SshTool,Void>() {
                    @Override public Void apply(SshTool input) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} closing pool for {}", this, input);
                        }
                        try {
                            input.disconnect();
                        } catch (Exception e) {
                            if (logSsh.isDebugEnabled()) logSsh.debug("On machine "+SshMachineLocation.this+", ssh-disconnect failed", e);
                        }
                        return null;
                    }})
                .build();
    }

    @Override
    public void configure(Map properties) {
        super.configure(properties);

        // TODO Note that check for address!=null is done automatically in super-constructor, in FlagUtils.checkRequiredFields
        // Yikes, dangerous code for accessing fields of sub-class in super-class' constructor! But getting away with it so far!

        if (mutexSupport == null) {
            mutexSupport = new MutexSupport();
        }

        boolean deferConstructionChecks = (properties.containsKey("deferConstructionChecks") && TypeCoercions.coerce(properties.get("deferConstructionChecks"), Boolean.class));
        if (!deferConstructionChecks) {
            if (getDisplayName() == null) {
                setDisplayName((truth(user) ? user+"@" : "") + address.getHostName());
            }
        }
    }

    @Override
    public void init() {
        super.init();
        sshPoolCache = buildSshToolPoolCacheLoader();

        Callable<Task<?>> cleanupTaskFactory = new Callable<Task<?>>() {
            @Override public Task<Void> call() {
                return new BasicTask<Void>(new Callable<Void>() {
                    @Override public Void call() {
                        try {
                            if (sshPoolCache != null) sshPoolCache.cleanUp();
                            return null;
                        } catch (Exception e) {
                            // Don't rethrow: the behaviour of executionManager is different from a scheduledExecutorService,
                            // if we throw an exception, then our task will never get executed again
                            LOG.warn("Problem cleaning up ssh-pool-cache", e);
                            return null;
                        } catch (Throwable t) {
                            LOG.warn("Problem cleaning up ssh-pool-cache (rethrowing)", t);
                            throw Exceptions.propagate(t);
                        }
                    }});
            }
        };

        Duration expiryDuration = getConfig(SSH_CACHE_EXPIRY_DURATION);
        cleanupTask = getManagementContext().getExecutionManager().submit(new ScheduledTask(cleanupTaskFactory).period(expiryDuration));
    }

    @Override
    public void close() throws IOException {
        if (sshPoolCache != null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("{} invalidating all entries in ssh pool cache. Final stats: {}", this, sshPoolCache.stats());
            }
            sshPoolCache.invalidateAll();
        }
        if (cleanupTask != null) {
            cleanupTask.cancel(false);
            cleanupTask = null;
        }
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            close();
        } finally {
            super.finalize();
        }
    }

    @Override
    public InetAddress getAddress() {
        return address;
    }

    public HostAndPort getSshHostAndPort() {
        String host = getConfig(SSH_HOST);
        if (host == null || Strings.isEmpty(host))
            host = address.getHostName();
        Integer port = getConfig(SSH_PORT);
        if (port == null || port == 0)
            port = 22;
        return HostAndPort.fromParts(host, port);
    }

    public String getUser() {
        if (!truth(user)) {
            if (hasConfig(SshTool.PROP_USER, false)) {
                LOG.warn("User configuration for "+this+" set after deployment; deprecated behaviour may not be supported in future versions");
            }
            return getConfig(SshTool.PROP_USER);
        }
        return user;
    }

    /** port for SSHing */
    public int getPort() {
        return getConfig(SshTool.PROP_PORT);
    }

    /** Runs the given task against a pooled ssh tool, invalidating the pool afterwards if CLOSE_CONNECTION is set. */
    protected <T> T execSsh(final Map<String, ?> props, final Function<ShellTool, T> task) {
        if (sshPoolCache == null) {
            // required for uses that instantiate SshMachineLocation directly, so init() will not have been called
            sshPoolCache = buildSshToolPoolCacheLoader();
        }
        Pool<SshTool> pool = sshPoolCache.getUnchecked(props);
        if (LOG.isTraceEnabled()) {
            LOG.trace("{} execSsh got pool: {}", this, pool);
        }

        if (truth(props.get(CLOSE_CONNECTION.getName()))) {
            Function<SshTool, T> close = new Function<SshTool, T>() {
                @Override
                public T apply(SshTool input) {
                    T result = task.apply(input);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("{} invalidating all sshPoolCache entries: {}", SshMachineLocation.this, sshPoolCache.stats().toString());
                    }
                    sshPoolCache.invalidateAll();
                    sshPoolCache.cleanUp();
                    return result;
                }
            };
            return pool.exec(close);
        } else {
            return pool.exec(task);
        }
    }

    protected SshTool connectSsh() {
        return connectSsh(ImmutableMap.of());
    }

    protected boolean previouslyConnected = false;

    protected SshTool connectSsh(Map props) {
        try {
            if (!truth(user)) {
                String newUser = getUser();
                if (LOG.isTraceEnabled()) LOG.trace("For "+this+", setting user in connectSsh: oldUser="+user+"; newUser="+newUser);
                user = newUser;
            }

            ConfigBag args = new ConfigBag()
                    .configure(SshTool.PROP_USER, user)
                    // default value of host, overridden if SSH_HOST is supplied
                    .configure(SshTool.PROP_HOST, address.getHostName())
                    .putAll(props);

            for (Map.Entry<String,Object> entry: getAllConfigBag().getAllConfig().entrySet()) {
                String key = entry.getKey();
                if (key.startsWith(SshTool.BROOKLYN_CONFIG_KEY_PREFIX)) {
                    key = Strings.removeFromStart(key, SshTool.BROOKLYN_CONFIG_KEY_PREFIX);
                } else if (ALL_SSH_CONFIG_KEY_NAMES.contains(entry.getKey())) {
                    // key should be included, and does not need to be changed

                    // TODO make this config-setting mechanism more universal
                    // currently e.g. it will not admit a tool-specific property.
                    // thinking either we know about the tool here,
                    // or we don't allow unadorned keys to be set
                    // (require use of BROOKLYN_CONFIG_KEY_PREFIX)
                } else {
                    // this key is not applicable here; ignore it
                    continue;
                }
                args.putStringKey(key, entry.getValue());
            }

            if (LOG.isTraceEnabled()) LOG.trace("creating ssh session for "+args);
            if (!user.equals(args.get(SshTool.PROP_USER))) {
                LOG.warn("User mismatch configuring ssh for "+this+": preferring user "+args.get(SshTool.PROP_USER)+" over "+user);
                user = args.get(SshTool.PROP_USER);
            }

            // look up tool class
            String sshToolClass = args.get(SshTool.PROP_TOOL_CLASS);
            if (sshToolClass==null) sshToolClass = SshjTool.class.getName();
            SshTool ssh = (SshTool) Class.forName(sshToolClass).getConstructor(Map.class).newInstance(args.getAllConfig());

            if (LOG.isTraceEnabled()) LOG.trace("using ssh-tool {} (of type {}); props ", ssh, sshToolClass);

            Tasks.setBlockingDetails("Opening ssh connection");
            try {
                ssh.connect();
            } finally {
                Tasks.setBlockingDetails(null);
            }
            previouslyConnected = true;
            return ssh;
        } catch (Exception e) {
            if (previouslyConnected) throw Throwables.propagate(e);
            // subsequent connection (above) most likely network failure, our remarks below won't help
            // on first connection include additional information if we can't connect, to help with debugging
            String rootCause = Throwables.getRootCause(e).getMessage();
            throw new IllegalStateException("Cannot establish ssh connection to "+user+" @ "+this+
                    (rootCause!=null && !rootCause.isEmpty() ? " ("+rootCause+")" : "")+". \n"+
                    "Ensure that passwordless and passphraseless ssh access is enabled using standard keys from ~/.ssh or " +
                    "as configured in brooklyn.properties. " +
                    "Check that the target host is accessible, " +
                    "that credentials are correct (location and permissions if using a key), " +
                    "that the SFTP subsystem is available on the remote side, " +
                    "and that there is sufficient random noise in /dev/random on both ends. " +
                    "To debug less common causes, see the original error in the trace or log, and/or enable 'net.schmizz' (sshj) logging."
                    , e);
        }
    }

    // TODO submitCommands and submitScript which submit objects we can subsequently poll (cf JcloudsSshMachineLocation.submitRunScript)

    /**
     * Executes a set of commands, directly on the target machine (no wrapping in script).
     * Joined using {@literal ;} by default.
     * <p>
     * Stdout and stderr will be logged automatically to brooklyn.SSH logger, unless the
     * flags 'noStdoutLogging' and 'noStderrLogging' are set. To set a logging prefix, use
     * the flag 'logPrefix'.
     * <p>
     * Currently runs the commands in an interactive/login shell
     * by passing each as a line to bash. To terminate early, use:
     * <pre>
     * foo || exit 1
     * </pre>
     * It may be desirable instead, in some situations, to wrap as:
     * <pre>
     * { line1 ; } && { line2 ; } ...
     * </pre>
     * and run as a single command (possibly not as an interactive/login
     * shell) causing the script to exit on the first command which fails.
     * <p>
     * Currently this has to be done by the caller.
     * (If desired we can add a flag {@code exitIfAnyNonZero} to support this mode,
     * and/or {@code commandPrepend} and {@code commandAppend} similar to
     * (currently supported in SshjTool) {@code separator}.)
     */
    public int execCommands(String summaryForLogging, List<String> commands) {
        return execCommands(MutableMap.<String,Object>of(), summaryForLogging, commands, MutableMap.<String,Object>of());
    }

    public int execCommands(Map<String,?> props, String summaryForLogging, List<String> commands) {
        return execCommands(props, summaryForLogging, commands, MutableMap.<String,Object>of());
    }

    public int execCommands(String summaryForLogging, List<String> commands, Map<String,?> env) {
        return execCommands(MutableMap.<String,Object>of(), summaryForLogging, commands, env);
    }

    public int execCommands(Map<String,?> props, String summaryForLogging, List<String> commands, Map<String,?> env) {
        return newExecWithLoggingHelpers().execCommands(props, summaryForLogging, commands, env);
    }

    /**
     * Executes a set of commands, wrapped as a script sent to the remote machine.
     * <p>
     * Stdout and stderr will be logged automatically to brooklyn.SSH logger, unless the
     * flags 'noStdoutLogging' and 'noStderrLogging' are set. To set a logging prefix, use
     * the flag 'logPrefix'.
     */
    public int execScript(String summaryForLogging, List<String> commands) {
        return execScript(MutableMap.<String,Object>of(), summaryForLogging, commands, MutableMap.<String,Object>of());
    }

    public int execScript(Map<String,?> props, String summaryForLogging, List<String> commands) {
        return execScript(props, summaryForLogging, commands, MutableMap.<String,Object>of());
    }

    public int execScript(String summaryForLogging, List<String> commands, Map<String,?> env) {
        return execScript(MutableMap.<String,Object>of(), summaryForLogging, commands, env);
    }

    public int execScript(Map<String,?> props, String summaryForLogging, List<String> commands, Map<String,?> env) {
        return newExecWithLoggingHelpers().execScript(props, summaryForLogging, commands, env);
    }

    protected ExecWithLoggingHelpers newExecWithLoggingHelpers() {
        return new ExecWithLoggingHelpers("SSH") {
            @Override
            protected <T> T execWithTool(MutableMap<String, Object> props, Function<ShellTool, T> function) {
                return execSsh(props, function);
            }
            @Override
            protected void preExecChecks() {
                Preconditions.checkNotNull(address, "host address must be specified for ssh");
            }
            @Override
            protected String constructDefaultLoggingPrefix(ConfigBag execFlags) {
                String hostname = getAddress().getHostName();
                Integer port = execFlags.peek(SshTool.PROP_PORT);
                if (port == null) port = getConfig(ConfigUtils.prefixedKey(SshTool.BROOKLYN_CONFIG_KEY_PREFIX, SshTool.PROP_PORT));
                return (user != null ? user+"@" : "") + hostname + (port != null ? ":"+port : "");
            }
            @Override
            protected String getTargetName() {
                return ""+SshMachineLocation.this;
            }
        }.logger(logSsh);
    }

    protected int execWithLogging(Map<String,?> props, String summaryForLogging, List<String> commands, Map env, final Closure<Integer> execCommand) {
        return newExecWithLoggingHelpers().execWithLogging(props, summaryForLogging, commands, env, new ExecRunner() {
            @Override
            public int exec(ShellTool ssh, Map<String, ?> flags, List<String> cmds, Map<String, ?> env) {
                return execCommand.call(ssh, flags, cmds, env);
            }});
    }

    public int copyTo(File src, File destination) {
        return copyTo(MutableMap.<String,Object>of(), src, destination);
    }

    public int copyTo(Map<String,?> props, File src, File destination) {
        return copyTo(props, src, destination.getPath());
    }

    public int copyTo(File src, String destination) {
        return copyTo(MutableMap.<String,Object>of(), src, destination);
    }

    public int copyTo(Map<String,?> props, File src, String destination) {
        Preconditions.checkNotNull(address, "Host address must be specified for scp");
        Preconditions.checkArgument(src.exists(), "File %s must exist for scp", src.getPath());
        try {
            return copyTo(props, new FileInputStream(src), src.length(), destination);
        } catch (FileNotFoundException e) {
            throw Throwables.propagate(e);
        }
    }

    public int copyTo(Reader src, String destination) {
        return copyTo(MutableMap.<String,Object>of(), src, destination);
    }

    public int copyTo(Map<String,?> props, Reader src, String destination) {
        return copyTo(props, new ReaderInputStream(src), destination);
    }

    public int copyTo(InputStream src, String destination) {
        return copyTo(MutableMap.<String,Object>of(), src, destination);
    }

    public int copyTo(InputStream src, long filesize, String destination) {
        return copyTo(MutableMap.<String,Object>of(), src, filesize, destination);
    }

    // FIXME the return code is not a reliable indicator of success or failure
    public int copyTo(final Map<String,?> props, final InputStream src, final long filesize, final String destination) {
        if (filesize == -1) {
            return copyTo(props, src, destination);
        } else {
            return execSsh(props, new Function<ShellTool,Integer>() {
                public Integer apply(ShellTool ssh) {
                    return ((SshTool) ssh).copyToServer(props, new KnownSizeInputStream(src, filesize), destination);
                }});
        }
    }

    // FIXME the return code is not a reliable indicator of success or failure
    // Closes input stream before returning
    public int copyTo(final Map<String,?> props, final InputStream src, final String destination) {
        return execSsh(props, new Function<ShellTool,Integer>() {
            public Integer apply(ShellTool ssh) {
                return ((SshTool)ssh).copyToServer(props, src, destination);
            }});
    }

    // FIXME the return code is not a reliable indicator of success or failure
    public int copyFrom(String remote, String local) {
        return copyFrom(MutableMap.<String,Object>of(), remote, local);
    }

    public int copyFrom(final Map<String,?> props, final String remote, final String local) {
        return execSsh(props, new Function<ShellTool,Integer>() {
            public Integer apply(ShellTool ssh) {
                return ((SshTool)ssh).copyFromServer(props, remote, new File(local));
            }});
    }

    public int installTo(String url, String destPath) {
        return installTo(MutableMap.<String, Object>of(), url, destPath);
    }

    public int installTo(Map<String,?> props, String url, String destPath) {
        return installTo(ResourceUtils.create(this), props, url, destPath);
    }

    public int installTo(ResourceUtils loader, String url, String destPath) {
        return installTo(loader, MutableMap.<String, Object>of(), url, destPath);
    }

    /**
     * Installs the given URL at the indicated destination path.
     * <p>
     * Attempts to curl the source URL on the remote machine,
     * then if that fails, loads locally (from classpath or file) and transfers.
     * <p>
     * Use {@link ArchiveUtils} to handle directories and their contents properly.
     *
     * TODO allow s3://bucket/file URIs for AWS S3 resources
     * TODO use PAX-URL style URIs for maven artifacts
     * TODO use subtasks here for greater visibility?; deprecate in favour of SshTasks.installFromUrl?
     *
     * @param utils A {@link ResourceUtils} that can resolve the source URLs
     * @param url The source URL to be installed
     * @param destPath The file to be created on the destination
     *
     * @see ArchiveUtils#deploy(String, SshMachineLocation, String)
     * @see ArchiveUtils#deploy(String, SshMachineLocation, String, String)
     * @see ResourceUtils#getResourceFromUrl(String)
     */
    public int installTo(ResourceUtils utils, Map<String,?> props, String url, String destPath) {
        LOG.debug("installing {} to {} on {}, attempting remote curl", new Object[] { url, destPath, this });

        try {
            PipedInputStream insO = new PipedInputStream(); OutputStream outO = new PipedOutputStream(insO);
            PipedInputStream insE = new PipedInputStream(); OutputStream outE = new PipedOutputStream(insE);
            StreamGobbler sgsO = new StreamGobbler(insO, null, LOG); sgsO.setLogPrefix("[curl @ "+address+":stdout] ").start();
            // fixed: stderr gobbler was mislabeled with a ":stdout" prefix (copy-paste error)
            StreamGobbler sgsE = new StreamGobbler(insE, null, LOG); sgsE.setLogPrefix("[curl @ "+address+":stderr] ").start();
            Map<String, ?> sshProps = MutableMap.<String, Object>builder().putAll(props).put("out", outO).put("err", outE).build();
            int result = execScript(sshProps, "copying remote resource "+url+" to server",  ImmutableList.of(
                    BashCommands.INSTALL_CURL, // TODO should hold the 'installing' mutex
                    "mkdir -p `dirname '"+destPath+"'`",
                    "curl "+url+" -L --silent --insecure --show-error --fail --connect-timeout 60 --max-time 600 --retry 5 -o '"+destPath+"'"));
            sgsO.close();
            sgsE.close();
            if (result != 0) {
                LOG.debug("installing {} to {} on {}, curl failed, attempting local fetch and copy", new Object[] { url, destPath, this });
                try {
                    Tasks.setBlockingDetails("retrieving resource "+url+" for copying across");
                    InputStream stream = utils.getResourceFromUrl(url);
                    Tasks.setBlockingDetails("copying resource "+url+" to server");
                    result = copyTo(props, stream, destPath);
                } finally {
                    Tasks.setBlockingDetails(null);
                }
            }
            if (result == 0) {
                LOG.debug("installing {} complete; {} on {}", new Object[] { url, destPath, this });
            } else {
                LOG.warn("installing {} failed; {} on {}: {}", new Object[] { url, destPath, this, result });
            }
            return result;
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    @Override
    public String toString() {
        return "SshMachineLocation["+getDisplayName()+":"+address+"]";
    }

    @Override
    public String toVerboseString() {
        return Objects.toStringHelper(this).omitNullValues()
                .add("id", getId()).add("name", getDisplayName())
                .add("user", getUser()).add("address", getAddress()).add("port", getConfig(SSH_PORT))
                .add("parentLocation", getParent())
                .toString();
    }

    /**
     * @see #obtainPort(PortRange)
     * @see PortRanges#ANY_HIGH_PORT
     */
    @Override
    public boolean obtainSpecificPort(int portNumber) {
        synchronized (usedPorts) {
            // TODO Does not yet check if the port really is free on this machine
            if (usedPorts.contains(portNumber)) {
                return false;
            } else {
                usedPorts.add(portNumber);
                return true;
            }
        }
    }

    @Override
    public int obtainPort(PortRange range) {
        synchronized (usedPorts) {
            for (int p: range)
                if (obtainSpecificPort(p)) return p;
            if (LOG.isDebugEnabled()) LOG.debug("unable to find port in {} on {}; returning -1", range, this);
            return -1;
        }
    }

    @Override
    public void releasePort(int portNumber) {
        synchronized (usedPorts) {
            usedPorts.remove((Object) portNumber);
        }
    }

    public boolean isSshable() {
        String cmd = "date";
        try {
            try {
                Socket s = new Socket(getAddress(), getPort());
                s.close();
            } catch (IOException e) {
                if (LOG.isDebugEnabled()) LOG.debug(""+this+" not [yet] reachable (socket "+getAddress()+":"+getPort()+"): "+e);
                return false;
            }
            // this should do execCommands because sftp subsystem might not be available (or sometimes seems to take a while for it to become so?)
            int result = execCommands(MutableMap.<String,Object>of(), "isSshable", ImmutableList.of(cmd));
            if (result == 0) {
                return true;
            } else {
                if (LOG.isDebugEnabled()) LOG.debug("Not reachable: {}, executing `{}`, exit code {}", new Object[] {this, cmd, result});
                return false;
            }
        } catch (SshException e) {
            if (LOG.isDebugEnabled()) LOG.debug("Exception checking if "+this+" is reachable; assuming not", e);
            return false;
        } catch (IllegalStateException e) {
            if (LOG.isDebugEnabled()) LOG.debug("Exception checking if "+this+" is reachable; assuming not", e);
            return false;
        } catch (RuntimeException e) {
            if (Exceptions.getFirstThrowableOfType(e, IOException.class) != null) {
                if (LOG.isDebugEnabled()) LOG.debug("Exception checking if "+this+" is reachable; assuming not", e);
                return false;
            } else {
                throw e;
            }
        }
    }

    @Override
    public OsDetails getOsDetails() {
        return getMachineDetails().getOsDetails();
    }

    @Override
    public MachineDetails getMachineDetails() {
        MachineDetails details = machineDetails;
        if (details == null) {
            // Or could just load and store several times
            Tasks.setBlockingDetails("Waiting for machine details");
            try {
                synchronized (machineDetailsLock) {
                    details = machineDetails;
                    if (details == null) {
                        machineDetails = details = BasicMachineDetails.forSshMachineLocation(this);
                    }
                }
            } finally {
                Tasks.resetBlockingDetails();
            }
        }
        return details;
    }

    @Override
    public void acquireMutex(String mutexId, String description) throws InterruptedException {
        mutexSupport.acquireMutex(mutexId, description);
    }

    @Override
    public boolean tryAcquireMutex(String mutexId, String description) {
        return mutexSupport.tryAcquireMutex(mutexId, description);
    }

    @Override
    public void releaseMutex(String mutexId) {
        mutexSupport.releaseMutex(mutexId);
    }

    @Override
    public boolean hasMutex(String mutexId) {
        return mutexSupport.hasMutex(mutexId);
    }

    //We want the SshMachineLocation to be serializable and therefore the pool needs to be dealt with correctly.
    //In this case we are not serializing the pool (we made the field transient) and create a new pool when deserialized.
    //This fix is currently needed for experiments, but isn't used in normal Brooklyn usage.
    private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        sshPoolCache = buildSshToolPoolCacheLoader();
    }

    /** returns the un-passphrased key-pair info if a key is being used, or else null */
    public KeyPair findKeyPair() {
        String fn = getConfig(SshTool.PROP_PRIVATE_KEY_FILE);
        ResourceUtils r = ResourceUtils.create(this);
        if (fn!=null) return SecureKeys.readPem(r.getResourceFromUrl(fn), getConfig(SshTool.PROP_PRIVATE_KEY_PASSPHRASE));
        String data = getConfig(SshTool.PROP_PRIVATE_KEY_DATA);
        if (data!=null) return SecureKeys.readPem(new ReaderInputStream(new StringReader(data)), getConfig(SshTool.PROP_PRIVATE_KEY_PASSPHRASE));
        if (findPassword()!=null)
            // if above not specified, and password is, use password
            return null;
        // fall back to id_rsa and id_dsa
        if (new File( Urls.mergePaths(System.getProperty("user.home"), ".ssh/id_rsa") ).exists() )
            return SecureKeys.readPem(r.getResourceFromUrl("~/.ssh/id_rsa"), getConfig(SshTool.PROP_PRIVATE_KEY_PASSPHRASE));
        if (new File( Urls.mergePaths(System.getProperty("user.home"), ".ssh/id_dsa") ).exists() )
            return SecureKeys.readPem(r.getResourceFromUrl("~/.ssh/id_dsa"), getConfig(SshTool.PROP_PRIVATE_KEY_PASSPHRASE));
        LOG.warn("Unable to extract any key or passphrase data in request to findKeyPair for "+this);
        return null;
    }

    /** returns the password being used to log in, if a password is being used, or else null */
    public String findPassword() {
        return getConfig(SshTool.PROP_PASSWORD);
    }

}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.settings.notification; import android.app.Notification; import android.content.Context; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.pm.ApplicationInfo; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.content.pm.ResolveInfo; import android.os.Bundle; import android.os.UserHandle; import android.preference.Preference; import android.preference.Preference.OnPreferenceChangeListener; import android.preference.Preference.OnPreferenceClickListener; import android.preference.SwitchPreference; import android.provider.Settings; import android.text.TextUtils; import android.util.ArrayMap; import android.util.Log; import android.widget.Toast; import com.android.internal.logging.MetricsLogger; import com.android.internal.widget.LockPatternUtils; import com.android.settings.AppHeader; import com.android.settings.R; import com.android.settings.SettingsPreferenceFragment; import com.android.settings.Utils; import com.android.settings.applications.AppInfoBase; import com.android.settings.applications.AppInfoWithHeader; import com.android.settings.notification.NotificationBackend.AppRow; import java.util.List; /** These settings are per app, so should not be returned in global search results. 
 */
public class AppNotificationSettings extends SettingsPreferenceFragment {
    private static final String TAG = "AppNotificationSettings";
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);

    // Preference keys; presumably match entries in res/xml/app_notification_settings.xml —
    // TODO confirm against that resource.
    private static final String KEY_BLOCK = "block";
    private static final String KEY_PRIORITY = "priority";
    private static final String KEY_PEEKABLE = "peekable";
    private static final String KEY_SENSITIVE = "sensitive";
    private static final String KEY_APP_SETTINGS = "app_settings";

    // Template intent used to discover per-app notification preference activities.
    private static final Intent APP_NOTIFICATION_PREFS_CATEGORY_INTENT
            = new Intent(Intent.ACTION_MAIN)
                .addCategory(Notification.INTENT_CATEGORY_NOTIFICATION_PREFERENCES);

    private final NotificationBackend mBackend = new NotificationBackend();

    private Context mContext;
    private SwitchPreference mBlock;
    private SwitchPreference mPriority;
    private SwitchPreference mPeekable;
    private SwitchPreference mSensitive;
    private AppRow mAppRow;
    private boolean mCreated;       // guards against duplicate onActivityCreated calls
    private boolean mIsSystemPackage;
    private int mUid;

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (DEBUG) Log.d(TAG, "onActivityCreated mCreated=" + mCreated);
        if (mCreated) {
            Log.w(TAG, "onActivityCreated: ignoring duplicate call");
            return;
        }
        mCreated = true;
        if (mAppRow == null) return;
        AppHeader.createAppHeader(this, mAppRow.icon, mAppRow.label,
                AppInfoWithHeader.getInfoIntent(this, mAppRow.pkg));
    }

    @Override
    protected int getMetricsCategory() {
        return MetricsLogger.NOTIFICATION_APP_NOTIFICATION;
    }

    // Resolves the target package/uid (from fragment args or the launching intent),
    // loads its notification row, wires up the four switches, and hooks the optional
    // per-app settings activity. Finishes with a toast on any missing/invalid input.
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mContext = getActivity();
        Intent intent = getActivity().getIntent();
        Bundle args = getArguments();
        if (DEBUG) Log.d(TAG, "onCreate getIntent()=" + intent);
        if (intent == null && args == null) {
            Log.w(TAG, "No intent");
            toastAndFinish();
            return;
        }

        // Fragment args take precedence over intent extras.
        final String pkg = args != null && args.containsKey(AppInfoBase.ARG_PACKAGE_NAME)
                ? args.getString(AppInfoBase.ARG_PACKAGE_NAME)
                : intent.getStringExtra(Settings.EXTRA_APP_PACKAGE);
        mUid = args != null && args.containsKey(AppInfoBase.ARG_PACKAGE_UID)
                ? args.getInt(AppInfoBase.ARG_PACKAGE_UID)
                : intent.getIntExtra(Settings.EXTRA_APP_UID, -1);
        if (mUid == -1 || TextUtils.isEmpty(pkg)) {
            Log.w(TAG, "Missing extras: " + Settings.EXTRA_APP_PACKAGE + " was " + pkg + ", "
                    + Settings.EXTRA_APP_UID + " was " + mUid);
            toastAndFinish();
            return;
        }

        if (DEBUG) Log.d(TAG, "Load details for pkg=" + pkg + " uid=" + mUid);
        final PackageManager pm = getPackageManager();
        final PackageInfo info = findPackageInfo(pm, pkg, mUid);
        if (info == null) {
            Log.w(TAG, "Failed to find package info: " + Settings.EXTRA_APP_PACKAGE + " was "
                    + pkg + ", " + Settings.EXTRA_APP_UID + " was " + mUid);
            toastAndFinish();
            return;
        }
        mIsSystemPackage = Utils.isSystemPackage(pm, info);

        addPreferencesFromResource(R.xml.app_notification_settings);
        mBlock = (SwitchPreference) findPreference(KEY_BLOCK);
        mPriority = (SwitchPreference) findPreference(KEY_PRIORITY);
        mPeekable = (SwitchPreference) findPreference(KEY_PEEKABLE);
        mSensitive = (SwitchPreference) findPreference(KEY_SENSITIVE);

        mAppRow = mBackend.loadAppRow(pm, info.applicationInfo);

        // load settings intent
        ArrayMap<String, AppRow> rows = new ArrayMap<String, AppRow>();
        rows.put(mAppRow.pkg, mAppRow);
        collectConfigActivities(getPackageManager(), rows);

        mBlock.setChecked(mAppRow.banned);
        updateDependents(mAppRow.banned);
        mPriority.setChecked(mAppRow.priority);
        mPeekable.setChecked(mAppRow.peekable);
        mSensitive.setChecked(mAppRow.sensitive);

        mBlock.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
            @Override
            public boolean onPreferenceChange(Preference preference, Object newValue) {
                final boolean banned = (Boolean) newValue;
                if (banned) {
                    MetricsLogger.action(getActivity(), MetricsLogger.ACTION_BAN_APP_NOTES, pkg);
                }
                // Only flip dependent preference visibility if the backend accepted the change.
                final boolean success = mBackend.setNotificationsBanned(pkg, mUid, banned);
                if (success) {
                    updateDependents(banned);
                }
                return success;
            }
        });

        mPriority.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
            @Override
            public boolean onPreferenceChange(Preference preference, Object newValue) {
                final boolean priority = (Boolean) newValue;
                return mBackend.setHighPriority(pkg, mUid, priority);
            }
        });

        mPeekable.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
            @Override
            public boolean onPreferenceChange(Preference preference, Object newValue) {
                final boolean peekable = (Boolean) newValue;
                return mBackend.setPeekable(pkg, mUid, peekable);
            }
        });

        mSensitive.setOnPreferenceChangeListener(new OnPreferenceChangeListener() {
            @Override
            public boolean onPreferenceChange(Preference preference, Object newValue) {
                final boolean sensitive = (Boolean) newValue;
                return mBackend.setSensitive(pkg, mUid, sensitive);
            }
        });

        if (mAppRow.settingsIntent != null) {
            findPreference(KEY_APP_SETTINGS).setOnPreferenceClickListener(
                    new OnPreferenceClickListener() {
                        @Override
                        public boolean onPreferenceClick(Preference preference) {
                            mContext.startActivity(mAppRow.settingsIntent);
                            return true;
                        }
                    });
        } else {
            removePreference(KEY_APP_SETTINGS);
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        if (mUid != -1 && getPackageManager().getPackagesForUid(mUid) == null) {
            // App isn't around anymore, must have been removed.
            finish();
        }
    }

    // Recomputes which switches are visible for the current banned state.
    private void updateDependents(boolean banned) {
        final boolean lockscreenSecure = new LockPatternUtils(getActivity()).isSecure(
                UserHandle.myUserId());
        final boolean lockscreenNotificationsEnabled = getLockscreenNotificationsEnabled();
        final boolean allowPrivate = getLockscreenAllowPrivateNotifications();
        setVisible(mBlock, !mIsSystemPackage);
        setVisible(mPriority, mIsSystemPackage || !banned);
        setVisible(mPeekable, mIsSystemPackage || !banned);
        // NOTE: && binds tighter than ||, so the lockscreen conditions only gate the
        // "!banned" side, never the system-package side.
        setVisible(mSensitive, mIsSystemPackage || !banned && lockscreenSecure
                && lockscreenNotificationsEnabled && allowPrivate);
    }

    // Adds/removes a preference from the screen; no-op when already in the desired state.
    private void setVisible(Preference p, boolean visible) {
        final boolean isVisible = getPreferenceScreen().findPreference(p.getKey()) != null;
        if (isVisible == visible) return;
        if (visible) {
            getPreferenceScreen().addPreference(p);
        } else {
            getPreferenceScreen().removePreference(p);
        }
    }

    private boolean getLockscreenNotificationsEnabled() {
        return Settings.Secure.getInt(getContentResolver(),
                Settings.Secure.LOCK_SCREEN_SHOW_NOTIFICATIONS, 0) != 0;
    }

    private boolean getLockscreenAllowPrivateNotifications() {
        return Settings.Secure.getInt(getContentResolver(),
                Settings.Secure.LOCK_SCREEN_ALLOW_PRIVATE_NOTIFICATIONS, 0) != 0;
    }

    private void toastAndFinish() {
        Toast.makeText(mContext, R.string.app_not_found_dlg_text, Toast.LENGTH_SHORT).show();
        getActivity().finish();
    }

    // Returns the PackageInfo for pkg only if pkg actually belongs to uid; null otherwise.
    private static PackageInfo findPackageInfo(PackageManager pm, String pkg, int uid) {
        final String[] packages = pm.getPackagesForUid(uid);
        if (packages != null && pkg != null) {
            final int N = packages.length;
            for (int i = 0; i < N; i++) {
                final String p = packages[i];
                if (pkg.equals(p)) {
                    try {
                        return pm.getPackageInfo(pkg, PackageManager.GET_SIGNATURES);
                    } catch (NameNotFoundException e) {
                        Log.w(TAG, "Failed to load package " + pkg, e);
                    }
                }
            }
        }
        return null;
    }

    public static List<ResolveInfo> queryNotificationConfigActivities(PackageManager pm) {
        if (DEBUG) Log.d(TAG, "APP_NOTIFICATION_PREFS_CATEGORY_INTENT is " +
                APP_NOTIFICATION_PREFS_CATEGORY_INTENT);
        final List<ResolveInfo> resolveInfos = pm.queryIntentActivities(
                APP_NOTIFICATION_PREFS_CATEGORY_INTENT,
                0 //PackageManager.MATCH_DEFAULT_ONLY
        );
        return resolveInfos;
    }

    public static void collectConfigActivities(PackageManager pm, ArrayMap<String, AppRow> rows) {
        final List<ResolveInfo> resolveInfos = queryNotificationConfigActivities(pm);
        applyConfigActivities(pm, rows, resolveInfos);
    }

    // Attaches a settingsIntent to each known AppRow whose package declares a
    // notification-preferences activity; unknown and duplicate packages are skipped.
    public static void applyConfigActivities(PackageManager pm, ArrayMap<String, AppRow> rows,
            List<ResolveInfo> resolveInfos) {
        if (DEBUG) Log.d(TAG, "Found " + resolveInfos.size() + " preference activities"
                + (resolveInfos.size() == 0 ? " ;_;" : ""));
        for (ResolveInfo ri : resolveInfos) {
            final ActivityInfo activityInfo = ri.activityInfo;
            final ApplicationInfo appInfo = activityInfo.applicationInfo;
            final AppRow row = rows.get(appInfo.packageName);
            if (row == null) {
                if (DEBUG) Log.v(TAG, "Ignoring notification preference activity ("
                        + activityInfo.name + ") for unknown package "
                        + activityInfo.packageName);
                continue;
            }
            if (row.settingsIntent != null) {
                if (DEBUG) Log.v(TAG, "Ignoring duplicate notification preference activity ("
                        + activityInfo.name + ") for package "
                        + activityInfo.packageName);
                continue;
            }
            row.settingsIntent = new Intent(APP_NOTIFICATION_PREFS_CATEGORY_INTENT)
                    .setClassName(activityInfo.packageName, activityInfo.name);
        }
    }
}
/*
 * #%L
 * MariaDB4j
 * %%
 * Copyright (C) 2012 - 2017 Michael Vorburger
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package ch.vorburger.mariadb4j;

import ch.vorburger.exec.ManagedProcess;
import ch.vorburger.exec.ManagedProcessBuilder;
import ch.vorburger.exec.ManagedProcessException;
import ch.vorburger.exec.ManagedProcessListener;
import ch.vorburger.exec.OutputStreamLogDispatcher;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Provides capability to install, start, and use an embedded database.
 *
 * @author Michael Vorburger
 * @author Michael Seaton
 * @author Gordon Little
 */
public class DB {

    private static final Logger logger = LoggerFactory.getLogger(DB.class);

    protected final DBConfiguration configuration;

    private File baseDir;
    private File libDir;
    private File dataDir;
    private ManagedProcess mysqldProcess;

    // Maximum time (ms) to wait for mysqld's "ready for connections" console line on start().
    protected int dbStartMaxWaitInMS = 30000;

    protected DB(DBConfiguration config) {
        configuration = config;
    }

    public DBConfiguration getConfiguration() {
        return configuration;
    }

    /**
     * This factory method is the mechanism for constructing a new embedded database for use. This
     * method automatically installs the database and prepares it for use.
     *
     * @param config Configuration of the embedded instance
     * @return a new DB instance
     * @throws ManagedProcessException if something fatal went wrong
     */
    public static DB newEmbeddedDB(DBConfiguration config) throws ManagedProcessException {
        DB db = new DB(config);
        db.prepareDirectories();
        db.unpackEmbeddedDb();
        db.install();
        return db;
    }

    /**
     * This factory method is the mechanism for constructing a new embedded database for use. This
     * method automatically installs the database and prepares it for use with default
     * configuration, allowing only for specifying port.
     *
     * @param port the port to start the embedded database on
     * @return a new DB instance
     * @throws ManagedProcessException if something fatal went wrong
     */
    public static DB newEmbeddedDB(int port) throws ManagedProcessException {
        DBConfigurationBuilder config = new DBConfigurationBuilder();
        config.setPort(port);
        return newEmbeddedDB(config.build());
    }

    // Builds (but does not start) the mysql_install_db process that initializes the data dir.
    protected ManagedProcess createDBInstallProcess() throws ManagedProcessException, IOException {
        logger.info("Installing a new embedded database to: " + baseDir);
        // mysql_install_db lives in bin/ on some distributions and scripts/ on others.
        File installDbCmdFile = newExecutableFile("bin", "mysql_install_db");
        if (!installDbCmdFile.exists()) {
            installDbCmdFile = newExecutableFile("scripts", "mysql_install_db");
        }
        if (!installDbCmdFile.exists()) {
            throw new ManagedProcessException(
                    "mysql_install_db was not found, neither in bin/ nor in scripts/ under "
                            + baseDir.getAbsolutePath());
        }
        ManagedProcessBuilder builder = new ManagedProcessBuilder(installDbCmdFile);
        builder.setOutputStreamLogDispatcher(getOutputStreamLogDispatcher("mysql_install_db"));
        builder.getEnvironment().put(configuration.getOSLibraryEnvironmentVarName(), libDir.getAbsolutePath());
        builder.setWorkingDirectory(baseDir);
        if (!configuration.isWindows()) {
            builder.addFileArgument("--datadir", dataDir);
            builder.addFileArgument("--basedir", baseDir);
            builder.addArgument("--no-defaults");
            builder.addArgument("--force");
            builder.addArgument("--skip-name-resolve");
            // builder.addArgument("--verbose");
        } else {
            builder.addFileArgument("--datadir", dataDir.getCanonicalFile());
        }
        return builder.build();
    }

    /**
     * Installs the database to the location specified in the configuration.
     *
     * @throws ManagedProcessException if something fatal went wrong
     */
    synchronized protected void install() throws ManagedProcessException {
        try {
            ManagedProcess mysqlInstallProcess = createDBInstallProcess();
            mysqlInstallProcess.start();
            mysqlInstallProcess.waitForExit();
        } catch (Exception e) {
            throw new ManagedProcessException("An error occurred while installing the database", e);
        }
        logger.info("Installation complete.");
    }

    protected String getWinExeExt() {
        return configuration.isWindows() ? ".exe" : "";
    }

    /**
     * Starts up the database, using the data directory and port specified in the configuration.
     *
     * @throws ManagedProcessException if something fatal went wrong
     */
    public synchronized void start() throws ManagedProcessException {
        logger.info("Starting up the database...");
        boolean ready = false;
        try {
            mysqldProcess = startPreparation();
            // Block until mysqld prints its "ready for connections" marker, or time out.
            ready = mysqldProcess.startAndWaitForConsoleMessageMaxMs(getReadyForConnectionsTag(), dbStartMaxWaitInMS);
        } catch (Exception e) {
            logger.error("failed to start mysqld", e);
            throw new ManagedProcessException("An error occurred while starting the database", e);
        }
        if (!ready) {
            // Kill the half-started process before reporting failure.
            if (mysqldProcess != null && mysqldProcess.isAlive()) {
                mysqldProcess.destroy();
            }
            throw new ManagedProcessException("Database does not seem to have started up correctly? Magic string not seen in "
                    + dbStartMaxWaitInMS + "ms: " + getReadyForConnectionsTag()
                    + mysqldProcess.getLastConsoleLines());
        }
        logger.info("Database startup complete.");
    }

    protected String getReadyForConnectionsTag() {
        return "mysqld" + getWinExeExt() + ": ready for connections.";
    }

    // Builds (but does not start) the mysqld process with all configured arguments.
    synchronized ManagedProcess startPreparation() throws ManagedProcessException, IOException {
        ManagedProcessBuilder builder = new ManagedProcessBuilder(newExecutableFile("bin", "mysqld"));
        builder.setOutputStreamLogDispatcher(getOutputStreamLogDispatcher("mysqld"));
        builder.getEnvironment().put(configuration.getOSLibraryEnvironmentVarName(), libDir.getAbsolutePath());
        builder.addArgument("--no-defaults"); // *** THIS MUST COME FIRST ***
        builder.addArgument("--console");
        if (configuration.isSecurityDisabled()) {
            builder.addArgument("--skip-grant-tables");
        }
        if (!hasArgument("--max_allowed_packet")) {
            builder.addArgument("--max_allowed_packet=64M");
        }
        builder.addFileArgument("--basedir", baseDir).setWorkingDirectory(baseDir);
        if (!configuration.isWindows()) {
            builder.addFileArgument("--datadir", dataDir);
        } else {
            builder.addFileArgument("--datadir", dataDir.getCanonicalFile());
        }
        addPortAndMaybeSocketArguments(builder);
        for (String arg : configuration.getArgs()) {
            builder.addArgument(arg);
        }
        if (StringUtils.isNotBlank(configuration.getDefaultCharacterSet())) {
            // NOTE(review): two-arg addArgument with a trailing "=" — presumably
            // ManagedProcessBuilder joins these without a space; verify against its API.
            builder.addArgument("--character-set-server=", configuration.getDefaultCharacterSet());
        }
        cleanupOnExit();
        // because cleanupOnExit() just installed our (class DB) own
        // Shutdown hook, we don't need the one from ManagedProcess:
        builder.setDestroyOnShutdown(false);
        logger.info("mysqld executable: " + builder.getExecutable());
        return builder.build();
    }

    // True if any user-configured argument starts with the given option name.
    protected boolean hasArgument(final String argumentName) {
        for (String argument : configuration.getArgs()) {
            if (argument.startsWith(argumentName)) {
                return true;
            }
        }
        return false;
    }

    protected File newExecutableFile(String dir, String exec) {
        return new File(baseDir, dir + "/" + exec + getWinExeExt());
    }

    protected void addPortAndMaybeSocketArguments(ManagedProcessBuilder builder) throws IOException {
        builder.addArgument("--port=" + configuration.getPort());
        if (!configuration.isWindows()) {
            builder.addFileArgument("--socket", getAbsoluteSocketFile());
        }
    }

    // Unix clients connect via the socket; Windows clients via TCP port.
    protected void addSocketOrPortArgument(ManagedProcessBuilder builder) throws IOException {
        if (!configuration.isWindows()) {
            builder.addFileArgument("--socket", getAbsoluteSocketFile());
        } else {
            builder.addArgument("--port=" + configuration.getPort());
        }
    }

    /**
     * Config Socket as absolute path. By default this is the case because DBConfigurationBuilder
     * creates the socket in /tmp, but if a user uses setSocket() he may give a relative location,
     * so we double check.
     *
     * @return config.getSocket() as File getAbsolutePath()
     */
    protected File getAbsoluteSocketFile() {
        String socket = configuration.getSocket();
        File socketFile = new File(socket);
        return socketFile.getAbsoluteFile();
    }

    // Convenience overloads; all funnel into the fully-parameterized source(...) below.
    public void source(String resource) throws ManagedProcessException {
        source(resource, null, null, null);
    }

    public void source(InputStream resource) throws ManagedProcessException {
        source(resource, null, null, null);
    }

    public void source(String resource, String dbName) throws ManagedProcessException {
        source(resource, null, null, dbName);
    }

    public void source(InputStream resource, String dbName) throws ManagedProcessException {
        source(resource, null, null, dbName);
    }

    /**
     * Takes in a {@link InputStream} and sources it via the mysql command line tool.
     *
     * @param resource an {@link InputStream} InputStream to source
     * @param username the username used to login to the database
     * @param password the password used to login to the database
     * @param dbName the name of the database (schema) to source into
     * @throws ManagedProcessException if something fatal went wrong
     */
    public void source(InputStream resource, String username, String password, String dbName)
            throws ManagedProcessException {
        run("script file sourced from an InputStream", resource, username, password, dbName, false);
    }

    /**
     * Takes in a string that represents a resource on the classpath and sources it via the mysql
     * command line tool.
     *
     * @param resource the path to a resource on the classpath to source
     * @param username the username used to login to the database
     * @param password the password used to login to the database
     * @param dbName the name of the database (schema) to source into
     * @throws ManagedProcessException if something fatal went wrong
     */
    public void source(String resource, String username, String password, String dbName)
            throws ManagedProcessException {
        source(resource, username, password, dbName, false);
    }

    /**
     * Takes in a string that represents a resource on the classpath and sources it via the mysql
     * command line tool. Optionally force continue if individual statements fail.
     *
     * @param resource the path to a resource on the classpath to source
     * @param username the username used to login to the database
     * @param password the password used to login to the database
     * @param dbName the name of the database (schema) to source into
     * @param force if true then continue on error (mysql --force)
     * @throws ManagedProcessException if something fatal went wrong
     */
    public void source(String resource, String username, String password, String dbName, boolean force)
            throws ManagedProcessException {
        try (InputStream from = getClass().getClassLoader().getResourceAsStream(resource)) {
            if (from == null) {
                throw new IllegalArgumentException("Could not find script file on the classpath at: " + resource);
            }
            run("script file sourced from the classpath at: " + resource, from, username, password, dbName, force);
        } catch (IOException ioe) {
            logger.warn("Issue trying to close source InputStream. Raise warning and continue.", ioe);
        }
    }

    public void run(String command, String username, String password, String dbName) throws ManagedProcessException {
        run(command, username, password, dbName, false, true);
    }

    public void run(String command) throws ManagedProcessException {
        run(command, null, null, null);
    }

    public void run(String command, String username, String password) throws ManagedProcessException {
        run(command, username, password, null);
    }

    public void run(String command, String username, String password, String dbName, boolean force)
            throws ManagedProcessException {
        run(command, username, password, dbName, force, true);
    }

    public void run(String command, String username, String password, String dbName, boolean force, boolean verbose)
            throws ManagedProcessException {
        // If resource is created here, it should probably be released here also (as opposed to in protected run method)
        // Also move to try-with-resource syntax to remove closeQuietly deprecation errors.
        try (InputStream from = IOUtils.toInputStream(command, Charset.defaultCharset())) {
            // When not verbose, only the command's size is logged (avoids dumping huge scripts).
            final String logInfoText = verbose
                    ? "command: " + command
                    : "command (" + command.length() / 1_024 + " KiB long)";
            run(logInfoText, from, username, password, dbName, force);
        } catch (IOException ioe) {
            logger.warn("Issue trying to close source InputStream. Raise warning and continue.", ioe);
        }
    }

    // Pipes fromIS into the mysql client with the given credentials/schema; blocks until exit.
    protected void run(String logInfoText, InputStream fromIS, String username, String password,
            String dbName, boolean force) throws ManagedProcessException {
        logger.info("Running a " + logInfoText);
        try {
            ManagedProcessBuilder builder = new ManagedProcessBuilder(newExecutableFile("bin", "mysql"));
            builder.setOutputStreamLogDispatcher(getOutputStreamLogDispatcher("mysql"));
            builder.setWorkingDirectory(baseDir);
            if (username != null && !username.isEmpty()) {
                builder.addArgument("-u", username);
            }
            if (password != null && !password.isEmpty()) {
                builder.addArgument("-p", password);
            }
            if (dbName != null && !dbName.isEmpty()) {
                builder.addArgument("-D", dbName);
            }
            if (force) {
                builder.addArgument("-f");
            }
            addSocketOrPortArgument(builder);
            if (fromIS != null) {
                builder.setInputStream(fromIS);
            }
            if (configuration.getProcessListener() != null) {
                builder.setProcessListener(configuration.getProcessListener());
            }
            if (configuration.getDefaultCharacterSet() != null) {
                builder.addArgument("--default-character-set=", configuration.getDefaultCharacterSet());
            }
            ManagedProcess process = builder.build();
            process.start();
            process.waitForExit();
        } catch (Exception e) {
            throw new ManagedProcessException("An error occurred while running a " + logInfoText, e);
        }
        logger.info("Successfully ran the " + logInfoText);
    }

    public void createDB(String dbName) throws ManagedProcessException {
        this.run("create database if not exists `" + dbName + "`;");
    }

    public void createDB(String dbName, String username, String password) throws ManagedProcessException {
        this.run("create database if not exists `" + dbName + "`;", username, password);
    }

    protected OutputStreamLogDispatcher getOutputStreamLogDispatcher(@SuppressWarnings("unused") String exec) {
        return new MariaDBOutputStreamLogDispatcher();
    }

    /**
     * Stops the database.
     *
     * @throws ManagedProcessException if something fatal went wrong
     */
    public synchronized void stop() throws ManagedProcessException {
        if (mysqldProcess != null && mysqldProcess.isAlive()) {
            logger.debug("Stopping the database...");
            mysqldProcess.destroy();
            logger.info("Database stopped.");
        } else {
            logger.debug("Database was already stopped.");
        }
    }

    /**
     * Based on the current OS, unpacks the appropriate version of MariaDB to the file system based
     * on the configuration.
     */
    protected void unpackEmbeddedDb() {
        if (configuration.getBinariesClassPathLocation() == null) {
            logger.info("Not unpacking any embedded database (as BinariesClassPathLocation configuration is null)");
            return;
        }

        try {
            Util.extractFromClasspathToFile(configuration.getBinariesClassPathLocation(), baseDir);
            if (!configuration.isWindows()) {
                // Classpath extraction loses the executable bit; restore it for each tool.
                Util.forceExecutable(newExecutableFile("bin", "my_print_defaults"));
                Util.forceExecutable(newExecutableFile("bin", "mysql_install_db"));
                Util.forceExecutable(newExecutableFile("scripts", "mysql_install_db"));
                Util.forceExecutable(newExecutableFile("bin", "mysqld"));
                Util.forceExecutable(newExecutableFile("bin", "mysqldump"));
                Util.forceExecutable(newExecutableFile("bin", "mysql"));
            }
        } catch (IOException e) {
            throw new RuntimeException("Error unpacking embedded DB", e);
        }
    }

    /**
     * If the data directory specified in the configuration is a temporary directory, this deletes
     * any previous version. It also makes sure that the directory exists.
     *
     * @throws ManagedProcessException if something fatal went wrong
     */
    protected void prepareDirectories() throws ManagedProcessException {
        baseDir = Util.getDirectory(configuration.getBaseDir());
        libDir = Util.getDirectory(configuration.getLibDir());
        try {
            String dataDirPath = configuration.getDataDir();
            if (Util.isTemporaryDirectory(dataDirPath)) {
                FileUtils.deleteDirectory(new File(dataDirPath));
            }
            dataDir = Util.getDirectory(dataDirPath);
        } catch (Exception e) {
            throw new ManagedProcessException("An error occurred while preparing the data directory", e);
        }
    }

    /**
     * Adds a shutdown hook to ensure that when the JVM exits, the database is stopped, and any
     * temporary data directories are cleaned up.
     */
    protected void cleanupOnExit() {
        String threadName = "Shutdown Hook Deletion Thread for Temporary DB " + dataDir.toString();
        final DB db = this;
        Runtime.getRuntime()
                .addShutdownHook(new DBShutdownHook(threadName, db, () -> mysqldProcess, () -> baseDir, () -> dataDir, configuration));
    }

    // The dump*() methods are intentionally *NOT* made "synchronized",
    // (even though with --lock-tables one could not run two dumps concurrently anyway)
    // because in theory this could cause a long-running dump to deadlock an application
    // wanting to stop() a DB. Let it thus be a caller's responsibility to not dump
    // concurrently (and if she does, it just fails, which is much better than an
    // unexpected deadlock).

    public ManagedProcess dumpXML(File outputFile, String dbName, String user, String password)
            throws IOException, ManagedProcessException {
        return dump(outputFile, Arrays.asList(dbName), true, true, true, user, password);
    }

    public ManagedProcess dumpSQL(File outputFile, String dbName, String user, String password)
            throws IOException, ManagedProcessException {
        return dump(outputFile, Arrays.asList(dbName), true, true, false, user, password);
    }

    // Builds a mysqldump process writing to outputFile; the stream is closed by the
    // process listener on completion or failure, not by this method.
    protected ManagedProcess dump(File outputFile, List<String> dbNamesToDump, boolean compactDump,
            boolean lockTables, boolean asXml, String user, String password)
            throws ManagedProcessException, IOException {
        ManagedProcessBuilder builder = new ManagedProcessBuilder(newExecutableFile("bin", "mysqldump"));
        BufferedOutputStream outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));
        builder.addStdOut(outputStream);
        builder.setOutputStreamLogDispatcher(getOutputStreamLogDispatcher("mysqldump"));
        builder.addArgument("--port=" + configuration.getPort());
        if (!configuration.isWindows()) {
            builder.addFileArgument("--socket", getAbsoluteSocketFile());
        }
        if (lockTables) {
            builder.addArgument("--flush-logs");
            builder.addArgument("--lock-tables");
        }
        if (compactDump) {
            builder.addArgument("--compact");
        }
        if (asXml) {
            builder.addArgument("--xml");
        }
        if (StringUtils.isNotBlank(user)) {
            builder.addArgument("-u");
            builder.addArgument(user);
            if (StringUtils.isNotBlank(password)) {
                builder.addArgument("-p" + password);
            }
        }
        builder.addArgument(StringUtils.join(dbNamesToDump, StringUtils.SPACE));
        builder.setDestroyOnShutdown(true);
        builder.setProcessListener(new ManagedProcessListener() {
            @Override
            public void onProcessComplete(int i) {
                closeOutputStream();
            }

            @Override
            public void onProcessFailed(int i, Throwable throwable) {
                closeOutputStream();
            }

            private void closeOutputStream() {
                try {
                    outputStream.close();
                } catch (IOException exception) {
                    logger.error("Problem while trying to close the stream to the file containing the DB dump", exception);
                }
            }
        });
        return builder.build();
    }
}
package com.alienobject.textpipe.services.http;

import com.alienobject.textpipe.services.http.HTTPResponse.Type;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;

/**
 * A single HTTP request, runnable synchronously via {@link #doRequest()} or as a
 * {@link Callable} (returning the populated {@link HTTPResponse}).
 *
 * <p>Fixes in this revision:
 * <ul>
 *   <li>Uses the public {@link java.net.HttpURLConnection} instead of the JDK-internal
 *       {@code sun.net.www.protocol.http.HttpURLConnection}, which is inaccessible on
 *       Java 9+ and was never meant to be referenced directly.</li>
 *   <li>{@link #performRequest()} now writes the request body <em>before</em> reading
 *       the response code; {@code getResponseCode()} commits the request, so the old
 *       order meant the body was never actually sent.</li>
 * </ul>
 */
public class HTTPRequest implements Callable<HTTPResponse> {

    /** Whether HTTP 3xx redirects are followed transparently by the connection. */
    public enum RedirectPolicy {
        NOFOLLOW, FOLLOW
    }

    protected HTTPMethod method = null;
    protected URL url = null;
    protected Map<String, String> headers = null;
    protected String contentType = Type.TEXT_PLAIN.toString();
    protected String body = null;
    protected String userAgent = "Javier/HTTPService 1.0";
    // Defaults applied to every new request; guarded by class-level synchronization below.
    protected static int defaultConnectTimeout = 20000;
    protected static int defaultReadTimeout = 20000;
    protected int connectTimeout;
    protected int readTimeout;
    protected RedirectPolicy followRedirects = RedirectPolicy.FOLLOW;
    protected HTTPResponse response = null;
    protected long ifModifiedSince = -1;
    protected HttpURLConnection connection = null;

    /**
     * @param method  HTTP verb to use
     * @param url     target URL
     * @param headers request headers; defensively copied
     */
    public HTTPRequest(HTTPMethod method, URL url, Map<String, String> headers) {
        this.method = method;
        this.url = url;
        this.headers = new HashMap<String, String>(headers);
        this.connectTimeout = HTTPRequest.defaultConnectTimeout;
        this.readTimeout = HTTPRequest.defaultReadTimeout;
        this.response = new HTTPResponse();
    }

    /** GET request with no extra headers. */
    public HTTPRequest(URL url) {
        this(HTTPMethod.GET, url, new HashMap<String, String>());
    }

    /** GET request for a URL given as a string. */
    public HTTPRequest(String url) throws MalformedURLException {
        this(new URL(url));
    }

    /**
     * Opens a connection and applies the verb and headers.
     * NOTE(review): unlike {@link #makeConnection()}, this does not set timeouts or the
     * User-Agent; it appears to be an older variant kept for subclasses — confirm before removing.
     */
    protected HttpURLConnection createConnection() throws IOException {
        connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod(method.toString());
        for (Map.Entry<String, String> header : headers.entrySet()) {
            connection.setRequestProperty(header.getKey(), header.getValue());
        }
        return connection;
    }

    public HTTPMethod getMethod() {
        return method;
    }

    public void setMethod(HTTPMethod method) {
        this.method = method;
    }

    public URL getUrl() {
        return url;
    }

    public void setUrl(URL url) {
        this.url = url;
    }

    public Map<String, String> getHeaders() {
        return headers;
    }

    public void setHeaders(Map<String, String> headers) {
        this.headers = headers;
    }

    public long getConnectTimeout() {
        return connectTimeout;
    }

    public void setConnectTimeout(int connectTimeout) {
        this.connectTimeout = connectTimeout;
    }

    public long getReadTimeout() {
        return readTimeout;
    }

    public void setReadTimeout(int readTimeout) {
        this.readTimeout = readTimeout;
    }

    public static synchronized int getDefaultConnectTimeout() {
        return defaultConnectTimeout;
    }

    public static synchronized void setDefaultConnectTimeout(int defaultConnectTimeout) {
        HTTPRequest.defaultConnectTimeout = defaultConnectTimeout;
    }

    public static synchronized int getDefaultReadTimeout() {
        return defaultReadTimeout;
    }

    public static synchronized void setDefaultReadTimeout(int defaultReadTimeout) {
        HTTPRequest.defaultReadTimeout = defaultReadTimeout;
    }

    public String getContentType() {
        return contentType;
    }

    public void setContentType(String contentType) {
        this.contentType = contentType;
    }

    public String getBody() {
        return body;
    }

    public void setBody(String body) {
        this.body = body;
    }

    public String getUserAgent() {
        return userAgent;
    }

    public void setUserAgent(String userAgent) {
        this.userAgent = userAgent;
    }

    /** @return true when a request body has been set. */
    public boolean hasBody() {
        return body != null;
    }

    /**
     * Length of the request body.
     * NOTE(review): this counts chars, not encoded bytes — incorrect for non-ASCII
     * bodies if used as Content-Length; confirm callers' expectations before changing.
     */
    public long getContentLength() {
        if (body != null) {
            return body.length();
        }
        return 0;
    }

    public RedirectPolicy getFollowRedirects() {
        return followRedirects;
    }

    public void setFollowRedirects(RedirectPolicy followRedirects) {
        this.followRedirects = followRedirects;
    }

    public HTTPResponse getResponse() {
        return response;
    }

    public void setResponse(HTTPResponse response) {
        this.response = response;
    }

    /** {@link Callable} entry point: execute the request and return the response. */
    public HTTPResponse call() throws IOException {
        doRequest();
        return this.response;
    }

    /** Executes this request, populating {@link #getResponse()}. */
    public void doRequest() throws IOException {
        this.connection = makeConnection();
        initializeConnection();
        performRequest();
    }

    /** Reads the response body into {@link #response}; errors are logged, not propagated. */
    protected void readResponseBody(HttpURLConnection connection) {
        InputStream inputStream;
        try {
            inputStream = connection.getInputStream();
        } catch (IOException e) {
            e.printStackTrace();
            return;
        }
        String urlContent;
        try {
            urlContent = HTTPService.downloadStream(inputStream);
            response.setBody(urlContent);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                inputStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /** Streams {@link #getBody()} to the connection; errors are logged, not propagated. */
    protected void writeRequestBody() {
        OutputStream outputStream;
        try {
            outputStream = connection.getOutputStream();
        } catch (IOException e1) {
            e1.printStackTrace();
            return;
        }
        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(outputStream);
        try {
            outputStreamWriter.write(getBody());
            outputStreamWriter.flush();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                outputStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Sends the request and captures status, headers, body and caching metadata.
     * The request body MUST be written before {@code getResponseCode()} is called,
     * since that call commits the request to the server.
     */
    protected void performRequest() throws IOException {
        if (hasBody()) {
            writeRequestBody();
        }
        response.setResponseCode(connection.getResponseCode());
        response.setContentType(connection.getContentType());
        response.setResponseHeaders(connection.getHeaderFields());
        readResponseBody(connection);
        response.setExpires(connection.getExpiration());
        response.setLastModified(connection.getLastModified());
        response.setDate(connection.getDate());
    }

    /** Hook for subclasses to customize {@link #connection} before the request is sent. */
    protected void initializeConnection() {
    }

    /** Opens and fully configures the connection (timeouts, redirects, conditional GET, UA). */
    protected HttpURLConnection makeConnection() throws IOException {
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setConnectTimeout(connectTimeout);
        connection.setReadTimeout(readTimeout);
        if (this.followRedirects == RedirectPolicy.FOLLOW) {
            connection.setInstanceFollowRedirects(true);
        }
        if (hasBody()) {
            connection.setDoOutput(true);
        }
        if (ifModifiedSince != -1) {
            connection.setIfModifiedSince(ifModifiedSince);
        }
        connection.setRequestProperty("User-Agent", getUserAgent());
        return connection;
    }

    public long getIfModifiedSince() {
        return ifModifiedSince;
    }

    public void setIfModifiedSince(long ifModifiedSince) {
        this.ifModifiedSince = ifModifiedSince;
    }

    public static void main(String[] args) throws Exception {
        HTTPRequest r = new HTTPRequest(new URL("http://www.cnn.com"));
        System.out.println(HTTPService.getInstance().request(r).getResponseHeaders());
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.aevi.print.model;

import com.aevi.util.json.JsonConverter;
import com.aevi.util.json.Jsonable;

import java.util.Map;

import static com.aevi.print.util.Preconditions.checkNotNull;

/**
 * Describes a single printer available on the device: identity, paper geometry,
 * supported fonts/codepages/commands and driver-specific options.
 */
public class PrinterSettings implements Jsonable {

    public static final String OPTION_DEFAULT = "default";

    private final String printerId;
    private final int paperWidth;
    private final int printableWidth;
    private final float paperDotsPmm;
    private final PaperKind paperKind;
    private final String[] commands;
    private final int[] codepages;
    private final Map<String, String> options;
    private final boolean canHandleCommands;
    private final boolean doesReportStatus;
    private final boolean doesSupportCodepages;
    private final String[] supportedLanguages;
    private final PrinterFont[] printerFonts;
    private String displayName;

    PrinterSettings(String printerId,
                    int paperWidth,
                    int printableWidth,
                    float paperDotsPmm,
                    PaperKind paperKind,
                    PrinterFont[] printerFonts,
                    boolean canHandleCommands,
                    String[] commands,
                    boolean doesReportStatus,
                    int[] codepages,
                    boolean doesSupportCodepages,
                    Map<String, String> options,
                    String[] supportedLanguages) {
        // Mandatory fields are validated first so callers fail fast with a clear message.
        this.printerId = checkNotNull(printerId, "printerId must not be null");
        this.paperKind = checkNotNull(paperKind, "paperKind must not be null");
        this.paperWidth = paperWidth;
        this.printableWidth = printableWidth;
        this.paperDotsPmm = paperDotsPmm;
        this.commands = commands;
        this.codepages = codepages;
        this.options = options;
        this.canHandleCommands = canHandleCommands;
        this.doesReportStatus = doesReportStatus;
        this.doesSupportCodepages = doesSupportCodepages;
        this.printerFonts = printerFonts;
        this.supportedLanguages = supportedLanguages;
    }

    /**
     * @return the unique ID of this printer
     */
    public String getPrinterId() {
        return printerId;
    }

    /**
     * The human-readable printer name; falls back to the printer ID when no
     * display name has been set.
     *
     * @return the name to show to users, never null
     */
    public String getDisplayName() {
        boolean hasDisplayName = displayName != null && !displayName.isEmpty();
        return hasDisplayName ? displayName : printerId;
    }

    /**
     * Sets the printer name shown to users.
     *
     * @param displayName the user-facing printer name; must not be null
     */
    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    /**
     * @return the paper width in mm
     */
    public int getPaperWidth() {
        return paperWidth;
    }

    /**
     * @return the usable printing width in mm (paper width minus margins)
     */
    public int getPrintableWidth() {
        return printableWidth;
    }

    /**
     * @return the printer resolution in dots per mm for the paper size above
     */
    public float getPaperDotsPerMm() {
        return paperDotsPmm;
    }

    /**
     * @return the kind of paper this printer prints on
     */
    public PaperKind getPaperKind() {
        return paperKind;
    }

    /**
     * @return the fonts this printer supports
     */
    public PrinterFont[] getPrinterFonts() {
        return printerFonts;
    }

    /**
     * @return printer-specific command strings this printer can execute
     */
    public String[] getCommands() {
        return commands;
    }

    /**
     * @return codepage ids usable with this printer; see printer documentation for details
     */
    public int[] getCodepages() {
        return codepages;
    }

    /**
     * @return true if this printer uses the codepages listed by {@link #getCodepages()}
     */
    public boolean doesSupportCodePages() {
        return doesSupportCodepages;
    }

    /**
     * @return printer-specific key/value settings
     */
    public Map<String, String> getOptions() {
        return options;
    }

    /**
     * @return true if this printer accepts actions via
     *         {@link com.aevi.print.PrinterManager#sendAction(String, String)}
     */
    public boolean canHandleCommands() {
        return canHandleCommands;
    }

    /**
     * @return true if this printer reports status via
     *         {@link com.aevi.print.PrinterManager#status(String)}
     */
    public boolean doesReportPrinterStatus() {
        return doesReportStatus;
    }

    /**
     * @return two-letter ISO 639 codes of the languages this printer supports
     */
    public String[] getSupportedLanguages() {
        return supportedLanguages;
    }

    @Override
    public String toJson() {
        return JsonConverter.serialize(this);
    }

    public static PrinterSettings fromJson(String json) {
        return JsonConverter.deserialize(json, PrinterSettings.class);
    }
}
///usr/bin/env jbang "$0" "$@" ; exit $? //JAVA 17 //DEPS com.drewnoakes:metadata-extractor:2.15.0 //DEPS info.picocli:picocli:4.5.2 //DEPS net.coobird:thumbnailator:0.4.13 import picocli.CommandLine; import picocli.CommandLine.Command; import picocli.CommandLine.ExitCode; import picocli.CommandLine.Parameters; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Collection; import java.util.Comparator; import java.util.Locale; import java.util.NoSuchElementException; import java.util.Optional; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import com.drew.imaging.ImageMetadataReader; import com.drew.imaging.ImageProcessingException; import com.drew.lang.GeoLocation; import com.drew.metadata.exif.ExifSubIFDDirectory; import com.drew.metadata.exif.GpsDirectory; import net.coobird.thumbnailator.Thumbnails; import net.coobird.thumbnailator.resizers.configurations.Dithering; import net.coobird.thumbnailator.resizers.configurations.Rendering; @Command(name = "createGalleries", description = "Creates a gallery page for every year with images.") public class CreateGalleries implements Callable<Integer> { record Image(Path path, LocalDateTime takenOn, GeoLocation location) { static Optional<Image> fromPath(Path path) { try { var metadata = ImageMetadataReader.readMetadata(Files.newInputStream(path)); var takenOn = metadata.getDirectoriesOfType(ExifSubIFDDirectory.class) .stream() .map(d -> d.getDateOriginal()) .filter(d -> d != null) .map(d -> d.toInstant().atZone(ZoneId.of("Europe/Berlin")).toLocalDateTime()) .findFirst().get(); var geolocation = 
metadata.getDirectoriesOfType(GpsDirectory.class) .stream() .map(GpsDirectory::getGeoLocation) .filter(l -> !(l == null || l.isZero())) .findFirst(); return Optional.of(new Image(path, takenOn, geolocation.orElse(null))); } catch (ImageProcessingException | NoSuchElementException | IOException e) { // We just ignore this image } return Optional.empty(); } void resize(int width, Path name) throws IOException { Thumbnails.of(path.toFile()) .width(width) .outputQuality(0.95) .outputFormat("jpg") .rendering(Rendering.QUALITY) .dithering(Dithering.DISABLE) .toFile(name.toFile()); } String store(Integer index, Path outputFolder) throws IOException, InterruptedException { var baseNameFormatter = DateTimeFormatter.ofPattern("yyyy/yyyy-MM-dd-'%d-%s.jpg'", Locale.ROOT); var baseNameFormat = baseNameFormatter.format(takenOn); var thumb = outputFolder.resolve(String.format(Locale.ROOT, baseNameFormat, index, "thumb")); resize(640, thumb); var full = outputFolder.resolve(String.format(Locale.ROOT, baseNameFormat, index, "full")); resize(1280, full); var linkToMaps = location == null ? 
"" : String.format(Locale.ROOT, "<a href='https://www.google.com/maps/search/?api=1&query=%1$.4f,%2$.4f'>Taken at %1$.4f, %2$.4f</a>", location.getLatitude(), location.getLongitude()); var ymd = DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.ROOT).format(takenOn); var template = """ <article class="thumb"> <a href="%s" class="image"><img src="%s" alt="" /></a> <h2>%s</h2> <p>%s</p> </article> """; return String .format(Locale.ROOT, template, full.getFileName().toString(), thumb.getFileName().toString(), ymd, linkToMaps); } } @Parameters(index = "0", description = "Input folder") private Path inputFolder; @Parameters(index = "1", description = "Output folder") private Path outputFolder; private void prepareOutputFolder(Collection<Integer> years) throws IOException { if (Files.isDirectory(outputFolder)) { Files.walkFileTree(outputFolder, new SimpleFileVisitor<>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.delete(file); return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { Files.delete(dir); return FileVisitResult.CONTINUE; } }); } Files.deleteIfExists(outputFolder); for (Integer year : years) { Files.createDirectories(outputFolder.resolve(Path.of(year.toString()))); } } @Override public Integer call() throws Exception { if (!Files.isDirectory(inputFolder)) { return ExitCode.USAGE; } var imagesPerYear = Files.walk(inputFolder) .filter( p -> Files.isRegularFile(p) && p.getFileName().toString().toLowerCase(Locale.ROOT).endsWith(".jpg")) .map(Image::fromPath) .filter(Optional::isPresent) .map(Optional::get) .sorted(Comparator.comparing(Image::takenOn)) .collect(Collectors.groupingBy(i -> i.takenOn().getYear())); prepareOutputFolder(imagesPerYear.keySet()); imagesPerYear.forEach((year, images) -> { var content = new StringBuilder(); var count = new AtomicInteger(0); images.forEach(i -> { try { var index = 
count.incrementAndGet(); content.append(i.store(index, outputFolder)); } catch (Exception e) { e.printStackTrace(); } }); try { Files.write(outputFolder.resolve(Path.of(year.toString(), "index.html")), PAGE_TEMPLATE .replaceAll("\\$year", year.toString()) .replaceAll("\\$content", content.toString()) .getBytes(StandardCharsets.UTF_8) ); } catch (IOException e) { throw new UncheckedIOException(e); } }); return ExitCode.OK; } public static void main(String[] args) { int exitCode = new CommandLine(new CreateGalleries()).execute(args); System.exit(exitCode); } private static final String PAGE_TEMPLATE = """ <!DOCTYPE HTML> <!-- Multiverse by HTML5 UP html5up.net | @ajlkn Free for personal and commercial use under the CCA 3.0 license (html5up.net/license) --> <html> <head> <title>Gallery $year | biking.michael-simons.eu</title> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no" /> <link rel="stylesheet" href="/css/gallery.css" /> <noscript><link rel="stylesheet" href="/css/noscript.css" /></noscript> </head> <body class="is-preload"> <div id="wrapper"> <header id="header"> <h1><a href="index.html"><strong>Gallery $year</a></h1> <nav><ul><li><a href="#footer" class="icon solid fa-info-circle">About</a></li></ul></nav> </header> <div id="main"> $content </div> <footer id="footer" class="panel"> <div class="inner split"> <div> <section> <h2>Part of biking.michael-simons.eu</h2> <p> This gallery is part of <a href="https://biking.michael-simons.eu">biking.michael-simons.eu</a>. 
The full source code of this application is available on <a href="https://github.com/michael-simons/biking2">GitHub</a>,\s including the generator used for building this gallery.<br /> Gallery created with <a href="https://github.com/michael-simons/biking2/blob/public/src/cli/CreateGalleries.java">CreateGalleries.java</a>, a small Java CLI powered by <a href="https://www.jbang.dev">jbang</a> and the mighty <a href="https://drewnoakes.com/code/exif/">Metadata Extractor</a>. </p> </section> <p class="copyright"> &copy; 2020 by Michael J. Simons, Design by <a href="http://html5up.net">HTML5 UP</a>.<br /> While the sourcecode of this application and the gallery generator is licensed under Apache-2.0 License, the images are published under <a href="https://creativecommons.org/licenses/by-nc-sa/4.0/">Attribution-NonCommercial-ShareAlike 4.0 International</a>. </p> </div> <div> <section> <h2>Follow me on ...</h2> <ul class="icons"> <li><a href="https://twitter.com/rotnroll666" class="icon brands fa-twitter"><span class="label">Twitter</span></a></li> <li><a href="https://github.com/michael-simons" class="icon brands fa-github"><span class="label">GitHub</span></a></li> <li><a href="https://www.linkedin.com/in/michael-simons-196712139/" class="icon brands fa-linkedin-in"><span class="label">LinkedIn</span></a></li> </ul> </section> </div> </div> </footer> </div> <script src="/js/gallery/jquery.min.js"></script> <script src="/js/gallery/jquery.poptrox.min.js"></script> <script src="/js/gallery/browser.min.js"></script> <script src="/js/gallery/breakpoints.min.js"></script> <script src="/js/gallery/util.js"></script> <script src="/js/gallery/main.js"></script> </body> </html> """; }
package dcraft.web.ui.tags;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import dcraft.filestore.CommonPath;
import dcraft.hub.SiteInfo;
import dcraft.lang.op.OperationContext;
import dcraft.util.StringUtil;
import dcraft.web.core.IOutputAdapter;
import dcraft.web.core.WebContext;
import dcraft.web.ui.UIElement;
import dcraft.web.ui.UIWork;
import dcraft.web.ui.adapter.DynamicOutputAdapter;
import dcraft.xml.XElement;
import dcraft.xml.XNode;

/**
 * The {@code dc.Html} root tag. During {@link #translate(WeakReference, List)} it turns
 * itself into a standard {@code <html>} element: the original attributes/children are
 * stashed (for later dynamic output) and a {@code <head>} is synthesized from page
 * attributes and site configuration, followed by the {@code dc.Body} content.
 *
 * <p>Fix in this revision: the charset meta attribute was misspelled {@code "chartset"},
 * so the UTF-8 declaration was never emitted correctly; it is now {@code "charset"}.
 */
public class Html extends MixIn {
	// Original attributes/children of the dc.Html element, set aside during translate()
	// so dynamic output can still access them after this element becomes plain <html>.
	protected Map<String, String> hiddenattributes = null;
	protected List<XNode> hiddenchildren = null;

	public Map<String, String> getHiddenAttributes() {
		return this.hiddenattributes;
	}

	public List<XNode> getHiddenChildren() {
		return this.hiddenchildren;
	}

	@Override
	public UIElement getRoot() {
		return this;
	}

	public Html() {
		super("dc.Html");
	}

	/**
	 * If a Skeleton attribute is present, loads that skeleton file and merges its
	 * content into this root before the normal expansion runs.
	 */
	@Override
	public void expand(WeakReference<UIWork> work) {
		if (this.hasAttribute("Skeleton")) {
			String tpath = this.getAttribute("Skeleton");

			CommonPath pp = new CommonPath(tpath + ".html");

			IOutputAdapter sf = work.get().getContext().getSite().getWebsite()
					.findFile(pp, work.get().getContext().isPreview());

			if (sf instanceof DynamicOutputAdapter) {
				UIElement layout = ((DynamicOutputAdapter) sf).getSource((WebContext) work.get().getContext());

				// merge content but don't actually add the root of skeleton itself
				layout.mergeWithRoot(work, this, true);
			}
		}

		super.expand(work);
	}

	/**
	 * Converts this element into the final {@code <html>} document: builds the
	 * {@code <head>} (meta tags, icons, Open Graph data, styles, scripts) and appends
	 * the {@code dc.Body} content as {@code <body>}.
	 */
	@Override
	public void translate(WeakReference<UIWork> work, List<XNode> pnodes) {
		// don't change my identity until after the scripts run
		this.setName("html");

		UIElement body = (UIElement) this.find("dc.Body");

		if (body == null) {
			body = new Fragment();
			body
				.with(new UIElement("h1")
					.withText("Missing Body Error!!")
				);
		}

		String pc = this.getAttribute("PageClass");

		if (StringUtil.isNotEmpty(pc))
			body.withClass(pc);

		// any dc.Require child with a Class attribute contributes extra body classes
		for (XNode rel : this.getChildren()) {
			if (!(rel instanceof XElement))
				continue;

			XElement xel = (XElement) rel;

			if (xel.getName().equals("dc.Require") && xel.hasNotEmptyAttribute("Class"))
				body.withClass(xel.getAttribute("Class"));
		}

		List<XElement> reqstyles = this.selectAll("dc.RequireStyle");

		// we only want head and body in translated document
		// set apart the rest for possible use later in dynamic out
		this.hiddenattributes = this.attributes;
		this.hiddenchildren = this.children;

		this.attributes = new HashMap<>();
		this.children = new ArrayList<>();

		// setup a parameter so that PageTitle is available to macros when executing above
		if (this.hiddenattributes != null)
			this.withParam("PageTitle", XNode.unquote(this.hiddenattributes.get("Title")));

		//IOutputContext octx = work.get().getContext();

		//if ((octx instanceof WebContext) && ((WebContext) octx).isDynamic()) {
		this
			.withAttribute("Title", "@val|PageTitle@ - @ctx|SiteTitle@");
		//	.with(body);

		if ((this.hiddenattributes != null) && this.hiddenattributes.containsKey("CmsPath"))
			this.withAttribute("CmsPath", this.hiddenattributes.get("CmsPath"));

		UIElement head = new UIElement("head");

		head
			.with(new UIElement("meta")
				// FIX: was "chartset" — the UTF-8 declaration was never valid
				.withAttribute("charset", "utf-8")
			)
			.with(new UIElement("meta")
				.withAttribute("name", "format-detection")
				.withAttribute("content", "telephone=no")
			)
			.with(new UIElement("meta")
				.withAttribute("name", "viewport")
				.withAttribute("content", "width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no")
			)
			.with(new UIElement("meta")
				.withAttribute("name", "robots")
				// Public="false" hides the page from crawlers; locale-safe comparison
				.withAttribute("content", "false".equalsIgnoreCase(this.getAttribute("Public", "true"))
						? "noindex,nofollow" : "index,follow")
			)
			.with(new UIElement("title").withText("@val|PageTitle@ - @ctx|SiteTitle@"));

		SiteInfo site = work.get().getContext().getSite();

		XElement domainwebconfig = site.getWebsite().getWebConfig();

		// icon resolution order: Icon attr, Icon16 attr, site config Icon, default logo
		String icon = this.getAttribute("Icon");

		if (StringUtil.isEmpty(icon))
			icon = this.getAttribute("Icon16");

		if (StringUtil.isEmpty(icon) && (domainwebconfig != null))
			icon = domainwebconfig.getAttribute("Icon");

		if (StringUtil.isEmpty(icon))
			icon = "/imgs/logo";

		if (StringUtil.isNotEmpty(icon)) {
			// if full name then use as the 16x16 version
			if (icon.endsWith(".png")) {
				head
					.with(new UIElement("link")
						.withAttribute("type", "image/png")
						.withAttribute("rel", "shortcut icon")
						.withAttribute("href", icon)
					)
					.with(new UIElement("link")
						.withAttribute("sizes", "16x16")
						.withAttribute("rel", "icon")
						.withAttribute("href", icon)
					);
			}
			else {
				head
					.with(new UIElement("link")
						.withAttribute("type", "image/png")
						.withAttribute("rel", "shortcut icon")
						.withAttribute("href", icon + "16.png")
					)
					.with(new UIElement("link")
						.withAttribute("sizes", "16x16")
						.withAttribute("rel", "icon")
						.withAttribute("href", icon + "16.png")
					)
					.with(new UIElement("link")
						.withAttribute("sizes", "32x32")
						.withAttribute("rel", "icon")
						.withAttribute("href", icon + "32.png")
					)
					.with(new UIElement("link")
						.withAttribute("sizes", "152x152")
						.withAttribute("rel", "icon")
						.withAttribute("href", icon + "152.png")
					);
			}
		}

		icon = this.getAttribute("Icon32");

		if (StringUtil.isNotEmpty(icon)) {
			head.with(new UIElement("link")
				.withAttribute("sizes", "32x32")
				.withAttribute("rel", "icon")
				.withAttribute("href", icon)
			);
		}

		icon = this.getAttribute("Icon152");

		if (StringUtil.isNotEmpty(icon)) {
			head.with(new UIElement("link")
				.withAttribute("sizes", "152x152")
				.withAttribute("rel", "icon")
				.withAttribute("href", icon)
			);
		}

		/*
		 * Essential Meta Tags
		 *
		 * https://css-tricks.com/essential-meta-tags-social-media/
		 *
		 * - images: Reconciling the guidelines for the image is simple: follow Facebook's
		 * recommendation of a minimum dimension of 1200x630 pixels (can go as low as 600 x 315)
		 * and an aspect ratio of 1.91:1, but adhere to Twitter's file size requirement of less than 1MB.
		 *
		 * - Title max 70 chars
		 * - Desc max 200 chars
		 */

		head
			.with(new UIElement("meta")
				.withAttribute("property", "og:title")
				.withAttribute("content", "@val|PageTitle@")
			);

		if (this.hiddenattributes != null) {
			String keywords = XNode.unquote(this.hiddenattributes.get("Keywords"));

			if (StringUtil.isNotEmpty(keywords))
				head
					.with(new UIElement("meta")
						.withAttribute("name", "keywords")
						.withAttribute("content", keywords)
					);
		}

		if (this.hiddenattributes != null) {
			String desc = XNode.unquote(this.hiddenattributes.get("Description"));

			if (StringUtil.isNotEmpty(desc))
				head
					.with(new UIElement("meta")
						.withAttribute("name", "description")
						.withAttribute("content", desc)
					)
					.with(new UIElement("meta")
						.withAttribute("property", "og:description")
						.withAttribute("content", desc)
					);
		}

		String indexurl = null;

		if ((domainwebconfig != null) && domainwebconfig.hasNotEmptyAttribute("IndexUrl"))
			indexurl = domainwebconfig.getAttribute("IndexUrl");

		if (this.hiddenattributes != null) {
			String image = XNode.unquote(this.hiddenattributes.get("Image"));

			if (StringUtil.isEmpty(image) && (domainwebconfig != null) && domainwebconfig.hasNotEmptyAttribute("SiteImage"))
				image = domainwebconfig.getAttribute("SiteImage");

			if (StringUtil.isNotEmpty(indexurl) && StringUtil.isNotEmpty(image))
				head
					.with(new UIElement("meta")
						.withAttribute("property", "og:image")
						.withAttribute("content", this.getAttribute("Image", indexurl + image.substring(1)))
					);
		}

		if (StringUtil.isNotEmpty(indexurl))
			head
				.with(new UIElement("meta")
					.withAttribute("property", "og:url")
					.withAttribute("content", indexurl + work.get().getContext().getPath().toString().substring(1))
				);

		/* TODO review
			.with(new UIElement("meta")
				.withAttribute("name", "twitter:card")
				.withAttribute("content", "summary")
			);
		*/

		/* TODO review, generalize so we can override
		if (domainwebconfig != null) {
			for (XElement gel : domainwebconfig.selectAll("Meta")) {
				UIElement m = new UIElement("meta");

				for (Entry<String, String> mset : gel.getAttributes().entrySet())
					m.withAttribute(mset.getKey(), mset.getValue());

				head.with(m);
			}
		}
		*/

		// TODO research canonical url too

		boolean cachemode = site.getWebsite().isScriptStyleCached()
				&& !((WebContext) work.get().getContext()).isPreview();

		// --- styles ---

		List<String> styles = work.get().getContext().getSite().getWebsite().globalStyles(true, cachemode);

		for (String surl : styles)
			head.with(new UIElement("link")
				.withAttribute("type", "text/css")
				.withAttribute("rel", "stylesheet")
				.withAttribute("href", surl));

		// add in styles specific for this page so we don't have to wait to see them load
		// TODO enhance so style doesn't double load
		for (XElement func : reqstyles) {
			if (func.hasAttribute("Path"))
				head.with(new UIElement("link")
					.withAttribute("type", "text/css")
					.withAttribute("rel", "stylesheet")
					.withAttribute("href", func.getAttribute("Path")));
		}

		// --- scripts ---

		List<String> scripts = work.get().getContext().getSite().getWebsite().globalScripts(true, cachemode);

		for (String surl : scripts)
			head.with(new UIElement("script")
				.withAttribute("defer", "defer")
				.withAttribute("src", surl));

		this
			.withAttribute("lang", OperationContext.get().getWorkingLocaleDefinition().getLanguage())
			.withAttribute("dir", OperationContext.get().getWorkingLocaleDefinition().isRightToLeft() ? "rtl" : "ltr")
			.with(head)
			.with(body);

		super.translate(work, pnodes);
	}
}
/*-------------------------------------------------------------------------
*
* Copyright (c) 2005-2014, PostgreSQL Global Development Group
*
*
*-------------------------------------------------------------------------
*/
package org.postgresql.test.jdbc2;

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.sql.*;

import junit.framework.TestCase;

import org.postgresql.util.PGInterval;
import org.postgresql.test.TestUtil;

/**
 * Tests for the PGInterval type: round-tripping values through the server,
 * string parsing/formatting, and date/calendar arithmetic.
 */
public class IntervalTest extends TestCase {

    private Connection conn;

    public IntervalTest(String name) {
        super(name);
    }

    /** Opens the test database and creates the scratch tables. */
    protected void setUp() throws Exception {
        conn = TestUtil.openDB();
        TestUtil.createTable(conn, "testinterval", "v interval");
        TestUtil.createTable(conn, "testdate", "v date");
    }

    /** Drops the scratch tables and closes the connection. */
    protected void tearDown() throws Exception {
        TestUtil.dropTable(conn, "testinterval");
        TestUtil.dropTable(conn, "testdate");
        TestUtil.closeDB(conn);
    }

    /**
     * Round-trips an interval with out-of-range fields (13 months,
     * 43000.9013 seconds) through the server and checks that it comes
     * back normalized.
     */
    public void testOnlineTests() throws SQLException {
        PreparedStatement insert = conn.prepareStatement("INSERT INTO testinterval VALUES (?)");
        insert.setObject(1, new PGInterval(2004, 13, 28, 0, 0, 43000.9013));
        insert.executeUpdate();
        insert.close();

        Statement query = conn.createStatement();
        ResultSet result = query.executeQuery("SELECT v FROM testinterval");
        assertTrue(result.next());
        PGInterval stored = (PGInterval) result.getObject(1);
        // The server folds 13 months into the year and 43000.9013 seconds
        // into hours/minutes/seconds.
        assertEquals(2005, stored.getYears());
        assertEquals(1, stored.getMonths());
        assertEquals(28, stored.getDays());
        assertEquals(11, stored.getHours());
        assertEquals(56, stored.getMinutes());
        assertEquals(40.9013, stored.getSeconds(), 0.000001);
        assertFalse(result.next());
        result.close();
        query.close();
    }

    /**
     * Checks that a plain String bound to an ::interval parameter is coerced
     * by the server, by selecting dates earlier than (2010-01-01 + 2 * '1 day').
     */
    public void testStringToIntervalCoercion() throws SQLException {
        Statement seed = conn.createStatement();
        String[] seedDates = { "'2010-01-01'", "'2010-01-02'", "'2010-01-04'", "'2010-01-05'" };
        for (String seedDate : seedDates) {
            seed.executeUpdate(TestUtil.insertSQL("testdate", seedDate));
        }
        seed.close();

        PreparedStatement select = conn
            .prepareStatement("SELECT v FROM testdate WHERE v < (?::timestamp with time zone + ? * ?::interval) ORDER BY v");
        select.setObject(1, makeDate(2010, 1, 1));
        select.setObject(2, Integer.valueOf(2));
        select.setObject(3, "1 day"); // plain String, coerced server-side
        ResultSet result = select.executeQuery();
        assertNotNull(result);

        // Only the first two seeded dates fall before 2010-01-03.
        assertTrue(result.next());
        java.sql.Date first = result.getDate(1);
        assertNotNull(first);
        assertEquals(makeDate(2010, 1, 1), first);

        assertTrue(result.next());
        java.sql.Date second = result.getDate(1);
        assertNotNull(second);
        assertEquals(makeDate(2010, 1, 2), second);

        assertFalse(result.next());
        result.close();
        select.close();
    }

    /** PGInterval.toString() must render every field, including zero ones. */
    public void testIntervalToStringCoercion() throws SQLException {
        PGInterval interval = new PGInterval("1 year 3 months");
        assertEquals("1 years 3 mons 0 days 0 hours 0 mins 0.00 secs", interval.toString());
    }

    /**
     * '101:12:00' exceeds a day; whether the excess hours are folded into
     * days depends on the server version.
     */
    public void testDaysHours() throws SQLException {
        Statement query = conn.createStatement();
        ResultSet result = query.executeQuery("SELECT '101:12:00'::interval");
        assertTrue(result.next());
        PGInterval interval = (PGInterval) result.getObject(1);
        if (TestUtil.haveMinimumServerVersion(conn, "8.1")) {
            // 8.1 servers store hours and days separately.
            assertEquals(0, interval.getDays());
            assertEquals(101, interval.getHours());
        } else {
            assertEquals(4, interval.getDays());
            assertEquals(5, interval.getHours());
        }
        assertEquals(12, interval.getMinutes());
    }

    /** Fractional seconds must round when added (0.6006s -> 601ms), both ways. */
    public void testAddRounding() {
        PGInterval fraction = new PGInterval(0, 0, 0, 0, 0, 0.6006);
        Calendar now = Calendar.getInstance();
        long before = now.getTime().getTime();
        fraction.add(now);
        long after = now.getTime().getTime();
        assertEquals(601, after - before);
        // Adding the negated interval must restore the original instant.
        fraction.setSeconds(-0.6006);
        fraction.add(now);
        assertEquals(before, now.getTime().getTime());
    }

    /** Asserts every field of an interval in one call. */
    private void assertIntervalFields(PGInterval interval, int years, int months, int days,
                                      int hours, int minutes, double seconds) {
        assertEquals(years, interval.getYears());
        assertEquals(months, interval.getMonths());
        assertEquals(days, interval.getDays());
        assertEquals(hours, interval.getHours());
        assertEquals(minutes, interval.getMinutes());
        assertEquals(seconds, interval.getSeconds(), 0);
    }

    /** Exercises PGInterval parsing without touching the database. */
    public void testOfflineTests() throws Exception {
        PGInterval reference = new PGInterval(2004, 4, 20, 15, 57, 12.1);
        assertIntervalFields(reference, 2004, 4, 20, 15, 57, 12.1);

        // Verbose postgres format, plural units.
        assertEquals(reference, new PGInterval("@ 2004 years 4 mons 20 days 15 hours 57 mins 12.1 secs"));
        // Singular units.
        assertEquals(reference, new PGInterval("@ 2004 year 4 mon 20 day 15 hour 57 min 12.1 sec"));
        // ISO-style time-of-day tail.
        assertEquals(reference, new PGInterval("2004 years 4 mons 20 days 15:57:12.1"));

        // A trailing "ago" negates every field.
        PGInterval ago = new PGInterval("@ 2004 years 4 mons 20 days 15 hours 57 mins 12.1 secs ago");
        assertIntervalFields(ago, -2004, -4, -20, -15, -57, -12.1);

        // Explicit per-field signs are honoured individually.
        PGInterval signed = new PGInterval("@ +2004 years -4 mons +20 days -15 hours +57 mins -12.1 secs");
        assertIntervalFields(signed, 2004, -4, 20, -15, 57, -12.1);
    }

    /** A fixed reference instant: 2005-05-29 15:35:42.100 (MONTH is 0-based). */
    Calendar getStartCalendar() {
        Calendar start = new GregorianCalendar();
        start.set(Calendar.YEAR, 2005);
        start.set(Calendar.MONTH, 4);
        start.set(Calendar.DAY_OF_MONTH, 29);
        start.set(Calendar.HOUR_OF_DAY, 15);
        start.set(Calendar.MINUTE, 35);
        start.set(Calendar.SECOND, 42);
        start.set(Calendar.MILLISECOND, 100);
        return start;
    }

    /** Asserts the visible fields of a calendar in one call. */
    private void assertCalendarFields(Calendar cal, int year, int month, int day,
                                      int hour, int minute, int second, int millis) {
        assertEquals(year, cal.get(Calendar.YEAR));
        assertEquals(month, cal.get(Calendar.MONTH));
        assertEquals(day, cal.get(Calendar.DAY_OF_MONTH));
        assertEquals(hour, cal.get(Calendar.HOUR_OF_DAY));
        assertEquals(minute, cal.get(Calendar.MINUTE));
        assertEquals(second, cal.get(Calendar.SECOND));
        assertEquals(millis, cal.get(Calendar.MILLISECOND));
    }

    /** PGInterval.add(Calendar) must shift forward and exactly undo with "ago". */
    public void testCalendar() throws Exception {
        Calendar cal = getStartCalendar();

        PGInterval oneOfEach = new PGInterval("@ 1 year 1 mon 1 day 1 hour 1 minute 1 secs");
        oneOfEach.add(cal);
        assertCalendarFields(cal, 2006, 5, 30, 16, 36, 43, 100);

        // The "ago" variant reverses the shift back to the start instant.
        PGInterval oneOfEachAgo = new PGInterval("@ 1 year 1 mon 1 day 1 hour 1 minute 1 secs ago");
        oneOfEachAgo.add(cal);
        assertCalendarFields(cal, 2005, 4, 29, 15, 35, 42, 100);

        cal = getStartCalendar();
        PGInterval mixedSigns = new PGInterval("@ 1 year -23 hours -3 mins -3.30 secs");
        mixedSigns.add(cal);
        assertCalendarFields(cal, 2006, 4, 28, 16, 32, 38, 800);

        PGInterval mixedSignsAgo = new PGInterval("@ 1 year -23 hours -3 mins -3.30 secs ago");
        mixedSignsAgo.add(cal);
        assertCalendarFields(cal, 2005, 4, 29, 15, 35, 42, 100);
    }

    /** Adding an interval and then its "ago" twin to a Date is a net no-op. */
    public void testDate() throws Exception {
        Date shifted = getStartCalendar().getTime();
        Date original = getStartCalendar().getTime();

        PGInterval forward = new PGInterval("@ +2004 years -4 mons +20 days -15 hours +57 mins -12.1 secs");
        forward.add(shifted);
        PGInterval backward = new PGInterval("@ +2004 years -4 mons +20 days -15 hours +57 mins -12.1 secs ago");
        backward.add(shifted);

        assertEquals(original, shifted);
    }

    /** Same round-trip as testDate, but with ISO-style interval strings. */
    public void testISODate() throws Exception {
        Date shifted = getStartCalendar().getTime();
        Date original = getStartCalendar().getTime();

        PGInterval forward = new PGInterval("+2004 years -4 mons +20 days -15:57:12.1");
        forward.add(shifted);
        PGInterval backward = new PGInterval("-2004 years 4 mons -20 days 15:57:12.1");
        backward.add(shifted);

        assertEquals(original, shifted);
    }

    /** Builds a java.sql.Date from human-readable (year, month, day) values. */
    private java.sql.Date makeDate(int y, int m, int d) {
        return new java.sql.Date(y - 1900, m - 1, d);
    }
}
/**
 * Copyright (c) 2003-2017 The Apereo Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://opensource.org/licenses/ecl2
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sakaiproject.site.tool.helper.participant.impl;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.Predicate;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.validator.routines.EmailValidator;

import org.sakaiproject.accountvalidator.logic.ValidationLogic;
import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.AuthzPermissionException;
import org.sakaiproject.authz.api.GroupNotDefinedException;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.event.api.UsageSessionService;
import org.sakaiproject.event.cover.EventTrackingService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.site.util.Participant;
import org.sakaiproject.site.util.SiteConstants;
import org.sakaiproject.site.util.SiteParticipantHelper;
import org.sakaiproject.site.util.SiteTypeUtil;
import org.sakaiproject.sitemanage.api.SiteHelper;
import org.sakaiproject.sitemanage.api.UserNotificationProvider;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.tool.api.Tool;
import org.sakaiproject.tool.api.ToolManager;
import org.sakaiproject.tool.api.ToolSession;
import org.sakaiproject.user.api.User;
import org.sakaiproject.user.api.UserAlreadyDefinedException;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserEdit;
import org.sakaiproject.user.api.UserIdInvalidException;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.user.api.UserPermissionException;
import org.sakaiproject.userauditservice.api.UserAuditRegistration;
import org.sakaiproject.userauditservice.api.UserAuditService;
import org.sakaiproject.util.api.PasswordFactory;

import lombok.Setter;
import lombok.extern.slf4j.Slf4j;

import uk.org.ponder.messageutil.MessageLocator;
import uk.org.ponder.messageutil.TargettedMessage;
import uk.org.ponder.messageutil.TargettedMessageList;

/**
 * Helper that backs the Site Info "Add Participant" wizard: it collects the
 * entered account ids / guest email addresses, validates them, and adds the
 * users to the site realm with the chosen role, status and e-mail
 * notification option.
 *
 * @author
 */
@Slf4j
public class SiteAddParticipantHandler {

    /** Marker character: an id containing it is treated as an email (guest) account. */
    private static final String EMAIL_CHAR = "@";

    // Services injected by the RSF/Spring wiring (public fields by convention here).
    public SiteService siteService = null;
    public AuthzGroupService authzGroupService = null;
    public ToolManager toolManager = null;
    public SessionManager sessionManager = null;
    @Setter private PasswordFactory passwordFactory;
    public ServerConfigurationService serverConfigurationService;

    // Prefix of the tool-session attribute that carries the target site id.
    private final String HELPER_ID = "sakai.tool.helper.id";

    private static final UserAuditRegistration userAuditRegistration = (UserAuditRegistration) ComponentManager.get("org.sakaiproject.userauditservice.api.UserAuditRegistration.sitemanage");

    // SAK-29711: sakai.properties key listing email domains rejected for guest accounts
    private static final String SAK_PROP_INVALID_EMAIL_DOMAINS = "invalidEmailInIdAccountString";
    private static List<String> invalidDomains;

    public MessageLocator messageLocator;
    private UserNotificationProvider notiProvider;

    // Tool session attribute name used to schedule a whole page refresh.
    public static final String ATTR_TOP_REFRESH = "sakai.vppa.top.refresh";

    /** Sink for user-facing validation/error messages. */
    public TargettedMessageList targettedMessageList;
    public void setTargettedMessageList(TargettedMessageList targettedMessageList) {
        this.targettedMessageList = targettedMessageList;
    }

    public Site site = null;

    public String csrfToken = null;
    /** Returns the CSRF token stored in the current session, or "" when absent. */
    public String getCsrfToken() {
        Object sessionAttr = sessionManager.getCurrentSession().getAttribute(UsageSessionService.SAKAI_CSRF_SESSION_ATTRIBUTE);
        return (sessionAttr!=null)?sessionAttr.toString():"";
    }

    /** Raw textarea input of official (institutional) account ids, one per line. */
    public String officialAccountParticipant = null;
    public String getOfficialAccountParticipant() {
        return officialAccountParticipant;
    }

    private UserDirectoryService userDirectoryService;
    public void setUserDirectoryService(UserDirectoryService userDirectoryService) {
        this.userDirectoryService = userDirectoryService;
    }

    public void setCsrfToken(String csrfToken) {
        this.csrfToken = csrfToken;
    }

    public void setOfficialAccountParticipant(String officialAccountParticipant) {
        this.officialAccountParticipant = officialAccountParticipant;
    }

    /** Raw textarea input of guest (email-address) accounts, one per line. */
    public String nonOfficialAccountParticipant = null;
    public String getNonOfficialAccountParticipant() {
        return nonOfficialAccountParticipant;
    }
    public void setNonOfficialAccountParticipant( String nonOfficialAccountParticipant) {
        this.nonOfficialAccountParticipant = nonOfficialAccountParticipant;
    }

    ValidationLogic validationLogic;
    public void setValidationLogic(ValidationLogic validationLogic) {
        this.validationLogic = validationLogic;
    }

    // for eids inside this list, don't look them up as email ids
    public List<String> officialAccountEidOnly = new ArrayList<>();
    public List<String> getOfficialAccountEidOnly() {
        return officialAccountEidOnly;
    }
    public void setOfficialAccountEidOnly(List<String> officialAccountEidOnly) {
        this.officialAccountEidOnly = officialAccountEidOnly;
    }

    /* whether the role choice is for same role or different role */
    public String roleChoice = "sameRole";
    public String getRoleChoice() {
        return roleChoice;
    }
    public void setRoleChoice(String roleChoice) {
        this.roleChoice = roleChoice;
    }

    /* whether the same role is used for all users */
    public String sameRoleChoice = null;
    public String getSameRoleChoice() {
        return sameRoleChoice;
    }
    public void setSameRoleChoice(String sameRoleChoice) {
        this.sameRoleChoice = sameRoleChoice;
    }

    /* status choice ("active" by default) */
    public String statusChoice = "active";
    public String getStatusChoice() {
        return statusChoice;
    }
    public void setStatusChoice(String sChoice) {
        this.statusChoice = sChoice;
    }

    /* the email notification setting (stored as a boolean string) */
    public String emailNotiChoice = Boolean.FALSE.toString();
    public String getEmailNotiChoice() {
        return emailNotiChoice;
    }
    public void setEmailNotiChoice(String emailNotiChoice) {
        this.emailNotiChoice = emailNotiChoice;
    }

    /** realm for the site **/
    public AuthzGroup realm = null;
    public String siteId = null;

    /** the role set allowed for the site **/
    public List<Role> roles = new ArrayList<>();
    /** Lazily initializes and returns the sorted list of roles allowed for this site. */
    public List<Role> getRoles() {
        if (roles.isEmpty()) init();
        Collections.sort(roles);
        return roles;
    }
    public void setRoles (List<Role> roles) {
        this.roles = roles;
    }

    /** the users selected in the wizard, with their chosen role */
    public List<UserRoleEntry> userRoleEntries = new ArrayList<>();

    /** Returns the role chosen for the given user eid, or "" when unknown. */
    public String getUserRole(String userId) {
        String rv = "";
        if (userRoleEntries != null) {
            for (UserRoleEntry entry:userRoleEntries) {
                if (entry.userEId.equals(userId)) {
                    rv = entry.role;
                }
            }
        }
        return rv;
    }

    /** Returns the eids of all users currently selected in the wizard. */
    public List<String> getUsers() {
        List<String> rv = new ArrayList<>();
        if (userRoleEntries != null) {
            for (UserRoleEntry entry:userRoleEntries) {
                rv.add(entry.userEId);
            }
        }
        return rv;
    }

    /**
     * Initialization method, just gets the current site in preparation for other calls
     */
    public void init() {
        if (site == null) {
            try {
                siteId = sessionManager.getCurrentToolSession()
                        .getAttribute(HELPER_ID + ".siteId").toString();
            } catch (java.lang.NullPointerException npe) {
                log.error( "Site ID wasn't set in the helper call!!", npe );
            }

            if (siteId == null) {
                // fall back to the current tool placement's context
                siteId = toolManager.getCurrentPlacement().getContext();
            }

            try {
                site = siteService.getSite(siteId);
                realm = authzGroupService.getAuthzGroup(siteService.siteReference(siteId));
                // SAK-23257: restrict to the roles this site type may assign
                roles = SiteParticipantHelper.getAllowedRoles( site.getType(), realm.getRoles() );
            } catch (IdUnusedException | GroupNotDefinedException e) {
                log.error( "The siteId we were given was bogus", e );
            }
        }

        // SAK-29711: refresh the rejected email-domain list from configuration
        invalidDomains = Arrays.asList( ArrayUtils.nullToEmpty( org.sakaiproject.component.cover.ServerConfigurationService.getStrings( SAK_PROP_INVALID_EMAIL_DOMAINS ) ) );
    }

    /**
     * get the site title
     * @return the title of the current site, or "" when no site is resolved
     */
    public String getSiteTitle() {
        String rv = "";
        if (site == null) init();
        if (site != null) rv = site.getTitle();
        return rv;
    }

    /**
     * is current site a course site?
     * @return true only when a site is resolved and its type is a course type
     */
    public boolean isCourseSite() {
        return site != null ? SiteTypeUtil.isCourseSite(site.getType()): false;
    }

    /**
     * get the configuration string value
     * @param param configuration key
     * @return the configured value, or null when not set
     */
    public String getServerConfigurationString(String param) {
        return getServerConfigurationString(param, null);
    }

    /**
     * get the configuration string value
     * @param param configuration key
     * @param defaultValue value returned when the key is not configured
     * @return the configured value or the supplied default
     */
    public String getServerConfigurationString(String param, String defaultValue) {
        return serverConfigurationService.getString(param, defaultValue);
    }

    /**
     * Allows the Cancel button to return control to the tool calling this helper
     * @return the navigation case "done"
     */
    public String processCancel() {
        ToolSession session = sessionManager.getCurrentToolSession();
        session.setAttribute(ATTR_TOP_REFRESH, Boolean.TRUE);
        // Go to Site Info landing page on 'Cancel'
        setNextPage(SiteConstants.SITE_INFO_TEMPLATE_INDEX);
        resetTargettedMessageList();
        reset(); // clears all wizard input; reset() is defined later in this class
        return "done";
    }

    /*
     * Utility method; sets the template index (in the tool session) of the desired page to transfer the user to.
     */
    private void setNextPage(String nextPageTemplateIndex) {
        ToolSession session = sessionManager.getCurrentToolSession();
        session.setAttribute(SiteConstants.STATE_TEMPLATE_INDEX, nextPageTemplateIndex);
    }

    /** True when the posted CSRF token matches the one in the session. */
    private boolean validCsrfToken() {
        return StringUtils.equals(csrfToken, getCsrfToken());
    }

    /**
     * get role choice and go to difference html page based on that
     * @return "" to stay on the page (error), otherwise the role-choice case
     */
    public String processGetParticipant() {
        if (!validCsrfToken()) {
            targettedMessageList.addMessage(new TargettedMessage("java.badcsrftoken", null, TargettedMessage.SEVERITY_ERROR));
            return "";
        }
        // reset errors
        resetTargettedMessageList();
        // reset user list
        resetUserRolesEntries();
        checkAddParticipant();
        if (targettedMessageList != null && targettedMessageList.size() > 0) {
            // there is error, remain on the same page
            return "";
        } else {
            // go to next step
            return roleChoice;
        }
    }

    private void resetTargettedMessageList() {
        targettedMessageList.clear();
    }

    private void resetUserRolesEntries() {
        userRoleEntries = new ArrayList<>();
    }

    /**
     * get the same role choice and continue
     * @return "continue" on success, null to stay on the page
     */
    public String processSameRoleContinue() {
        if (!validCsrfToken()) {
            targettedMessageList.addMessage(new TargettedMessage("java.badcsrftoken", null, TargettedMessage.SEVERITY_ERROR));
            return null;
        }
        targettedMessageList.clear();
        if (sameRoleChoice == null) {
            targettedMessageList.addMessage(new TargettedMessage("java.pleasechoose", null, TargettedMessage.SEVERITY_ERROR));
            return null;
        } else {
            resetTargettedMessageList();
            // if user doesn't have full rights, don't let him add one with site update
            if (!authzGroupService.allowUpdate("/site/" + siteId)) {
                if (realm == null) init();
                Role r = realm.getRole(sameRoleChoice);
                if (r != null && r.isAllowed("site.upd")) {
                    targettedMessageList.addMessage(new TargettedMessage("java.roleperm", new Object[] { sameRoleChoice }, TargettedMessage.SEVERITY_ERROR));
                    return null;
                }
            }
            // apply the single chosen role to every selected user
            if (userRoleEntries != null) {
                for (UserRoleEntry entry:userRoleEntries) {
                    entry.role = sameRoleChoice;
                }
            }
            return "continue";
        }
    }

    /**
     * back to the first add participant page
     * @return the navigation case "back"
     */
    public String processSameRoleBack() {
        resetTargettedMessageList();
        return "back";
    }

    /**
     * get the different role choice and continue
     * @return "continue" on success, null to stay on the page
     */
    public String processDifferentRoleContinue() {
        if (!validCsrfToken()) {
            targettedMessageList.addMessage(new TargettedMessage("java.badcsrftoken", null, TargettedMessage.SEVERITY_ERROR));
            return null;
        }
        resetTargettedMessageList();
        // without full update rights, none of the chosen roles may carry site.upd
        if (!authzGroupService.allowUpdate("/site/" + siteId)) {
            Set<String> roles = new HashSet<>();
            for (UserRoleEntry entry : userRoleEntries) roles.add(entry.role);
            for (String rolename: roles) {
                Role r = realm.getRole(rolename);
                if (r != null && r.isAllowed("site.upd")) {
                    targettedMessageList.addMessage(new TargettedMessage("java.roleperm", new Object[] { rolename }, TargettedMessage.SEVERITY_ERROR));
                    return null;
                }
            }
        }
        return "continue";
    }

    /**
     * back to the first add participant page
     * @return the navigation case "back"
     */
    public String processDifferentRoleBack() {
        resetTargettedMessageList();
        return "back";
    }

    /**
     * get the email noti choice and continue
     * @return "continue" on success, "" on a bad CSRF token
     */
    public String processEmailNotiContinue() {
        if (!validCsrfToken()) {
            targettedMessageList.addMessage(new TargettedMessage("java.badcsrftoken", null, TargettedMessage.SEVERITY_ERROR));
            return "";
        }
        resetTargettedMessageList();
        return "continue";
    }

    /**
     * back to the previous role choice page
     * @return the back case matching the earlier role choice
     */
    public String processEmailNotiBack() {
        resetTargettedMessageList();
        if ("sameRole".equals(roleChoice)) {
            return "backSameRole";
        } else {
            return "backDifferentRole";
        }
    }

    /**
     * whether the eId is considered of official account
     * @param eId the entered account id
     * @return true when the id contains no "@" (i.e. not a guest email)
     */
    private boolean isOfficialAccount(String eId) {
        return !eId.contains( EMAIL_CHAR );
    }

    /*
     * Given a list of user eids, add users to realm. If the user account does
     * not exist yet inside the user directory, assign role to it.
     * @return A list of eids for successfully added users
     */
    private List<String> addUsersRealm( boolean notify) {
        // return the list of user eids for successfully added user
        List<String> addedUserEIds = new ArrayList<>();
        // this list contains all added user, their roles, and active status
        List<String> addedUserInfos = new ArrayList<>();

        if (userRoleEntries != null && !userRoleEntries.isEmpty()) {
            if (site == null) init();
            if (site != null) {
                // get realm object
                String realmId = site.getReference();
                try {
                    AuthzGroup realmEdit = authzGroupService.getAuthzGroup(realmId);
                    boolean allowUpdate = authzGroupService.allowUpdate(realmId);
                    // roles already cleared for this request (avoids re-checking)
                    Set<String>okRoles = new HashSet<>();

                    // List used for user auditing
                    List<String[]> userAuditList = new ArrayList<>();

                    for (UserRoleEntry entry: userRoleEntries) {
                        String eId = entry.userEId;
                        String role =entry.role;
                        // this check should never trigger, as we check it earlier
                        // however I'm worried about users manually calling this page directly
                        if (!allowUpdate && !okRoles.contains(role)) {
                            Role r = realmEdit.getRole(role);
                            if (r != null && r.isAllowed("site.upd")) {
                                targettedMessageList.addMessage(new TargettedMessage("java.roleperm", new Object[] { role }, TargettedMessage.SEVERITY_ERROR));
                                continue;
                            }
                            okRoles.add(role);
                        }

                        // SAK-23257 - display an error message if the new role is in the restricted role list
                        String siteType = site.getType();
                        Role r = realmEdit.getRole( role );
                        if( !SiteParticipantHelper.getAllowedRoles( siteType, realm.getRoles() ).contains( r ) ) {
                            targettedMessageList.addMessage( new TargettedMessage( "java.roleperm", new Object[] { role }, TargettedMessage.SEVERITY_ERROR ) );
                            continue;
                        }

                        try {
                            User user = userDirectoryService.getUserByEid(eId);
                            if (authzGroupService.allowUpdate(realmId) || siteService.allowUpdateSiteMembership(site.getId())) {
                                realmEdit.addMember(user.getId(), role, statusChoice.equals("active"), false);
                                addedUserEIds.add(eId);
                                addedUserInfos.add("uid=" + user.getId() + ";role=" + role + ";active=" + statusChoice.equals("active") + ";provided=false;siteId=" + site.getId());

                                // Add the user to the list for the User Auditing Event Logger
                                String currentUserId = userDirectoryService.getUserEid(sessionManager.getCurrentSessionUserId());
                                String[] userAuditString = {site.getId(),eId,role,UserAuditService.USER_AUDIT_ACTION_ADD,userAuditRegistration.getDatabaseSourceKey(),currentUserId};
                                userAuditList.add(userAuditString);

                                // send notification
                                if (notify) {
                                    // send notification email
                                    notiProvider.notifyAddedParticipant(!isOfficialAccount(eId), user, site);
                                }
                            }
                        } catch (UserNotDefinedException e) {
                            targettedMessageList.addMessage(new TargettedMessage("java.account", new Object[] { eId }, TargettedMessage.SEVERITY_INFO));
                            log.debug(this + ".addUsersRealm: cannot find user with eid= " + eId, e);
                        } // try
                    } // for

                    try {
                        authzGroupService.save(realmEdit);

                        // do the audit logging - Doing this in one bulk call to the database will cause the actual audit stamp to be off by maybe 1 second at the most
                        // but seems to be a better solution than call this multiple time for every update
                        if (!userAuditList.isEmpty()) {
                            userAuditRegistration.addToUserAuditing(userAuditList);
                        }

                        // post event about adding participant
                        EventTrackingService.post(EventTrackingService.newEvent(SiteService.SECURE_UPDATE_SITE_MEMBERSHIP, realmEdit.getId(),false));
                        // check the configuration setting, whether logging membership change at individual level is allowed
                        if (serverConfigurationService.getBoolean(SiteHelper.WSETUP_TRACK_USER_MEMBERSHIP_CHANGE, true)) {
                            for(String userInfo : addedUserInfos) {
                                // post the add event for each added participant
                                EventTrackingService.post(EventTrackingService.newEvent(SiteService.EVENT_USER_SITE_MEMBERSHIP_ADD, userInfo, true));
                            }
                        }
                    } catch (GroupNotDefinedException ee) {
                        targettedMessageList.addMessage(new TargettedMessage("java.realm",new Object[] { realmId }, TargettedMessage.SEVERITY_INFO));
                        log.warn(this + ".addUsersRealm: cannot find realm for" + realmId, ee);
                    } catch (AuthzPermissionException ee) {
                        targettedMessageList.addMessage(new TargettedMessage("java.permeditsite",new Object[] 
{ realmId }, TargettedMessage.SEVERITY_INFO)); log.warn(this + ".addUsersRealm: don't have permission to edit realm " + realmId, ee); } } catch (GroupNotDefinedException eee) { targettedMessageList.addMessage(new TargettedMessage("java.realm",new Object[] { realmId }, TargettedMessage.SEVERITY_INFO)); log.warn(this + ".addUsersRealm: cannot find realm for " + realmId, eee); } catch (Exception eee) { log.warn(this + ".addUsersRealm: " + eee.getMessage() + " realmId=" + realmId, eee); } } } return addedUserEIds; } // addUsersRealm /** * get the confirm choice and continue * @return */ public String processConfirmContinue() { if (!validCsrfToken()) { targettedMessageList.addMessage(new TargettedMessage("java.badcsrftoken", null, TargettedMessage.SEVERITY_ERROR)); } List<String> validationUsers = new ArrayList<>(); resetTargettedMessageList(); if (site == null) init(); for (UserRoleEntry entry:userRoleEntries) { String eId = entry.userEId; if (isOfficialAccount(eId)) { // if this is a officialAccount } else { // if this is an nonOfficialAccount try { userDirectoryService.getUserByEid(eId); } catch (UserNotDefinedException e) { // if there is no such user yet, add the user try { UserEdit uEdit = userDirectoryService .addUser(null, eId); // set email address uEdit.setEmail(eId); // set the guest user type uEdit.setType("guest"); // set the guest first name String firstName = entry.firstName; if (firstName != null && firstName.length() > 0) uEdit.setFirstName(entry.firstName); // set the guest last name String lastName = entry.firstName; if (lastName != null && lastName.length() > 0) uEdit.setLastName(entry.lastName); String pw = passwordFactory.generatePassword(); uEdit.setPassword(pw); // and save userDirectoryService.commitEdit(uEdit); boolean notifyNewUserEmail = (getServerConfigurationString("notifyNewUserEmail", Boolean.TRUE.toString())) .equalsIgnoreCase(Boolean.TRUE.toString()); boolean validateUsers = 
serverConfigurationService.getBoolean("siteManage.validateNewUsers", true); if (notifyNewUserEmail && !validateUsers) { notiProvider.notifyNewUserEmail(uEdit, pw, site); } else if (notifyNewUserEmail && validateUsers) { validationUsers.add(uEdit.getId()); } } catch (UserIdInvalidException ee) { targettedMessageList.addMessage(new TargettedMessage("java.isinval",new Object[] { eId }, TargettedMessage.SEVERITY_INFO)); log.warn(this + ".doAdd_participant: id " + eId + " is invalid", ee); } catch (UserAlreadyDefinedException ee) { targettedMessageList.addMessage(new TargettedMessage("java.beenused",new Object[] { eId }, TargettedMessage.SEVERITY_INFO)); log.warn(this + ".doAdd_participant: id " + eId + " has been used", ee); } catch (UserPermissionException ee) { targettedMessageList.addMessage(new TargettedMessage("java.haveadd",new Object[] { eId }, TargettedMessage.SEVERITY_INFO)); log.warn(this + ".doAdd_participant: You don't have permission to add " + eId, ee); } } } } // batch add and updates the successful added list List<String> addedParticipantEIds = addUsersRealm(Boolean.parseBoolean(emailNotiChoice)); // update the not added user list String notAddedOfficialAccounts = ""; String notAddedNonOfficialAccounts = ""; for (UserRoleEntry entry:userRoleEntries) { String iEId = entry.userEId; if (!addedParticipantEIds.contains(iEId)) { if (isOfficialAccount(iEId)) { // no email in eid notAddedOfficialAccounts = notAddedOfficialAccounts .concat(iEId + "\n"); } else { // email in eid notAddedNonOfficialAccounts = notAddedNonOfficialAccounts .concat(iEId + "\n"); } } } //finally send any account validations for( String userId : validationUsers ) { validationLogic.createValidationAccount(userId, true); } if (!addedParticipantEIds.isEmpty() && (!"".equals(notAddedOfficialAccounts) || !"".equals(notAddedNonOfficialAccounts))) { // at lease one officialAccount account or an nonOfficialAccount // account added, and there are also failures targettedMessageList.addMessage(new 
TargettedMessage("java.allusers", null, TargettedMessage.SEVERITY_INFO)); } if (targettedMessageList.size() == 0) { // time to reset user inputs reset(); // After succesfully adding participants, return to the 'Manage Participants' UI rather than whatever the previously selected tab was setNextPage(SiteConstants.MANAGE_PARTICIPANTS_TEMPLATE_INDEX); return "done"; } else { // there is error return "errorWithAddingParticipants"; } } /** * back to the email notification page * @return */ public String processConfirmBack() { resetTargettedMessageList(); return "back"; } /** * Gets the current tool * @return Tool */ public Tool getCurrentTool() { return toolManager.getCurrentTool(); } /** check the participant input **/ private void checkAddParticipant() { // get the participants to be added int i; if (site == null) init(); List<Participant> pList = new ArrayList<>(); HashSet<String> existingUsers = new HashSet<>(); // accept officialAccounts and/or nonOfficialAccount account names String officialAccounts; String nonOfficialAccounts; // check that there is something with which to work officialAccounts = StringUtils.trimToNull(officialAccountParticipant); nonOfficialAccounts = StringUtils.trimToNull(nonOfficialAccountParticipant); String updatedOfficialAccountParticipant = ""; String updatedNonOfficialAccountParticipant = ""; // if there is no eid or nonOfficialAccount entered if (officialAccounts == null && nonOfficialAccounts == null) { targettedMessageList.addMessage(new TargettedMessage("java.guest", null, TargettedMessage.SEVERITY_ERROR)); } String at = "@"; if (officialAccounts != null) { // adding officialAccounts String[] officialAccountArray = officialAccounts .split("\r\n"); for (i = 0; i < officialAccountArray.length; i++) { String currentOfficialAccount = officialAccountArray[i]; String officialAccount = StringUtils.trimToNull(currentOfficialAccount.replaceAll("[\t\r\n]", "")); // if there is some text, try to use it if (officialAccount != null) { // 
// automatically add nonOfficialAccount account
                    Participant participant = new Participant();
                    User u = null;
                    // eids of every directory user whose email matches (multi-match case)
                    StringBuilder eidsForAllMatches = new StringBuilder();
                    StringBuilder eidsForAllMatchesAlertBuffer = new StringBuilder();

                    if (!officialAccount.contains( at )) {
                        // is not of email format, then look up by eid only
                        try {
                            // look for user based on eid first
                            u = userDirectoryService.getUserByEid(officialAccount);
                        } catch (UserNotDefinedException e) {
                            log.debug(this + ".checkAddParticipant: " + messageLocator.getMessage("java.username",officialAccount), e);
                        }
                    } else {
                        // is email. Need to lookup by both eid and email address
                        try {
                            // look for user based on eid first
                            u = userDirectoryService.getUserByEid(officialAccount);
                        } catch (UserNotDefinedException e) {
                            log.debug(this + ".checkAddParticipant: " + messageLocator.getMessage("java.username",officialAccount), e);
                        }

                        // Changed user lookup to satisfy BSP-1010 (jholtzman)
                        // continue to look for the user by their email address
                        // if the email address is not marked as eid only
                        if (!officialAccountEidOnly.contains(officialAccount)) {
                            Collection<User> usersWithEmail = userDirectoryService.findUsersByEmail(officialAccount);
                            if(usersWithEmail != null) {
                                if(usersWithEmail.isEmpty()) {
                                    // If the collection is empty, we didn't find any users with this email address
                                    log.debug("Unable to find users with email " + officialAccount);
                                } else if (usersWithEmail.size() == 1) {
                                    if (u == null) {
                                        // We found one user with this email address. Use it.
                                        u = (User)usersWithEmail.iterator().next();
                                    }
                                } else if (!usersWithEmail.isEmpty()) {
                                    // If we have multiple users with this email address, expand the list with all matching user's eids and let the instructor choose from them
                                    log.debug("Found multiple user with email " + officialAccount);
                                    // multiple matches
                                    for (User user : usersWithEmail) {
                                        String displayId = user.getDisplayId();
                                        eidsForAllMatches.append(displayId).append("\n");
                                        eidsForAllMatchesAlertBuffer.append(displayId).append(", ");
                                        // this is to mark the eid so that it won't be used again for email lookup in the future
                                        officialAccountEidOnly.add(user.getEid());
                                    }
                                    // trim the trailing ", " off the alert message
                                    String eidsForAllMatchesAlert = eidsForAllMatchesAlertBuffer.toString();
                                    if (eidsForAllMatchesAlert.endsWith(", ")) {
                                        eidsForAllMatchesAlert = eidsForAllMatchesAlert.substring(0, eidsForAllMatchesAlert.length()-2);
                                    }
                                    // update ui input
                                    updateOfficialAccountParticipant(officialAccount, u, eidsForAllMatches.toString());
                                    // show alert message
                                    targettedMessageList.addMessage(new TargettedMessage("java.username.multiple", new Object[] { officialAccount, eidsForAllMatchesAlert }, TargettedMessage.SEVERITY_INFO));
                                }
                            }
                        }
                    }

                    if (u != null) {
                        log.debug("found user with eid " + u.getEid());
                        if (site != null && site.getUserRole(u.getId()) != null) {
                            // user already exists in the site, cannot be added
                            // again
                            existingUsers.add(officialAccount);
                        } else {
                            participant.name = u.getDisplayName();
                            participant.uniqname = u.getEid();
                            participant.active = true;
                            pList.add(participant);
                        }

                        // update the userRoleTable
                        if (!getUsers().contains(officialAccount) && !existingUsers.contains(officialAccount)) {
                            userRoleEntries.add(new UserRoleEntry(u.getEid(), ""));
                            // not existed user, update account
                            updatedOfficialAccountParticipant += currentOfficialAccount+ "\n";
                        }
                    } else if (eidsForAllMatches.length() == 0) {
                        // not valid user
                        targettedMessageList.addMessage(new TargettedMessage("java.username", new Object[] { officialAccount }, TargettedMessage.SEVERITY_ERROR));
                    }
                }
            }
        } // officialAccounts

        if (nonOfficialAccounts != null) {
            // one guest entry per textarea line
            String[] nonOfficialAccountArray = nonOfficialAccounts.split("\r\n");
            for (i = 0; i < nonOfficialAccountArray.length; i++) {
                String currentNonOfficialAccount = nonOfficialAccountArray[i];
                String nonOfficialAccountAll = StringUtils.trimToNull(currentNonOfficialAccount.replaceAll("[\t\r\n]", ""));
                // there could be an empty line SAK-22497
                if (nonOfficialAccountAll == null) {
                    continue;
                }

                // the format of per user entry is: email address,first name,last name
                // comma separated
                String[] nonOfficialAccountParts = nonOfficialAccountAll.split(",");
                if (nonOfficialAccountParts.length > 3) {
                    // if the input contains more fields than "email address,first name,last name", show an alert
                    targettedMessageList.addMessage(new TargettedMessage("add.multiple.nonofficial.alert.more", new Object[] {nonOfficialAccountAll}, TargettedMessage.SEVERITY_ERROR));
                    break;
                }

                String userEid = nonOfficialAccountParts[0].trim();

                // get last name, if any
                // NOTE(review): parts[1] is read as the LAST name and parts[2] as the
                // FIRST name, contradicting the "email,first name,last name" format
                // comment above — confirm which order the UI actually documents.
                String userLastName = "";
                if (nonOfficialAccountParts.length > 1) {
                    userLastName = nonOfficialAccountParts[1].trim();
                }
                // get first name, if any
                String userFirstName = "";
                if (nonOfficialAccountParts.length > 2) {
                    userFirstName = nonOfficialAccountParts[2].trim();
                }

                // remove the trailing dots
                while (userEid != null && userEid.endsWith(".")) {
                    userEid = userEid.substring(0, userEid.length() - 1);
                }

                if (userEid != null && userEid.length() > 0) {
                    final String[] parts = userEid.split(at);
                    if (!userEid.contains( at )) {
                        // must be a valid email address
                        targettedMessageList.addMessage(new TargettedMessage("java.emailaddress", new Object[] { userEid }, TargettedMessage.SEVERITY_ERROR));
                    } else if ((parts.length != 2) || (parts[0].length() == 0)) {
                        // must have both id and address part
                        targettedMessageList.addMessage(new TargettedMessage("java.notemailid", new Object[] { userEid }, TargettedMessage.SEVERITY_ERROR));
                    } else if (!EmailValidator.getInstance().isValid(userEid)) {
                        // commons-validator rejected the address outright
                        targettedMessageList.addMessage(new TargettedMessage("java.emailaddress", new Object[] { userEid }, TargettedMessage.SEVERITY_ERROR));
                        targettedMessageList.addMessage(new TargettedMessage("java.theemail", "no text"));
                    }
                    // SAK-29711: reject configured invalid email domains
                    else if( !isValidDomain( parts[1] ) ) {
                        // find which configured domain suffix matched, for the error message
                        String offendingDomain = (String) CollectionUtils.find( invalidDomains, new Predicate() {
                            @Override
                            public boolean evaluate( Object obj ) {
                                return parts[1].endsWith( (String) obj );
                            }
                        });
                        targettedMessageList.addMessage( new TargettedMessage( "nonOfficialAccount.invalidEmailDomain", new Object[] { offendingDomain }, TargettedMessage.SEVERITY_ERROR ) );
                    } else if (!isValidMail(userEid)) {
                        // must be a valid email address
                        targettedMessageList.addMessage(new TargettedMessage("java.emailaddress", new Object[] { userEid }, TargettedMessage.SEVERITY_ERROR));
                    } else {
                        Participant participant = new Participant();
                        try {
                            // if the nonOfficialAccount user already exists
                            User u = userDirectoryService.getUserByEid(userEid);
                            if (site != null && site.getUserRole(u.getId()) != null) {
                                // user already exists in the site, cannot be
                                // added again
                                existingUsers.add(userEid);
                            } else {
                                participant.name = u.getDisplayName();
                                participant.uniqname = userEid;
                                participant.active = true;
                                pList.add(participant);
                            }
                        } catch (UserNotDefinedException e) {
                            log.debug("no user with eid: " + userEid);
                            /*
                             * The account may exist with a different eid
                             */
                            User u = null;
                            Collection<User> usersWithEmail = userDirectoryService.findUsersByEmail(userEid);
                            if(usersWithEmail != null) {
                                log.debug("found a collection of matching email users: " + usersWithEmail.size());
                                if(usersWithEmail.isEmpty()) {
                                    // If the collection is empty, we didn't find any users with this email address
                                    log.info("Unable to find users with email " + userEid);
                                } else if (usersWithEmail.size() == 1) {
                                    // We found one user with this email address. Use it.
                                    u = (User)usersWithEmail.iterator().next();
                                } else if (usersWithEmail.size() > 1) {
                                    // If we have multiple users with this email address, pick one and log this error condition
                                    // TODO Should we not pick a user? Throw an exception?
                                    log.warn("Found multiple user with email " + userEid);
                                    u = (User)usersWithEmail.iterator().next();
                                }
                            }

                            if (u == null) {
                                // if the nonOfficialAccount user is not in the system
                                // yet
                                participant.name = userEid;
                                participant.uniqname = userEid; // TODO: what would the UDS case this name to? -ggolden
                                participant.active = true;
                                if (!userDirectoryService.allowAddUser()) {
                                    targettedMessageList.addMessage(new TargettedMessage("java.haveadd",new Object[] { userEid }, TargettedMessage.SEVERITY_ERROR));
                                    // NOTE(review): '+' binds tighter than '!=' and '?:', so this
                                    // statement evaluates as ((... + getCurrentUser()) != null)
                                    // ? getEid() : ("" + ...) — the logged text is almost certainly
                                    // not what was intended; needs parentheses.
                                    log.warn(this + ".checkAddParticipant: user" + userDirectoryService.getCurrentUser()!= null ? userDirectoryService.getCurrentUser().getEid():"" + " don't have permission to add " + userEid);
                                }
                            } else {
                                if (site != null && site.getUserRole(u.getId()) != null) {
                                    // user already exists in the site, cannot be added
                                    // again
                                    existingUsers.add(userEid);
                                } else {
                                    log.debug("adding: " + u.getDisplayName() + ", " + u.getEid());
                                    participant.name = u.getDisplayName();
                                    participant.uniqname = u.getEid();
                                    participant.active = true;
                                    userEid = u.getEid();
                                }
                            }
                            pList.add(participant);
                        }

                        // update the userRoleTable
                        if (!getUsers().contains(userEid) && !existingUsers.contains(userEid)) {
                            userRoleEntries.add(new UserRoleEntry(userEid, "", userFirstName, userLastName));
                            // not existed user, update account
                            updatedNonOfficialAccountParticipant += currentNonOfficialAccount+ "\n";
                        }
                    }
                } // if
            } // for
        } // nonOfficialAccounts

        // update participant attributes
        officialAccountParticipant = updatedOfficialAccountParticipant;
        nonOfficialAccountParticipant = updatedNonOfficialAccountParticipant;

        // NOTE(review): roleChoice is set to "sameRole"/"differentRole" elsewhere in
        // this class — confirm whether "same_role" can ever match here.
        if ("same_role".equals(roleChoice)) {
            targettedMessageList.addMessage(new TargettedMessage("java.roletype", null, TargettedMessage.SEVERITY_ERROR));
        }
        // remove 
duplicate or existing user from participant list pList = removeDuplicateParticipants(pList); // if the add participant list is empty after above removal, stay in the // current page // add alert for attempting to add existing site user(s) if (!existingUsers.isEmpty()) { int count = 0; String accounts = ""; for (Iterator<String> eIterator = existingUsers.iterator(); eIterator .hasNext();) { if (count == 0) { accounts = (String) eIterator.next(); } else { accounts = accounts + ", " + (String) eIterator.next(); } count++; } targettedMessageList.addMessage(new TargettedMessage("add.existingpart.1", new Object[]{accounts}, TargettedMessage.SEVERITY_INFO)); if (!pList.isEmpty()) { // continue add targettedMessageList.addMessage(new TargettedMessage("add.existingpart.2", null, TargettedMessage.SEVERITY_INFO)); } else { // no valid user input left, prompt for more targettedMessageList.addMessage(new TargettedMessage("java.guest", null, TargettedMessage.SEVERITY_ERROR)); } } } // checkAddParticipant /** * Checks if the given domain ends with any of the invalid domains listed in sakai.properties * @param domain the domain suffix to be checked * @return true if the domain is valid; false otherwise */ private boolean isValidDomain( String domain ) { return !StringUtils.endsWithAny( domain, invalidDomains.toArray( new String[invalidDomains.size()] ) ); } private boolean isValidMail(String email) { if (email == null || "".equals(email)) return false; email = email.trim(); EmailValidator ev = EmailValidator.getInstance(); return ev.isValid(email); } private List<Participant> removeDuplicateParticipants(List<Participant> pList) { // check the uniqueness of list member Set<String> s = new HashSet<>(); Set<String> uniqnameSet = new HashSet<>(); List<Participant> rv = new ArrayList<>(); for( Participant pList1 : pList ) { Participant p = (Participant) pList1; if (!uniqnameSet.contains(p.getUniqname())) { // no entry for the account yet rv.add(p); uniqnameSet.add(p.getUniqname()); } 
else { // found duplicates s.add(p.getUniqname()); } } if (!s.isEmpty()) { int count = 0; String accounts = ""; for (Iterator<String> i = s.iterator(); i.hasNext();) { if (count == 0) { accounts = (String) i.next(); } else { accounts = accounts + ", " + (String) i.next(); } count++; } targettedMessageList.addMessage(new TargettedMessage(count==1?"add.duplicatedpart.single":"add.duplicatedpart",new Object[]{accounts}, TargettedMessage.SEVERITY_INFO)); } return rv; } private void reset() { site = null; siteId = null; realm = null; roles.clear(); officialAccountParticipant = null; officialAccountEidOnly = new ArrayList<>(); nonOfficialAccountParticipant = null; roleChoice = "sameRole"; statusChoice = "active"; sameRoleChoice = null; emailNotiChoice = Boolean.FALSE.toString(); userRoleEntries = new ArrayList<>(); } public void setNotiProvider(UserNotificationProvider notiProvider) { this.notiProvider = notiProvider; } /** * This is to update the handler's officialAccountParticipant attribute when encountering multiple users with same email address. * The visual result is that the official account list will be expanded to include eids from all matches * * @param officialAccount * @param u * @param eidsForAllMatches */ protected void updateOfficialAccountParticipant(String officialAccount, User u, String eidsForAllMatches) { if (u != null && !eidsForAllMatches.contains(u.getEid())) { eidsForAllMatches = u.getEid() + "\n" + eidsForAllMatches; } // replace the original official account entry with eids from all matches. 
officialAccountParticipant = officialAccountParticipant.replaceAll(officialAccount, eidsForAllMatches); } /** * get the settings whether non official account users are allowed or not * site-wide settings can override the system-wide settings * @return */ public String getAllowNonOfficialAccount() { // get system setting first String rv = getServerConfigurationString("nonOfficialAccount", "true"); // get site property, if different, it overrides sakai.properties setting if (site == null) { log.error("Could not get site and thus, site properties."); } else { String allowThisSiteAddNonOfficialParticipant = site.getProperties().getProperty("nonOfficialAccount"); log.debug("Site non-official allowed? "+allowThisSiteAddNonOfficialParticipant); if (allowThisSiteAddNonOfficialParticipant != null && !allowThisSiteAddNonOfficialParticipant.equalsIgnoreCase(rv)) { rv = allowThisSiteAddNonOfficialParticipant; } } return rv; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.dstream.tez;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.stream.Stream;

import org.apache.tez.dag.api.Vertex;

import io.dstream.DStreamConstants;
import io.dstream.SerializableStreamAssets.SerFunction;
import io.dstream.SerializableStreamAssets.SerSupplier;
import io.dstream.support.Classifier;
import io.dstream.support.HashClassifier;
import io.dstream.utils.ReflectionUtils;

/**
 * Description of a single {@link Task} from which a Tez {@link Vertex} is created.
 * Holds the task's identity (id/name/operation), the stream-processing function to
 * apply, its parallelism and {@link Classifier}, its source metadata, and the chains
 * of tasks it depends on.
 */
public class TaskDescriptor {

	private final String name;

	private final int id;

	private final TaskDescriptor previousTaskDescriptor;

	private final String operationName;

	private SerFunction<Stream<?>, Stream<?>> function;

	private Classifier classifier;

	private int parallelism = 1;

	private Class<?> sourceElementType;

	private SerSupplier<?> sourceSupplier;

	private List<List<TaskDescriptor>> dependentTasksChains;

	private Class<?> inputFormatClass;

	/**
	 * Creates the description of a {@link Task} from which a Tez {@link Vertex} is built.
	 * <p>
	 * Parallelism and the {@link Classifier} are derived from the
	 * {@link DStreamConstants#PARALLELISM} and {@link DStreamConstants#CLASSIFIER}
	 * execution-config properties. Note the Tez-specific split of responsibilities:
	 * the partitioning function must run on the <i>previous</i> task (Vertex), while
	 * the integer parallelism applies to the <i>current</i> one — hence this descriptor
	 * keeps a reference to the previous {@link TaskDescriptor}.
	 *
	 * @param id numeric id of this task
	 * @param name display name of this task
	 * @param operationName name of the stream operation this task performs
	 * @param executionConfig properties that may carry parallelism/classifier settings
	 * @param previousTaskDescriptor descriptor of the upstream task; may be null for the first task
	 */
	public TaskDescriptor(int id, String name, String operationName, Properties executionConfig, TaskDescriptor previousTaskDescriptor){
		this.name = name;
		this.id = id;
		this.operationName = operationName;
		this.previousTaskDescriptor = previousTaskDescriptor;

		String parallelismProperty = executionConfig.getProperty(DStreamConstants.PARALLELISM);
		String classifierClassName = executionConfig.getProperty(DStreamConstants.CLASSIFIER);

		if (parallelismProperty != null){
			this.parallelism = Integer.parseInt(parallelismProperty);
		}

		// Either instantiate the configured Classifier reflectively (it must expose an
		// (int) constructor taking the parallelism) or fall back to hash partitioning.
		final Classifier resolvedClassifier;
		if (classifierClassName == null){
			resolvedClassifier = new HashClassifier(this.parallelism);
		}
		else {
			resolvedClassifier = ReflectionUtils.newInstance(classifierClassName,
					new Class[]{int.class}, new Object[]{this.parallelism});
		}
		this.setClassifier(resolvedClassifier);
	}

	/**
	 * @return descriptor of the upstream task, or null if this is the first task
	 */
	public TaskDescriptor getPreviousTaskDescriptor() {
		return previousTaskDescriptor;
	}

	/**
	 * @return the parallelism of this task (defaults to 1)
	 */
	public int getParallelism() {
		return parallelism;
	}

	/**
	 * @return the chains of dependent tasks, or null if none were added
	 */
	public List<List<TaskDescriptor>> getDependentTasksChains() {
		return this.dependentTasksChains;
	}

	/**
	 * @return name of the stream operation this task performs
	 */
	public String getOperationName() {
		return operationName;
	}

	/**
	 * Registers a chain of tasks this task depends on; the backing list is
	 * created lazily on first use.
	 *
	 * @param dependentTasksChain chain of dependent task descriptors
	 */
	public void addDependentTasksChain(List<TaskDescriptor> dependentTasksChain) {
		if (this.dependentTasksChains == null){
			this.dependentTasksChains = new ArrayList<>();
		}
		this.dependentTasksChains.add(dependentTasksChain);
	}

	/**
	 * @return the input format class for this task's source, or null if not set
	 */
	public Class<?> getInputFormatClass() {
		return inputFormatClass;
	}

	/**
	 * @param inputFormatClass input format class for this task's source
	 */
	public void setInputFormatClass(Class<?> inputFormatClass) {
		this.inputFormatClass = inputFormatClass;
	}

	/**
	 * @return supplier of this task's source, or null if not set
	 */
	public SerSupplier<?> getSourceSupplier() {
		return this.sourceSupplier;
	}

	/**
	 * @return numeric id of this task
	 */
	public int getId() {
		return this.id;
	}

	/**
	 * @return the {@link Classifier} assigned to this task
	 */
	public Classifier getClassifier() {
		return this.classifier;
	}

	/**
	 * @return the composed stream-processing function, or null if none set yet
	 */
	public SerFunction<Stream<?>, Stream<?>> getFunction() {
		return this.function;
	}

	/**
	 * Composes the given function <i>before</i> the current one
	 * (i.e., {@code current.compose(cFunction)}); simply adopts it if none is set.
	 *
	 * @param cFunction function to run before the current function
	 */
	public void compose(SerFunction<Stream<?>, Stream<?>> cFunction) {
		this.function = (this.function == null)
				? cFunction
				: this.function.compose(cFunction);
	}

	/**
	 * Composes the given function <i>after</i> the current one
	 * (i.e., {@code aFunction.compose(current)}); simply adopts it if none is set.
	 *
	 * @param aFunction function to run after the current function
	 */
	public void andThen(SerFunction<Stream<?>, Stream<?>> aFunction) {
		this.function = (this.function == null)
				? aFunction
				: aFunction.compose(this.function);
	}

	/**
	 * @return display name of this task
	 */
	public String getName() {
		return name;
	}

	/**
	 * @return element type of this task's source, or null if not set
	 */
	public Class<?> getSourceElementType() {
		return sourceElementType;
	}

	/**
	 * Package-private: records the element type of this task's source.
	 */
	void setSourceElementType(Class<?> sourceElementType) {
		this.sourceElementType = sourceElementType;
	}

	/**
	 * Package-private: assigns the {@link Classifier}. Also called by the
	 * constructor of the <i>next</i> task descriptor (see constructor javadoc).
	 */
	void setClassifier(Classifier classifier) {
		this.classifier = classifier;
	}

	/**
	 * Package-private: records the supplier of this task's source.
	 */
	void setSourceSupplier(SerSupplier<?> sourceSupplier) {
		this.sourceSupplier = sourceSupplier;
	}
}
/**
 * Copyright (C) FuseSource, Inc.
 * http://fusesource.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.fabric.itests.paxexam.support;

import org.fusesource.fabric.api.Container;
import org.fusesource.fabric.api.ContainerRegistration;
import org.fusesource.fabric.api.CreateChildContainerOptions;
import org.fusesource.fabric.api.CreateContainerBasicOptions;
import org.fusesource.fabric.api.FabricException;
import org.fusesource.fabric.api.FabricService;
import org.fusesource.fabric.api.Profile;
import org.fusesource.fabric.api.Version;
import org.fusesource.fabric.service.jclouds.CreateJCloudsContainerOptions;
import org.fusesource.fabric.service.ssh.CreateSshContainerOptions;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import static org.fusesource.tooling.testing.pax.exam.karaf.ServiceLocator.getOsgiService;
import static org.fusesource.fabric.itests.paxexam.support.ServiceProxy.getOsgiServiceProxy;

/**
 * Base builder for creating Fabric {@link Container}s in integration tests.
 * Concrete subclasses ({@link ChildContainerBuilder}, {@link JcloudsContainerBuilder},
 * {@link SshContainerBuilder}) supply the container-type-specific options builder.
 * The container type and count can be overridden via the
 * {@code FABRIC_ITEST_CONTAINER_TYPE} / {@code FABRIC_ITEST_CONTAINER_NUMBER}
 * system properties.
 */
public abstract class ContainerBuilder<T extends ContainerBuilder, B extends CreateContainerBasicOptions.Builder> {

    /** Maximum time (ms) to wait for a container-creation task to complete. */
    public static final Long CREATE_TIMEOUT = 10 * 60000L;
    /** Maximum time (ms) to wait for containers to provision. */
    public static final Long PROVISION_TIMEOUT = 5 * 60000L;
    public static final String CONTAINER_TYPE_PROPERTY = "FABRIC_ITEST_CONTAINER_TYPE";
    public static final String CONTAINER_NUMBER_PROPERTY = "FABRIC_ITEST_CONTAINER_NUMBER";

    /** All containers created through any builder; consumed by {@link #destroy()} and {@link #stop()}. */
    public static final Set<Container> CONTAINERS = new HashSet<Container>();

    private final B optionsBuilder;
    private final Set<String> profileNames = new HashSet<String>();
    private boolean waitForProvisioning;
    private boolean assertProvisioningResult;
    private long provisionTimeOut = PROVISION_TIMEOUT;
    private ExecutorService executorService = Executors.newCachedThreadPool();

    protected ContainerBuilder(B optionsBuilder) {
        this.optionsBuilder = optionsBuilder;
    }

    /**
     * Creates a builder for a single container, honoring the system-property overrides.
     *
     * @return a type-appropriate {@link ContainerBuilder}
     */
    public static ContainerBuilder create() {
        return create(1);
    }

    /**
     * Creates a builder for at least {@code minimumNumber} containers.
     *
     * @param minimumNumber the minimum number of containers to create
     * @return a type-appropriate {@link ContainerBuilder}
     */
    public static ContainerBuilder create(int minimumNumber) {
        return create(minimumNumber, 0);
    }

    /**
     * Creates a builder for between {@code minimumNumber} and {@code maximumNumber}
     * containers ({@code maximumNumber == 0} means "no maximum"). The actual count is
     * the larger of {@code minimumNumber} and the {@code FABRIC_ITEST_CONTAINER_NUMBER}
     * system property; when that exceeds the maximum, it falls back to the minimum.
     *
     * @param minimumNumber the minimum number of containers to create
     * @param maximumNumber the maximum number of containers to create, or 0 for unbounded
     * @return a type-appropriate {@link ContainerBuilder}
     */
    public static ContainerBuilder create(int minimumNumber, int maximumNumber) {
        String containerType = System.getProperty(CONTAINER_TYPE_PROPERTY, "child");
        int numberOfContainers = Math.max(minimumNumber, Integer.parseInt(System.getProperty(CONTAINER_NUMBER_PROPERTY, "1")));

        if (maximumNumber < numberOfContainers && maximumNumber != 0) {
            // NOTE(review): falls back to the minimum rather than clamping to the
            // maximum — kept as-is since callers may rely on it; confirm intent.
            numberOfContainers = minimumNumber;
        }

        if ("child".equals(containerType)) {
            return child(numberOfContainers);
        } else if ("jclouds".equals(containerType)) {
            return jclouds(numberOfContainers);
        } else if ("ssh".equals(containerType)) {
            return ssh(numberOfContainers);
        } else {
            // Unknown type: default to a single child container.
            return child(1);
        }
    }

    /**
     * Creates a {@link ChildContainerBuilder} for a single container.
     *
     * @return the builder
     */
    public static ChildContainerBuilder child() {
        return child(1);
    }

    /**
     * Creates a {@link ChildContainerBuilder}.
     *
     * @param numberOfContainers The number of {@link Container}s that the builder will create.
     * @return the builder
     */
    public static ChildContainerBuilder child(int numberOfContainers) {
        return new ChildContainerBuilder(CreateChildContainerOptions.builder().number(numberOfContainers));
    }

    /**
     * Creates a {@link JcloudsContainerBuilder}.
     *
     * @return the builder
     */
    public static JcloudsContainerBuilder jclouds() {
        return new JcloudsContainerBuilder(CreateJCloudsContainerOptions.builder());
    }

    /**
     * Creates a {@link JcloudsContainerBuilder}.
     *
     * @param numberOfContainers The number of {@link Container}s the builder will create.
     * @return the builder
     */
    public static JcloudsContainerBuilder jclouds(int numberOfContainers) {
        return new JcloudsContainerBuilder(CreateJCloudsContainerOptions.builder().number(numberOfContainers));
    }

    /**
     * Creates an {@link SshContainerBuilder} for a single container.
     *
     * @return the builder
     */
    public static SshContainerBuilder ssh() {
        return new SshContainerBuilder(CreateSshContainerOptions.builder());
    }

    /**
     * Creates an {@link SshContainerBuilder}.
     *
     * @param numberOfContainers The number of containers the builder will create.
     * @return the builder
     */
    public static SshContainerBuilder ssh(int numberOfContainers) {
        // BUGFIX: was hard-coded to .number(1), silently ignoring numberOfContainers.
        return new SshContainerBuilder(CreateSshContainerOptions.builder().number(numberOfContainers));
    }

    public ContainerBuilder<T, B> withName(String name) {
        getOptionsBuilder().name(name);
        return this;
    }

    public ContainerBuilder<T, B> withJvmOpts(String jvmOpts) {
        getOptionsBuilder().jvmOpts(jvmOpts);
        return this;
    }

    public ContainerBuilder<T, B> withResolver(String resolver) {
        getOptionsBuilder().resolver(resolver);
        return this;
    }

    /**
     * Adds a profile name to assign to the created containers.
     */
    public ContainerBuilder<T, B> withProfiles(String profile) {
        profileNames.add(profile);
        return this;
    }

    /**
     * Makes {@link #build()} block until the containers report a provisioning status.
     */
    public ContainerBuilder<T, B> waitForProvisioning() {
        this.waitForProvisioning = true;
        return this;
    }

    /**
     * Makes {@link #build()} assert that provisioning succeeded.
     */
    public ContainerBuilder<T, B> assertProvisioningResult() {
        this.assertProvisioningResult = true;
        return this;
    }

    public B getOptionsBuilder() {
        return optionsBuilder;
    }

    public ExecutorService getExecutorService() {
        return executorService;
    }

    /**
     * Submits an asynchronous container-creation task for the given options builder.
     * NOTE(review): the returned future is never registered in {@link #CONTAINERS};
     * callers are responsible for tracking containers created this way.
     *
     * @param builder the options builder describing the containers to create
     * @return a future yielding the created containers
     */
    public Future<Set<Container>> prepareAsync(B builder) {
        FabricService fabricService = getOsgiServiceProxy(FabricService.class);
        CompletionService<Set<Container>> completionService = new ExecutorCompletionService<Set<Container>>(executorService);
        return completionService.submit(new CreateContainerTask(fabricService, builder));
    }

    /**
     * Create the containers described by the given options builders, in parallel,
     * then optionally wait for / assert provisioning.
     *
     * @param buildersList one options builder per batch of containers to create
     * @return the created containers
     */
    public Set<Container> build(Collection<B> buildersList) {
        Set<Container> containers = new HashSet<Container>();
        FabricService fabricService = getOsgiServiceProxy(FabricService.class);
        CompletionService<Set<Container>> completionService = new ExecutorCompletionService<Set<Container>>(executorService);

        int tasks = 0;
        for (B options : buildersList) {
            options.profiles(profileNames);
            // Ensemble servers are not created remotely against an existing ZK.
            if (!options.isEnsembleServer()) {
                options.zookeeperUrl(fabricService.getZookeeperUrl());
                completionService.submit(new CreateContainerTask(fabricService, options));
                tasks++;
            }
        }

        try {
            for (int i = 0; i < tasks; i++) {
                Future<Set<Container>> futureContainerSet = completionService.poll(CREATE_TIMEOUT, TimeUnit.MILLISECONDS);
                // BUGFIX: poll() returns null on timeout; fail with a clear message
                // instead of an opaque NullPointerException.
                if (futureContainerSet == null) {
                    throw new IllegalStateException(
                            "Timed out after " + CREATE_TIMEOUT + "ms waiting for container creation");
                }
                Set<Container> containerSet = futureContainerSet.get();
                CONTAINERS.addAll(containerSet);
                containers.addAll(containerSet);
            }
            try {
                if (waitForProvisioning) {
                    Provision.containerStatus(containers, provisionTimeOut);
                }
                if (assertProvisioningResult) {
                    Provision.provisioningSuccess(containers, provisionTimeOut);
                }
            } catch (Exception e) {
                throw FabricException.launderThrowable(e);
            }
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
        return containers;
    }

    /**
     * Create the containers using this builder's own options.
     *
     * @return the created containers
     */
    public Set<Container> build() {
        // Ensure the registration service is up before creating containers.
        getOsgiService(ContainerRegistration.class);
        return build(Arrays.<B>asList(getOptionsBuilder()));
    }

    /**
     * Destroy all containers created through any builder. Failures on individual
     * containers are reported to stderr and otherwise ignored (best-effort cleanup).
     */
    public static void destroy() {
        FabricService fabricService = getOsgiServiceProxy(FabricService.class);
        for (Container c : CONTAINERS) {
            try {
                // We want to use the latest metadata
                Container updated = fabricService.getContainer(c.getId());
                updated.destroy();
            } catch (Exception ex) {
                ex.printStackTrace(System.err);
                // noop — best-effort cleanup must continue for remaining containers
            }
        }
    }

    /**
     * Stop all containers. The container directory will not get deleted.
     * Failures on individual containers are reported to stderr and otherwise ignored.
     */
    public static void stop() {
        FabricService fabricService = getOsgiServiceProxy(FabricService.class);
        for (Container c : CONTAINERS) {
            try {
                // We want to use the latest metadata
                Container updated = fabricService.getContainer(c.getId());
                updated.stop();
            } catch (Exception ex) {
                ex.printStackTrace(System.err);
                // noop — best-effort shutdown must continue for remaining containers
            }
        }
    }
}
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package org.reclipse.behavior.specification.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENamedElementImpl;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.fujaba.commons.identifier.impl.IdentifierImpl;
import org.reclipse.behavior.specification.BPArgument;
import org.reclipse.behavior.specification.BPMessage;
import org.reclipse.behavior.specification.BehavioralpatternPackage;
import org.reclipse.structure.specification.PSObject;

// NOTE(review): EMF-generated model implementation. All members tagged @generated
// will be overwritten if the model is regenerated — hand edits belong in separate
// (non-@generated) members, or the tag must be changed to "@generated NOT".
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>BP Argument</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link org.reclipse.behavior.specification.impl.BPArgumentImpl#getMessage <em>Message</em>}</li>
 *   <li>{@link org.reclipse.behavior.specification.impl.BPArgumentImpl#getStatement <em>Statement</em>}</li>
 *   <li>{@link org.reclipse.behavior.specification.impl.BPArgumentImpl#getType <em>Type</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class BPArgumentImpl extends IdentifierImpl implements BPArgument {
	/**
	 * The default value of the '{@link #getStatement() <em>Statement</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStatement()
	 * @generated
	 * @ordered
	 */
	protected static final String STATEMENT_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getStatement() <em>Statement</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getStatement()
	 * @generated
	 * @ordered
	 */
	protected String statement = STATEMENT_EDEFAULT;

	/**
	 * The cached value of the '{@link #getType() <em>Type</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getType()
	 * @generated
	 * @ordered
	 */
	protected PSObject type;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected BPArgumentImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return BehavioralpatternPackage.Literals.BP_ARGUMENT;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public BPMessage getMessage() {
		// Container-based feature: the owning message IS this object's eContainer().
		if (eContainerFeatureID() != BehavioralpatternPackage.BP_ARGUMENT__MESSAGE) return null;
		return (BPMessage)eContainer();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetMessage(BPMessage newMessage, NotificationChain msgs) {
		// Low-level container update; does not maintain the inverse reference itself.
		msgs = eBasicSetContainer((InternalEObject)newMessage, BehavioralpatternPackage.BP_ARGUMENT__MESSAGE, msgs);
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMessage(BPMessage newMessage) {
		// Full containment update: removes from the old container, adds to the
		// new message's ARGUMENTS list (inverse), then dispatches notifications.
		if (newMessage != eInternalContainer() || (eContainerFeatureID() != BehavioralpatternPackage.BP_ARGUMENT__MESSAGE && newMessage != null)) {
			if (EcoreUtil.isAncestor(this, newMessage))
				throw new IllegalArgumentException("Recursive containment not allowed for " + toString());
			NotificationChain msgs = null;
			if (eInternalContainer() != null)
				msgs = eBasicRemoveFromContainer(msgs);
			if (newMessage != null)
				msgs = ((InternalEObject)newMessage).eInverseAdd(this, BehavioralpatternPackage.BP_MESSAGE__ARGUMENTS, BPMessage.class, msgs);
			msgs = basicSetMessage(newMessage, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			// Container unchanged: still emit a SET notification ("touch").
			eNotify(new ENotificationImpl(this, Notification.SET, BehavioralpatternPackage.BP_ARGUMENT__MESSAGE, newMessage, newMessage));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getStatement() {
		return statement;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setStatement(String newStatement) {
		String oldStatement = statement;
		statement = newStatement;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BehavioralpatternPackage.BP_ARGUMENT__STATEMENT, oldStatement, statement));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public PSObject getType() {
		// Cross-document reference: resolve the proxy on first access and emit a
		// RESOLVE notification if the resolved object differs from the proxy.
		if (type != null && type.eIsProxy()) {
			InternalEObject oldType = (InternalEObject)type;
			type = (PSObject)eResolveProxy(oldType);
			if (type != oldType) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, BehavioralpatternPackage.BP_ARGUMENT__TYPE, oldType, type));
			}
		}
		return type;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public PSObject basicGetType() {
		// Non-resolving accessor: may return an unresolved proxy.
		return type;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setType(PSObject newType) {
		PSObject oldType = type;
		type = newType;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BehavioralpatternPackage.BP_ARGUMENT__TYPE, oldType, type));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		// Called by the framework when this object is added to a BPMessage's
		// ARGUMENTS list; detach from any previous container first.
		switch (featureID) {
			case BehavioralpatternPackage.BP_ARGUMENT__MESSAGE:
				if (eInternalContainer() != null)
					msgs = eBasicRemoveFromContainer(msgs);
				return basicSetMessage((BPMessage)otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case BehavioralpatternPackage.BP_ARGUMENT__MESSAGE:
				return basicSetMessage(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eBasicRemoveFromContainerFeature(NotificationChain msgs) {
		// Removes this argument from the inverse end (the message's ARGUMENTS list).
		switch (eContainerFeatureID()) {
			case BehavioralpatternPackage.BP_ARGUMENT__MESSAGE:
				return eInternalContainer().eInverseRemove(this, BehavioralpatternPackage.BP_MESSAGE__ARGUMENTS, BPMessage.class, msgs);
		}
		return super.eBasicRemoveFromContainerFeature(msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		// Reflective accessor used by the EMF framework (serialization, editing).
		switch (featureID) {
			case BehavioralpatternPackage.BP_ARGUMENT__MESSAGE:
				return getMessage();
			case BehavioralpatternPackage.BP_ARGUMENT__STATEMENT:
				return getStatement();
			case BehavioralpatternPackage.BP_ARGUMENT__TYPE:
				if (resolve) return getType();
				return basicGetType();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case BehavioralpatternPackage.BP_ARGUMENT__MESSAGE:
				setMessage((BPMessage)newValue);
				return;
			case BehavioralpatternPackage.BP_ARGUMENT__STATEMENT:
				setStatement((String)newValue);
				return;
			case BehavioralpatternPackage.BP_ARGUMENT__TYPE:
				setType((PSObject)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		// Resets each feature to its default value.
		switch (featureID) {
			case BehavioralpatternPackage.BP_ARGUMENT__MESSAGE:
				setMessage((BPMessage)null);
				return;
			case BehavioralpatternPackage.BP_ARGUMENT__STATEMENT:
				setStatement(STATEMENT_EDEFAULT);
				return;
			case BehavioralpatternPackage.BP_ARGUMENT__TYPE:
				setType((PSObject)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		// True when a feature differs from its default value.
		switch (featureID) {
			case BehavioralpatternPackage.BP_ARGUMENT__MESSAGE:
				return getMessage() != null;
			case BehavioralpatternPackage.BP_ARGUMENT__STATEMENT:
				return STATEMENT_EDEFAULT == null ? statement != null : !STATEMENT_EDEFAULT.equals(statement);
			case BehavioralpatternPackage.BP_ARGUMENT__TYPE:
				return type != null;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (statement: ");
		result.append(statement);
		result.append(')');
		return result.toString();
	}

} //BPArgumentImpl
/* * Copyright (C) 2013 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.sample.castcompanionlibrary.cast; import static com.google.sample.castcompanionlibrary.utils.LogUtils.LOGD; import static com.google.sample.castcompanionlibrary.utils.LogUtils.LOGE; import com.google.android.gms.cast.ApplicationMetadata; import com.google.android.gms.cast.Cast; import com.google.android.gms.cast.Cast.CastOptions.Builder; import com.google.android.gms.cast.CastDevice; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.GooglePlayServicesUtil; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.api.Status; import com.google.sample.castcompanionlibrary.cast.callbacks.DataCastConsumerImpl; import com.google.sample.castcompanionlibrary.cast.callbacks.IDataCastConsumer; import com.google.sample.castcompanionlibrary.cast.exceptions.CastException; import com.google.sample.castcompanionlibrary.cast.exceptions.NoConnectionException; import com.google.sample.castcompanionlibrary.cast.exceptions.TransientNetworkDisconnectionException; import com.google.sample.castcompanionlibrary.utils.LogUtils; import com.google.sample.castcompanionlibrary.utils.Utils; import android.content.Context; import android.support.v7.app.MediaRouteDialogFactory; import android.support.v7.media.MediaRouter.RouteInfo; import android.text.TextUtils; import java.io.IOException; import 
java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * A concrete subclass of {@link BaseCastManager} that is suitable for data-centric applications
 * that use multiple namespaces.
 * <p>
 * This is a singleton that needs to be "initialized" (by calling <code>initialize()</code>) prior
 * to usage. Subsequent to initialization, an easier way to get access to the singleton class is to
 * call a variant of <code>getInstance()</code>. After initialization, callers can enable any
 * available feature (all features are off by default). To do so, call <code>enableFeature()</code>
 * and pass an OR-ed expression built from one or more of the following constants:
 * <p>
 * <ul>
 * <li>FEATURE_DEBUGGING: to enable GMS level logging</li>
 * </ul>
 * Beyond managing the connectivity to a cast device, this class provides easy-to-use methods to
 * send and receive messages using one or more namespaces. These namespaces can be configured during
 * the initialization as part of the call to <code>initialize()</code> or can be added later on.
 * Clients can subclass this class to extend the features and functionality beyond what this class
 * provides. This class manages various states of the remote cast device. Client applications,
 * however, can complement the default behavior of this class by hooking into various callbacks that
 * it provides (see {@link IDataCastConsumer}). Since the number of these callbacks is usually much
 * larger than what a single application might be interested in, there is a no-op implementation of
 * this interface (see {@link DataCastConsumerImpl}) that applications can subclass to override only
 * those methods that they are interested in. Since this library depends on the cast functionalities
 * provided by the Google Play services, the library checks to ensure that the right version of that
 * service is installed.
It also provides a simple static method * <code>checkGooglePlaySevices()</code> that clients can call at an early stage of their * applications to provide a dialog for users if they need to update/activate their GMS library. To * learn more about this library, please read the documentation that is distributed as part of this * library. */ public class DataCastManager extends BaseCastManager implements Cast.MessageReceivedCallback { private static final String TAG = LogUtils.makeLogTag(DataCastManager.class); private static DataCastManager sInstance; private final Set<String> mNamespaceList = new HashSet<String>(); private Set<IDataCastConsumer> mDataConsumers; /** * Initializes the DataCastManager for clients. Before clients can use DataCastManager, they * need to initialize it by calling this static method. Then clients can obtain an instance of * this singleton class by calling {@link DataCastManager#getInstance()}. Failing to initialize * this class before requesting an instance will result in a {@link CastException} exception. * * @param context * @param applicationId the unique ID for your application * @param namespaces to be set up for this class. * @return */ public static DataCastManager initialize(Context context, String applicationId, String... namespaces) { if (null == sInstance) { LOGD(TAG, "New instance of DataCastManager is created"); if (ConnectionResult.SUCCESS != GooglePlayServicesUtil .isGooglePlayServicesAvailable(context)) { String msg = "Couldn't find the appropriate version of Goolge Play Services"; LOGE(TAG, msg); throw new RuntimeException(msg); } sInstance = new DataCastManager(context, applicationId, namespaces); mCastManager = sInstance; } return sInstance; } protected DataCastManager(Context context, String applicationId, String... 
namespaces) { super(context, applicationId); mDataConsumers = Collections.synchronizedSet(new HashSet<IDataCastConsumer>()); if (null != namespaces) { for (String namespace : namespaces) { mNamespaceList.add(namespace); } } } /** * Returns the initialized instance of this class. If it is not initialized yet, a * {@link CastException} will be thrown. * * @see initialze() * @return * @throws CastException */ public static DataCastManager getInstance() throws CastException { if (null == sInstance) { LOGE(TAG, "No DataCastManager instance was initialized, you need to " + "call initialize() first"); throw new CastException(); } return sInstance; } /** * Returns the initialized instance of this class. If it is not initialized yet, a * {@link CastException} will be thrown. The {@link Context} that is passed as the argument will * be used to update the context. The main purpose of updating context is to enable the library * to provide {@link Context} related functionalities, e.g. it can create an error dialog if * needed. This method is preferred over the similar one without a context argument. * * @see {@link initialize()}, {@link setContext()} * @param ctx the current Context * @return * @throws CastException */ public static DataCastManager getInstance(Context ctx) throws CastException { if (null == sInstance) { LOGE(TAG, "No DataCastManager instance was initialized, you need to " + "call initialize() first"); throw new CastException(); } LOGD(TAG, "Updated context to: " + ctx.getClass().getName()); sInstance.mContext = ctx; return sInstance; } /** * Adds a channel with the given <code>namespace</code> and registers {@link DataCastManager} as * the callback receiver. If the namespace is already registered, this returns * <code>false</code>, otherwise returns <code>true * </code>. 
* * @param namespace * @return * @throws NoConnectionException If no connectivity to the device exists * @throws TransientNetworkDisconnectionException If framework is still trying to recover from a * possibly transient loss of network * @throws IllegalArgumentException If namespace is null or empty */ public boolean addNamespace(String namespace) throws IllegalStateException, IOException, TransientNetworkDisconnectionException, NoConnectionException { checkConnectivity(); if (TextUtils.isEmpty(namespace)) { throw new IllegalArgumentException("namespace cannot be empty"); } if (mNamespaceList.contains(namespace)) { LOGD(TAG, "Ignoring to add a namespace that is already added."); return false; } try { Cast.CastApi.setMessageReceivedCallbacks(mApiClient, namespace, this); mNamespaceList.add(namespace); return true; } catch (IOException e) { LOGE(TAG, "Failed to add namespace", e); } catch (IllegalStateException e) { LOGE(TAG, "Failed to add namespace", e); } return false; } /** * Unregisters a namespace. If namespace is not already registered, it returns * <code>false</code>, otherwise a successful removal returns <code>true * </code>. 
* * @param namespace * @return * @throws NoConnectionException If no connectivity to the device exists * @throws TransientNetworkDisconnectionException If framework is still trying to recover from a * possibly transient loss of network * @throws IllegalArgumentException If namespace is null or empty */ public boolean removeNamespace(String namespace) throws TransientNetworkDisconnectionException, NoConnectionException { checkConnectivity(); if (TextUtils.isEmpty(namespace)) { throw new IllegalArgumentException("namespace cannot be empty"); } if (!mNamespaceList.contains(namespace)) { LOGD(TAG, "Ignoring to remove a namespace that is not registered."); return false; } try { Cast.CastApi.removeMessageReceivedCallbacks(mApiClient, namespace); mNamespaceList.remove(namespace); return true; } catch (IOException e) { LOGE(TAG, "Failed to remove namespace: " + namespace, e); } catch (IllegalStateException e) { LOGE(TAG, "Failed to remove namespace: " + namespace, e); } return false; } /** * Sends the <code>message</code> on the data channel for the <code>namespace</code>. If fails, * it will call <code>onMessageSendFailed</code> * * @param message * @param namespace * @throws NoConnectionException If no connectivity to the device exists * @throws TransientNetworkDisconnectionException If framework is still trying to recover from a * possibly transient loss of network * @throws IllegalArgumentException If the the message is null, empty, or too long; or if the * namespace is null or too long. * @throws IllegalStateException If there is no active service connection. * @throws IOException */ public void sendDataMessage(String message, String namespace) throws IllegalArgumentException, IllegalStateException, IOException, TransientNetworkDisconnectionException, NoConnectionException { checkConnectivity(); if (TextUtils.isEmpty(namespace)) { throw new IllegalArgumentException("namespace cannot be empty"); } Cast.CastApi.sendMessage(mApiClient, namespace, message). 
setResultCallback(new ResultCallback<Status>() { @Override public void onResult(Status result) { if (!result.isSuccess()) { DataCastManager.this.onMessageSendFailed(result); } } }); } /*************************************************************************/ /************** BaseCastManager methods **********************************/ /*************************************************************************/ @Override protected void onDeviceUnselected() { detachDataChannels(); } @Override protected Builder getCastOptionBuilder(CastDevice device) { Builder builder = Cast.CastOptions.builder( mSelectedCastDevice, new CastListener()); if (isFeatureEnabled(FEATURE_DEBUGGING)) { builder.setVerboseLoggingEnabled(true); } return builder; } class CastListener extends Cast.Listener { /* * (non-Javadoc) * @see com.google.android.gms.cast.Cast.Listener#onApplicationDisconnected (int) */ @Override public void onApplicationDisconnected(int statusCode) { DataCastManager.this.onApplicationDisconnected(statusCode); } /* * (non-Javadoc) * @see com.google.android.gms.cast.Cast.Listener#onApplicationStatusChanged () */ @Override public void onApplicationStatusChanged() { DataCastManager.this.onApplicationStatusChanged(); } } @Override protected MediaRouteDialogFactory getMediaRouteDialogFactory() { return null; } /*************************************************************************/ /************** Cast.Listener callbacks **********************************/ /*************************************************************************/ @Override public void onApplicationConnected(ApplicationMetadata appMetadata, String applicationStatus, String sessionId, boolean wasLaunched) { LOGD(TAG, "onApplicationConnected() reached with sessionId: " + sessionId); // saving session for future retrieval; we only save the last session // info Utils.saveStringToPreference(mContext, PREFS_KEY_SESSION_ID, sessionId); if (mReconnectionStatus == ReconnectionStatus.IN_PROGRESS) { // we have tried to 
reconnect and successfully launched the app, so // it is time to select the route and make the cast icon happy :-) List<RouteInfo> routes = mMediaRouter.getRoutes(); if (null != routes) { String routeId = Utils.getStringFromPreference(mContext, PREFS_KEY_ROUTE_ID); boolean found = false; for (RouteInfo routeInfo : routes) { if (routeId.equals(routeInfo.getId())) { // found the right route LOGD(TAG, "Found the correct route during reconnection attempt"); found = true; mReconnectionStatus = ReconnectionStatus.FINALIZE; mMediaRouter.selectRoute(routeInfo); break; } } if (!found) { // we were hoping to have the route that we wanted, but we // didn't so we deselect the device onDeviceSelected(null); mReconnectionStatus = ReconnectionStatus.INACTIVE; // uncomment the following if you want to clear session // persisted data if a reconnection attempt fails // Utils.saveStringToPreference(mContext, // PREFS_KEY_SESSION_ID, null); // Utils.saveStringToPreference(mContext, // PREFS_KEY_ROUTE_ID, null); return; } } } // registering namespaces, if any try { attachDataChannels(); mSessionId = sessionId; synchronized (mDataConsumers) { for (IDataCastConsumer consumer : mDataConsumers) { try { consumer.onApplicationConnected(appMetadata, applicationStatus, sessionId, wasLaunched); } catch (Exception e) { LOGE(TAG, "onApplicationConnected(): Failed to inform " + consumer, e); } } } } catch (IllegalStateException e) { LOGE(TAG, "Failed to attach namespaces", e); } catch (IOException e) { LOGE(TAG, "Failed to attach namespaces", e); } catch (TransientNetworkDisconnectionException e) { LOGE(TAG, "Failed to attach namespaces", e); } catch (NoConnectionException e) { LOGE(TAG, "Failed to attach namespaces", e); } } /* * Adds namespaces for data channel(s) * @throws NoConnectionException If no connectivity to the device exists * @throws TransientNetworkDisconnectionException If framework is still trying to recover from a * possibly transient loss of network * @throws IOException If an 
I/O error occurs while performing the request. * @throws IllegalStateException Thrown when the controller is not connected to a CastDevice. * @throws IllegalArgumentException If namespace is null. */ private void attachDataChannels() throws IllegalStateException, IOException, TransientNetworkDisconnectionException, NoConnectionException { checkConnectivity(); if (!mNamespaceList.isEmpty() && null != Cast.CastApi) { for (String namespace : mNamespaceList) { Cast.CastApi.setMessageReceivedCallbacks(mApiClient, namespace, this); } } } /* * Remove namespaces * @throws NoConnectionException If no connectivity to the device exists * @throws TransientNetworkDisconnectionException If framework is still trying to recover from a * possibly transient loss of network */ private void detachDataChannels() { if (!mNamespaceList.isEmpty() && null != Cast.CastApi && null != mApiClient) { for (String namespace : mNamespaceList) { try { Cast.CastApi.removeMessageReceivedCallbacks(mApiClient, namespace); } catch (Exception e) { LOGE(TAG, "Failed to add namespace: " + namespace, e); } } } } @Override public void onApplicationConnectionFailed(int errorCode) { onDeviceSelected(null); synchronized (mDataConsumers) { for (IDataCastConsumer consumer : mDataConsumers) { try { consumer.onApplicationConnectionFailed(errorCode); } catch (Exception e) { LOGE(TAG, "onApplicationConnectionFailed(): Failed to inform " + consumer, e); } } } } public void onApplicationDisconnected(int errorCode) { synchronized (mDataConsumers) { for (IDataCastConsumer consumer : mDataConsumers) { try { consumer.onApplicationDisconnected(errorCode); } catch (Exception e) { LOGE(TAG, "onApplicationDisconnected(): Failed to inform " + consumer, e); } } } if (null != mMediaRouter) { mMediaRouter.selectRoute(mMediaRouter.getDefaultRoute()); } onDeviceSelected(null); } public void onApplicationStatusChanged() { String appStatus = null; if (!isConnected()) { return; } try { appStatus = 
Cast.CastApi.getApplicationStatus(mApiClient); LOGD(TAG, "onApplicationStatusChanged() reached: " + Cast.CastApi.getApplicationStatus(mApiClient)); synchronized (mDataConsumers) { for (IDataCastConsumer consumer : mDataConsumers) { try { consumer.onApplicationStatusChanged(appStatus); } catch (Exception e) { LOGE(TAG, "onApplicationStatusChanged(): Failed to inform " + consumer, e); } } } } catch (IllegalStateException e) { LOGE(TAG, "onApplicationStatusChanged(): Failed", e); } } @Override public void onApplicationStopFailed(int errorCode) { synchronized (mDataConsumers) { for (IDataCastConsumer consumer : mDataConsumers) { try { consumer.onApplicationStopFailed(errorCode); } catch (Exception e) { LOGE(TAG, "onApplicationStopFailed(): Failed to inform " + consumer, e); } } } } public void onVolumeChanged() { // nothing relevant to data } /*************************************************************************/ /************** MessageReceivedCallbacks callbacks ***********************/ /*************************************************************************/ @Override public void onMessageReceived(CastDevice castDevice, String namespace, String message) { synchronized (mDataConsumers) { for (IDataCastConsumer consumer : mDataConsumers) { try { consumer.onMessageReceived(castDevice, namespace, message); } catch (Exception e) { LOGE(TAG, "onMessageReceived(): Failed to inform " + consumer, e); } } } } public void onMessageSendFailed(Status result) { synchronized (mDataConsumers) { for (IDataCastConsumer consumer : mDataConsumers) { try { consumer.onMessageSendFailed(result); } catch (Exception e) { LOGE(TAG, "onMessageSendFailed(): Failed to inform " + consumer, e); } } } } /*************************************************************/ /***** Registering IDataCastConsumer listeners ***************/ /*************************************************************/ /** * Registers an {@link IDataCastConsumer} interface with this class. 
Registered listeners will * be notified of changes to a variety of lifecycle and status changes through the callbacks * that the interface provides. * * @see DataCastConsumerImpl * @param listener */ public void addDataCastConsumer(IDataCastConsumer listener) { if (null != listener) { super.addBaseCastConsumer(listener); boolean result = false; synchronized (mDataConsumers) { result = mDataConsumers.add(listener); } if (result) { LOGD(TAG, "Successfully added the new DataCastConsumer listener " + listener); } else { LOGD(TAG, "Adding Listener " + listener + " was already registered, " + "skipping this step"); } } } /** * Unregisters an {@link IDataCastConsumer}. * * @param listener */ public void removeDataCastConsumer(IDataCastConsumer listener) { if (null != listener) { super.removeBaseCastConsumer(listener); synchronized (mDataConsumers) { mDataConsumers.remove(listener); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.state; import org.apache.flink.api.common.JobID; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; import org.apache.flink.runtime.checkpoint.TaskStateSnapshot; import org.apache.flink.runtime.clusterframework.types.AllocationID; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.util.TestLogger; import org.apache.flink.util.concurrent.Executors; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.SortedMap; import java.util.TreeMap; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** Test for the {@link TaskLocalStateStoreImpl}. 
*/ public class TaskLocalStateStoreImplTest extends TestLogger { private SortedMap<Long, TaskStateSnapshot> internalSnapshotMap; private Object internalLock; private TemporaryFolder temporaryFolder; private File[] allocationBaseDirs; private TaskLocalStateStoreImpl taskLocalStateStore; @Before public void before() throws Exception { JobID jobID = new JobID(); AllocationID allocationID = new AllocationID(); JobVertexID jobVertexID = new JobVertexID(); int subtaskIdx = 0; this.temporaryFolder = new TemporaryFolder(); this.temporaryFolder.create(); this.allocationBaseDirs = new File[] {temporaryFolder.newFolder(), temporaryFolder.newFolder()}; this.internalSnapshotMap = new TreeMap<>(); this.internalLock = new Object(); LocalRecoveryDirectoryProviderImpl directoryProvider = new LocalRecoveryDirectoryProviderImpl( allocationBaseDirs, jobID, jobVertexID, subtaskIdx); LocalRecoveryConfig localRecoveryConfig = new LocalRecoveryConfig(directoryProvider); this.taskLocalStateStore = new TaskLocalStateStoreImpl( jobID, allocationID, jobVertexID, subtaskIdx, localRecoveryConfig, Executors.directExecutor(), internalSnapshotMap, internalLock); } @After public void after() { this.temporaryFolder.delete(); } /** Test that the instance delivers a correctly configured LocalRecoveryDirectoryProvider. */ @Test public void getLocalRecoveryRootDirectoryProvider() { LocalRecoveryConfig directoryProvider = taskLocalStateStore.getLocalRecoveryConfig(); Assert.assertEquals( allocationBaseDirs.length, directoryProvider.getLocalStateDirectoryProvider().get().allocationBaseDirsCount()); for (int i = 0; i < allocationBaseDirs.length; ++i) { Assert.assertEquals( allocationBaseDirs[i], directoryProvider .getLocalStateDirectoryProvider() .get() .selectAllocationBaseDirectory(i)); } } /** Tests basic store/retrieve of local state. 
*/ @Test public void storeAndRetrieve() throws Exception { final int chkCount = 3; for (int i = 0; i < chkCount; ++i) { Assert.assertNull(taskLocalStateStore.retrieveLocalState(i)); } List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount); checkStoredAsExpected(taskStateSnapshots, 0, chkCount); Assert.assertNull(taskLocalStateStore.retrieveLocalState(chkCount + 1)); } /** Test checkpoint pruning. */ @Test public void pruneCheckpoints() throws Exception { final int chkCount = 3; List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount); // test retrieve with pruning taskLocalStateStore.pruneMatchingCheckpoints((long chk) -> chk != chkCount - 1); for (int i = 0; i < chkCount - 1; ++i) { Assert.assertNull(taskLocalStateStore.retrieveLocalState(i)); } checkStoredAsExpected(taskStateSnapshots, chkCount - 1, chkCount); } /** Tests pruning of previous checkpoints if a new checkpoint is confirmed. */ @Test public void confirmCheckpoint() throws Exception { final int chkCount = 3; final int confirmed = chkCount - 1; List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount); taskLocalStateStore.confirmCheckpoint(confirmed); checkPrunedAndDiscarded(taskStateSnapshots, 0, confirmed); checkStoredAsExpected(taskStateSnapshots, confirmed, chkCount); } /** Tests pruning of target previous checkpoints if that checkpoint is aborted. */ @Test public void abortCheckpoint() throws Exception { final int chkCount = 4; final int aborted = chkCount - 2; List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount); taskLocalStateStore.abortCheckpoint(aborted); checkPrunedAndDiscarded(taskStateSnapshots, aborted, aborted + 1); checkStoredAsExpected(taskStateSnapshots, 0, aborted); checkStoredAsExpected(taskStateSnapshots, aborted + 1, chkCount); } /** * Tests that disposal of a {@link TaskLocalStateStoreImpl} works and discards all local states. 
*/ @Test public void dispose() throws Exception { final int chkCount = 3; final int confirmed = chkCount - 1; List<TestingTaskStateSnapshot> taskStateSnapshots = storeStates(chkCount); taskLocalStateStore.confirmCheckpoint(confirmed); taskLocalStateStore.dispose(); checkPrunedAndDiscarded(taskStateSnapshots, 0, chkCount); } private void checkStoredAsExpected(List<TestingTaskStateSnapshot> history, int start, int end) { for (int i = start; i < end; ++i) { TestingTaskStateSnapshot expected = history.get(i); assertTrue(expected == taskLocalStateStore.retrieveLocalState(i)); assertFalse(expected.isDiscarded()); } } private void checkPrunedAndDiscarded( List<TestingTaskStateSnapshot> history, int start, int end) { for (int i = start; i < end; ++i) { Assert.assertNull(taskLocalStateStore.retrieveLocalState(i)); assertTrue(history.get(i).isDiscarded()); } } private List<TestingTaskStateSnapshot> storeStates(int count) { List<TestingTaskStateSnapshot> taskStateSnapshots = new ArrayList<>(count); for (int i = 0; i < count; ++i) { OperatorID operatorID = new OperatorID(); TestingTaskStateSnapshot taskStateSnapshot = new TestingTaskStateSnapshot(); OperatorSubtaskState operatorSubtaskState = OperatorSubtaskState.builder().build(); taskStateSnapshot.putSubtaskStateByOperatorID(operatorID, operatorSubtaskState); taskLocalStateStore.storeLocalState(i, taskStateSnapshot); taskStateSnapshots.add(taskStateSnapshot); } return taskStateSnapshots; } private static final class TestingTaskStateSnapshot extends TaskStateSnapshot { private static final long serialVersionUID = 2046321877379917040L; private boolean isDiscarded = false; @Override public void discardState() throws Exception { super.discardState(); isDiscarded = true; } boolean isDiscarded() { return isDiscarded; } } }
/* * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy; import java.util.Comparator; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Matches version ranges: * <ul> * <li>[1.0,2.0] matches all versions greater or equal to 1.0 and lower or equal to 2.0 </li> * <li>[1.0,2.0[ matches all versions greater or equal to 1.0 and lower than 2.0 </li> * <li>]1.0,2.0] matches all versions greater than 1.0 and lower or equal to 2.0 </li> * <li>]1.0,2.0[ matches all versions greater than 1.0 and lower than 2.0 </li> * <li>[1.0,) matches all versions greater or equal to 1.0 </li> * <li>]1.0,) matches all versions greater than 1.0 </li> * <li>(,2.0] matches all versions lower or equal to 2.0 </li> * <li>(,2.0[matches all versions lower than 2.0 </li> * </ul> * This class uses a latest strategy to compare revisions. If * none is set, it uses the default one of the ivy instance set through setIvy(). If neither a * latest strategy nor a ivy instance is set, an IllegalStateException will be thrown when calling * accept(). Note that it can't work with latest time strategy, cause no time is known for the * limits of the range. Therefore only purely revision based LatestStrategy can be used. 
*/ public class VersionRangeSelector extends AbstractVersionVersionSelector { private static final String OPEN_INC = "["; private static final String OPEN_EXC = "]"; private static final String OPEN_EXC_MAVEN = "("; private static final String CLOSE_INC = "]"; private static final String CLOSE_EXC = "["; private static final String CLOSE_EXC_MAVEN = ")"; private static final String LOWER_INFINITE = "("; private static final String UPPER_INFINITE = ")"; private static final String SEPARATOR = ","; // following patterns are built upon constants above and should not be modified private static final String OPEN_INC_PATTERN = "\\" + OPEN_INC; private static final String OPEN_EXC_PATTERN = "\\" + OPEN_EXC + "\\" + OPEN_EXC_MAVEN; private static final String CLOSE_INC_PATTERN = "\\" + CLOSE_INC; private static final String CLOSE_EXC_PATTERN = "\\" + CLOSE_EXC + "\\" + CLOSE_EXC_MAVEN; private static final String LI_PATTERN = "\\" + LOWER_INFINITE; private static final String UI_PATTERN = "\\" + UPPER_INFINITE; private static final String SEP_PATTERN = "\\s*\\" + SEPARATOR + "\\s*"; private static final String OPEN_PATTERN = "[" + OPEN_INC_PATTERN + OPEN_EXC_PATTERN + "]"; private static final String CLOSE_PATTERN = "[" + CLOSE_INC_PATTERN + CLOSE_EXC_PATTERN + "]"; private static final String ANY_NON_SPECIAL_PATTERN = "[^\\s" + SEPARATOR + OPEN_INC_PATTERN + OPEN_EXC_PATTERN + CLOSE_INC_PATTERN + CLOSE_EXC_PATTERN + LI_PATTERN + UI_PATTERN + "]"; private static final String FINITE_PATTERN = OPEN_PATTERN + "\\s*(" + ANY_NON_SPECIAL_PATTERN + "+)" + SEP_PATTERN + "(" + ANY_NON_SPECIAL_PATTERN + "+)\\s*" + CLOSE_PATTERN; private static final String LOWER_INFINITE_PATTERN = LI_PATTERN + SEP_PATTERN + "(" + ANY_NON_SPECIAL_PATTERN + "+)\\s*" + CLOSE_PATTERN; private static final String UPPER_INFINITE_PATTERN = OPEN_PATTERN + "\\s*(" + ANY_NON_SPECIAL_PATTERN + "+)" + SEP_PATTERN + UI_PATTERN; private static final String SINGLE_VALUE_PATTERN = OPEN_INC_PATTERN + "\\s*(" + 
ANY_NON_SPECIAL_PATTERN + "+)" + CLOSE_INC_PATTERN; private static final Pattern FINITE_RANGE = Pattern.compile(FINITE_PATTERN); private static final Pattern LOWER_INFINITE_RANGE = Pattern.compile(LOWER_INFINITE_PATTERN); private static final Pattern UPPER_INFINITE_RANGE = Pattern.compile(UPPER_INFINITE_PATTERN); private static final Pattern SINGLE_VALUE_RANGE = Pattern.compile(SINGLE_VALUE_PATTERN); public static final Pattern ALL_RANGE = Pattern.compile(FINITE_PATTERN + "|" + LOWER_INFINITE_PATTERN + "|" + UPPER_INFINITE_PATTERN + "|" + SINGLE_VALUE_RANGE); private final String upperBound; private final Version upperBoundVersion; private final boolean upperInclusive; private final String lowerBound; private final boolean lowerInclusive; private final Version lowerBoundVersion; private final Comparator<Version> comparator; public VersionRangeSelector(String selector, Comparator<Version> comparator, VersionParser versionParser) { super(versionParser, selector); this.comparator = comparator; Matcher matcher; matcher = FINITE_RANGE.matcher(selector); if (matcher.matches()) { lowerBound = matcher.group(1); lowerInclusive = selector.startsWith(OPEN_INC); upperBound = matcher.group(2); upperInclusive = selector.endsWith(CLOSE_INC); } else { matcher = LOWER_INFINITE_RANGE.matcher(selector); if (matcher.matches()) { lowerBound = null; lowerInclusive = true; upperBound = matcher.group(1); upperInclusive = selector.endsWith(CLOSE_INC); } else { matcher = UPPER_INFINITE_RANGE.matcher(selector); if (matcher.matches()) { lowerBound = matcher.group(1); lowerInclusive = selector.startsWith(OPEN_INC); upperBound = null; upperInclusive = true; } else { matcher = SINGLE_VALUE_RANGE.matcher(selector); if (matcher.matches()) { lowerBound = matcher.group(1); lowerInclusive = true; upperBound = lowerBound; upperInclusive = true; } else { throw new IllegalArgumentException("Not a version range selector: " + selector); } } } } lowerBoundVersion = lowerBound == null ? 
null : versionParser.transform(lowerBound); upperBoundVersion = upperBound == null ? null : versionParser.transform(upperBound); } @Override public boolean isDynamic() { return true; } @Override public boolean requiresMetadata() { return false; } @Override public boolean matchesUniqueVersion() { return false; } @Override public boolean accept(Version candidate) { if (lowerBound != null && !isHigher(candidate, lowerBoundVersion, lowerInclusive)) { return false; } if (upperBound != null && !isLower(candidate, upperBoundVersion, upperInclusive)) { return false; } return true; } /** * Tells if version1 is lower than version2. */ private boolean isLower(Version version1, Version version2, boolean inclusive) { int result = comparator.compare(version1, version2); return result <= (inclusive ? 0 : -1); } /** * Tells if version1 is higher than version2. */ private boolean isHigher(Version version1, Version version2, boolean inclusive) { int result = comparator.compare(version1, version2); return result >= (inclusive ? 0 : 1); } public String getUpperBound() { return upperBound; } public Version getUpperBoundVersion() { return upperBoundVersion; } public boolean isUpperInclusive() { return upperInclusive; } public String getLowerBound() { return lowerBound; } public boolean isLowerInclusive() { return lowerInclusive; } public Version getLowerBoundVersion() { return lowerBoundVersion; } @Override public String toString() { return getSelector(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } VersionRangeSelector that = (VersionRangeSelector) o; if (upperInclusive != that.upperInclusive) { return false; } if (lowerInclusive != that.lowerInclusive) { return false; } if (upperBound != null ? !upperBound.equals(that.upperBound) : that.upperBound != null) { return false; } if (upperBoundVersion != null ? 
!upperBoundVersion.equals(that.upperBoundVersion) : that.upperBoundVersion != null) { return false; } if (lowerBound != null ? !lowerBound.equals(that.lowerBound) : that.lowerBound != null) { return false; } if (lowerBoundVersion != null ? !lowerBoundVersion.equals(that.lowerBoundVersion) : that.lowerBoundVersion != null) { return false; } return comparator.equals(that.comparator); } @Override public int hashCode() { int result = upperBound != null ? upperBound.hashCode() : 0; result = 31 * result + (upperBoundVersion != null ? upperBoundVersion.hashCode() : 0); result = 31 * result + (upperInclusive ? 1 : 0); result = 31 * result + (lowerBound != null ? lowerBound.hashCode() : 0); result = 31 * result + (lowerInclusive ? 1 : 0); result = 31 * result + (lowerBoundVersion != null ? lowerBoundVersion.hashCode() : 0); result = 31 * result + comparator.hashCode(); return result; } }
package liquibase.lockservice; import liquibase.Scope; import liquibase.change.Change; import liquibase.configuration.GlobalConfiguration; import liquibase.configuration.LiquibaseConfiguration; import liquibase.database.Database; import liquibase.database.ObjectQuotingStrategy; import liquibase.database.core.DB2Database; import liquibase.database.core.DerbyDatabase; import liquibase.database.core.MSSQLDatabase; import liquibase.diff.output.DiffOutputControl; import liquibase.diff.output.changelog.ChangeGeneratorFactory; import liquibase.exception.DatabaseException; import liquibase.exception.LiquibaseException; import liquibase.exception.LockException; import liquibase.exception.UnexpectedLiquibaseException; import liquibase.executor.Executor; import liquibase.executor.ExecutorService; import liquibase.snapshot.InvalidExampleException; import liquibase.snapshot.SnapshotGeneratorFactory; import liquibase.sql.Sql; import liquibase.sqlgenerator.SqlGeneratorFactory; import liquibase.statement.SqlStatement; import liquibase.statement.core.*; import liquibase.structure.DatabaseObject; import liquibase.structure.core.Table; import java.text.DateFormat; import java.util.*; import static java.util.ResourceBundle.getBundle; public class StandardLockService implements LockService { private static ResourceBundle coreBundle = getBundle("liquibase/i18n/liquibase-core"); protected Database database; protected boolean hasChangeLogLock; private Long changeLogLockPollRate; private Long changeLogLockRecheckTime; private Boolean hasDatabaseChangeLogLockTable; private boolean isDatabaseChangeLogLockTableInitialized; private ObjectQuotingStrategy quotingStrategy; public StandardLockService() { } @Override public int getPriority() { return PRIORITY_DEFAULT; } @Override public boolean supports(Database database) { return true; } @Override public void setDatabase(Database database) { this.database = database; } public Long getChangeLogLockWaitTime() { if (changeLogLockPollRate != null) { 
return changeLogLockPollRate; } return LiquibaseConfiguration.getInstance().getConfiguration(GlobalConfiguration.class) .getDatabaseChangeLogLockWaitTime(); } @Override public void setChangeLogLockWaitTime(long changeLogLockWaitTime) { this.changeLogLockPollRate = changeLogLockWaitTime; } public Long getChangeLogLockRecheckTime() { if (changeLogLockRecheckTime != null) { return changeLogLockRecheckTime; } return LiquibaseConfiguration.getInstance().getConfiguration(GlobalConfiguration.class) .getDatabaseChangeLogLockPollRate(); } @Override public void setChangeLogLockRecheckTime(long changeLogLockRecheckTime) { this.changeLogLockRecheckTime = changeLogLockRecheckTime; } @Override public void init() throws DatabaseException { boolean createdTable = false; Executor executor = ExecutorService.getInstance().getExecutor(database); if (!hasDatabaseChangeLogLockTable()) { try { executor.comment("Create Database Lock Table"); executor.execute(new CreateDatabaseChangeLogLockTableStatement()); database.commit(); Scope.getCurrentScope().getLog(getClass()).fine( "Created database lock table with name: " + database.escapeTableName( database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogLockTableName() ) ); } catch (DatabaseException e) { if ((e.getMessage() != null) && e.getMessage().contains("exists")) { //hit a race condition where the table got created by another node. Scope.getCurrentScope().getLog(getClass()).fine("Database lock table already appears to exist " + "due to exception: " + e.getMessage() + ". 
Continuing on"); } else { throw e; } } this.hasDatabaseChangeLogLockTable = true; createdTable = true; hasDatabaseChangeLogLockTable = true; } if (!isDatabaseChangeLogLockTableInitialized(createdTable)) { executor.comment("Initialize Database Lock Table"); executor.execute(new InitializeDatabaseChangeLogLockTableStatement()); database.commit(); } if (executor.updatesDatabase() && (database instanceof DerbyDatabase) && ((DerbyDatabase) database) .supportsBooleanDataType() || database.getClass().isAssignableFrom(DB2Database.class) && ((DB2Database) database) .supportsBooleanDataType()) { //check if the changelog table is of an old smallint vs. boolean format String lockTable = database.escapeTableName( database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogLockTableName() ); Object obj = executor.queryForObject( new RawSqlStatement( "SELECT MIN(locked) AS test FROM " + lockTable + " FETCH FIRST ROW ONLY" ), Object.class ); if (!(obj instanceof Boolean)) { //wrong type, need to recreate table executor.execute( new DropTableStatement( database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogLockTableName(), false ) ); executor.execute(new CreateDatabaseChangeLogLockTableStatement()); executor.execute(new InitializeDatabaseChangeLogLockTableStatement()); } } } public boolean isDatabaseChangeLogLockTableInitialized(final boolean tableJustCreated) throws DatabaseException { if (!isDatabaseChangeLogLockTableInitialized) { Executor executor = ExecutorService.getInstance().getExecutor(database); try { isDatabaseChangeLogLockTableInitialized = executor.queryForInt( new RawSqlStatement("SELECT COUNT(*) FROM " + database.escapeTableName( database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName(), database.getDatabaseChangeLogLockTableName() ) ) ) > 0; } catch (LiquibaseException e) { if (executor.updatesDatabase()) { throw new UnexpectedLiquibaseException(e); } else { //probably 
didn't actually create the table yet. isDatabaseChangeLogLockTableInitialized = !tableJustCreated; } } } return isDatabaseChangeLogLockTableInitialized; } @Override public boolean hasChangeLogLock() { return hasChangeLogLock; } public boolean hasDatabaseChangeLogLockTable() throws DatabaseException { if (hasDatabaseChangeLogLockTable == null) { try { hasDatabaseChangeLogLockTable = SnapshotGeneratorFactory.getInstance() .hasDatabaseChangeLogLockTable(database); } catch (LiquibaseException e) { throw new UnexpectedLiquibaseException(e); } } return hasDatabaseChangeLogLockTable; } @Override public void waitForLock() throws LockException { boolean locked = false; long timeToGiveUp = new Date().getTime() + (getChangeLogLockWaitTime() * 1000 * 60); while (!locked && (new Date().getTime() < timeToGiveUp)) { locked = acquireLock(); if (!locked) { Scope.getCurrentScope().getLog(getClass()).info("Waiting for changelog lock...."); try { Thread.sleep(getChangeLogLockRecheckTime() * 1000); } catch (InterruptedException e) { // Restore thread interrupt status Thread.currentThread().interrupt(); } } } if (!locked) { DatabaseChangeLogLock[] locks = listLocks(); String lockedBy; if (locks.length > 0) { DatabaseChangeLogLock lock = locks[0]; lockedBy = lock.getLockedBy() + " since " + DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT) .format(lock.getLockGranted()); } else { lockedBy = "UNKNOWN"; } throw new LockException("Could not acquire change log lock. 
Currently locked by " + lockedBy); } } @Override public boolean acquireLock() throws LockException { if (hasChangeLogLock) { return true; } quotingStrategy = database.getObjectQuotingStrategy(); Executor executor = ExecutorService.getInstance().getExecutor(database); try { database.rollback(); this.init(); Boolean locked = ExecutorService.getInstance().getExecutor(database).queryForObject( new SelectFromDatabaseChangeLogLockStatement("LOCKED"), Boolean.class ); if (locked) { return false; } else { executor.comment("Lock Database"); int rowsUpdated = executor.update(new LockDatabaseChangeLogStatement()); if ((rowsUpdated == -1) && (database instanceof MSSQLDatabase)) { Scope.getCurrentScope().getLog(getClass()).fine( "Database did not return a proper row count (Might have NOCOUNT enabled)" ); database.rollback(); Sql[] sql = SqlGeneratorFactory.getInstance().generateSql( new LockDatabaseChangeLogStatement(), database ); if (sql.length != 1) { throw new UnexpectedLiquibaseException("Did not expect "+sql.length+" statements"); } rowsUpdated = executor.update(new RawSqlStatement("EXEC sp_executesql N'SET NOCOUNT OFF " + sql[0].toSql().replace("'", "''") + "'")); } if (rowsUpdated > 1) { throw new LockException("Did not update change log lock correctly"); } if (rowsUpdated == 0) { // another node was faster return false; } database.commit(); Scope.getCurrentScope().getLog(getClass()).info(coreBundle.getString("successfully.acquired.change.log.lock")); hasChangeLogLock = true; database.setCanCacheLiquibaseTableInfo(true); return true; } } catch (Exception e) { throw new LockException(e); } finally { try { database.rollback(); } catch (DatabaseException e) { } } } @Override public void releaseLock() throws LockException { ObjectQuotingStrategy incomingQuotingStrategy = null; if (this.quotingStrategy != null) { incomingQuotingStrategy = database.getObjectQuotingStrategy(); database.setObjectQuotingStrategy(this.quotingStrategy); } Executor executor = 
ExecutorService.getInstance().getExecutor(database); try { if (this.hasDatabaseChangeLogLockTable()) { executor.comment("Release Database Lock"); database.rollback(); int updatedRows = executor.update(new UnlockDatabaseChangeLogStatement()); if ((updatedRows == -1) && (database instanceof MSSQLDatabase)) { Scope.getCurrentScope().getLog(getClass()).fine( "Database did not return a proper row count (Might have NOCOUNT enabled.)" ); database.rollback(); Sql[] sql = SqlGeneratorFactory.getInstance().generateSql( new UnlockDatabaseChangeLogStatement(), database ); if (sql.length != 1) { throw new UnexpectedLiquibaseException("Did not expect "+sql.length+" statements"); } updatedRows = executor.update( new RawSqlStatement( "EXEC sp_executesql N'SET NOCOUNT OFF " + sql[0].toSql().replace("'", "''") + "'" ) ); } if (updatedRows != 1) { throw new LockException( "Did not update change log lock correctly.\n\n" + updatedRows + " rows were updated instead of the expected 1 row using executor " + executor.getClass().getName() + "" + " there are " + executor.queryForInt( new RawSqlStatement( "SELECT COUNT(*) FROM " + database.getDatabaseChangeLogLockTableName() ) ) + " rows in the table" ); } database.commit(); } } catch (Exception e) { throw new LockException(e); } finally { try { hasChangeLogLock = false; database.setCanCacheLiquibaseTableInfo(false); Scope.getCurrentScope().getLog(getClass()).info("Successfully released change log lock"); database.rollback(); } catch (DatabaseException e) { } if (incomingQuotingStrategy != null) { database.setObjectQuotingStrategy(incomingQuotingStrategy); } } } @Override public DatabaseChangeLogLock[] listLocks() throws LockException { try { if (!this.hasDatabaseChangeLogLockTable()) { return new DatabaseChangeLogLock[0]; } List<DatabaseChangeLogLock> allLocks = new ArrayList<>(); SqlStatement sqlStatement = new SelectFromDatabaseChangeLogLockStatement( "ID", "LOCKED", "LOCKGRANTED", "LOCKEDBY" ); List<Map<String, ?>> rows = 
ExecutorService.getInstance().getExecutor(database).queryForList(sqlStatement); for (Map columnMap : rows) { Object lockedValue = columnMap.get("LOCKED"); Boolean locked; if (lockedValue instanceof Number) { locked = ((Number) lockedValue).intValue() == 1; } else { locked = (Boolean) lockedValue; } if ((locked != null) && locked) { allLocks.add( new DatabaseChangeLogLock( ((Number) columnMap.get("ID")).intValue(), (Date) columnMap.get("LOCKGRANTED"), (String) columnMap.get("LOCKEDBY") ) ); } } return allLocks.toArray(new DatabaseChangeLogLock[allLocks.size()]); } catch (Exception e) { throw new LockException(e); } } @Override public void forceReleaseLock() throws LockException, DatabaseException { this.init(); releaseLock(); /*try { releaseLock(); } catch (LockException e) { // ignore ? Scope.getCurrentScope().getLog(getClass()).info("Ignored exception in forceReleaseLock: " + e.getMessage()); }*/ } @Override public void reset() { hasChangeLogLock = false; hasDatabaseChangeLogLockTable = null; isDatabaseChangeLogLockTableInitialized = false; } @Override public void destroy() throws DatabaseException { try { // // This code now uses the ChangeGeneratorFactory to // allow extension code to be called in order to // delete the changelog lock table. 
// // To implement the extension, you will need to override: // DropTableStatement // DropTableChange // DropTableGenerator // // DatabaseObject example = new Table().setName(database.getDatabaseChangeLogLockTableName()) .setSchema(database.getLiquibaseCatalogName(), database.getLiquibaseSchemaName()); if (SnapshotGeneratorFactory.getInstance().has(example, database)) { DatabaseObject table = SnapshotGeneratorFactory.getInstance().createSnapshot(example, database); DiffOutputControl diffOutputControl = new DiffOutputControl(true, true, false, null); Change[] change = ChangeGeneratorFactory.getInstance().fixUnexpected(table, diffOutputControl, database, database); SqlStatement[] sqlStatement = change[0].generateStatements(database); ExecutorService.getInstance().getExecutor(database).execute(sqlStatement[0]); } reset(); } catch (InvalidExampleException e) { throw new UnexpectedLiquibaseException(e); } } }
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.editor.impl;

import com.intellij.codeStyle.CodeStyleFacade;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.LazyRangeMarkerFactory;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import consulo.util.dataholder.Key;
import consulo.util.dataholder.UserDataHolderBase;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.WeakList;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import java.util.List;

/**
 * Creates "lazy" {@link RangeMarker}s for files: the real, document-backed marker is only
 * materialized when someone actually queries the marker (or when the file's document changes).
 * Pending lazy markers are tracked per-file in user data under {@link #LAZY_MARKERS_KEY}, in a
 * {@link WeakList} so unused markers can be garbage collected.
 */
@Singleton
public class LazyRangeMarkerFactoryImpl extends LazyRangeMarkerFactory {
  private final Project myProject;
  // Per-VirtualFile registry of lazy markers that have not yet created their delegate.
  private static final Key<WeakList<LazyMarker>> LAZY_MARKERS_KEY = Key.create("LAZY_MARKERS_KEY");

  @Inject
  public LazyRangeMarkerFactoryImpl(@Nonnull Project project, @Nonnull final FileDocumentManager fileDocumentManager) {
    myProject = project;
    // As soon as a tracked file's document is about to change (or changes), force all of its
    // lazy markers to materialize so they can follow the edit like normal range markers.
    EditorFactory.getInstance().getEventMulticaster().addDocumentListener(new DocumentAdapter() {
      @Override
      public void beforeDocumentChange(DocumentEvent e) {
        transformRangeMarkers(e);
      }

      @Override
      public void documentChanged(DocumentEvent e) {
        transformRangeMarkers(e);
      }

      // Converts every pending lazy marker of the changed file into a real delegate marker.
      private void transformRangeMarkers(@Nonnull DocumentEvent e) {
        Document document = e.getDocument();
        VirtualFile file = fileDocumentManager.getFile(document);
        if (file == null || myProject.isDisposed()) {
          return;
        }

        WeakList<LazyMarker> lazyMarkers = getMarkers(file);
        if (lazyMarkers == null) {
          return;
        }

        // Snapshot the weak list before iterating; materializing removes markers from it.
        List<LazyMarker> markers = lazyMarkers.toStrongList();
        for (LazyMarker marker : markers) {
          if (file.equals(marker.getFile())) {
            marker.getOrCreateDelegate();
          }
        }
      }
    }, project);
  }

  // Returns the file's pending lazy markers, or null when none were ever registered.
  static WeakList<LazyMarker> getMarkers(@Nonnull VirtualFile file) {
    return file.getUserData(LazyRangeMarkerFactoryImpl.LAZY_MARKERS_KEY);
  }

  // Registers a marker for the file, creating the weak registry on first use.
  private static void addToLazyMarkersList(@Nonnull LazyMarker marker, @Nonnull VirtualFile file) {
    WeakList<LazyMarker> markers = getMarkers(file);

    if (markers == null) {
      markers = file.putUserDataIfAbsent(LAZY_MARKERS_KEY, new WeakList<>());
    }
    markers.add(marker);
  }

  // Unregisters a marker once it has materialized its delegate or is disposed.
  private static void removeFromLazyMarkersList(@Nonnull LazyMarker marker, @Nonnull VirtualFile file) {
    WeakList<LazyMarker> markers = getMarkers(file);

    if (markers != null) {
      markers.remove(marker);
    }
  }

  /**
   * Creates a lazy marker pinned to an absolute offset in the file.
   */
  @Override
  @Nonnull
  public RangeMarker createRangeMarker(@Nonnull final VirtualFile file, final int offset) {
    return ApplicationManager.getApplication().runReadAction(new Computable<RangeMarker>() {
      @Override
      public RangeMarker compute() {
        // even for already loaded document do not create range marker yet - wait until it really
        // needed when e.g. user clicked to jump to OpenFileDescriptor
        final LazyMarker marker = new OffsetLazyMarker(file, offset);
        addToLazyMarkersList(marker, file);
        return marker;
      }
    });
  }

  /**
   * Creates a marker pinned to a line/column position. If the document is already loaded, a real
   * marker is created immediately; otherwise a lazy line/column marker is registered.
   */
  @Override
  @Nonnull
  public RangeMarker createRangeMarker(@Nonnull final VirtualFile file, final int line, final int column, final boolean persistent) {
    return ApplicationManager.getApplication().runReadAction(new Computable<RangeMarker>() {
      @Override
      public RangeMarker compute() {
        final Document document = FileDocumentManager.getInstance().getCachedDocument(file);
        if (document != null) {
          int myTabSize = CodeStyleFacade.getInstance(myProject).getTabSize(file.getFileType());
          final int offset = calculateOffset(document, line, column, myTabSize);
          return document.createRangeMarker(offset, offset, persistent);
        }

        final LazyMarker marker = new LineColumnLazyMarker(myProject, file, line, column);
        addToLazyMarkersList(marker, file);
        return marker;
      }
    });
  }

  /**
   * A RangeMarker facade that defers creation of the real marker until first meaningful access.
   * State machine via {@code myDelegate}: null = not materialized yet; a real marker = delegated;
   * {@code this} = disposed sentinel.
   */
  abstract static class LazyMarker extends UserDataHolderBase implements RangeMarker {
    protected RangeMarker myDelegate; // the real range marker which is created only when document is opened, or (this) which means it's disposed
    protected final VirtualFile myFile;
    protected final int myInitialOffset;

    private LazyMarker(@Nonnull VirtualFile file, int offset) {
      myFile = file;
      myInitialOffset = offset;
    }

    boolean isDelegated() {
      return myDelegate != null;
    }

    @Nonnull
    public VirtualFile getFile() {
      return myFile;
    }

    // Materializes the delegate (loading the document if needed) and unregisters this lazy
    // marker from the per-file list. Returns null when the document cannot be loaded, when
    // createDelegate declines, or when this marker is disposed.
    @Nullable
    final RangeMarker getOrCreateDelegate() {
      if (myDelegate == null) {
        Document document = FileDocumentManager.getInstance().getDocument(myFile);
        if (document == null) {
          return null;
        }
        myDelegate = createDelegate(myFile, document);
        removeFromLazyMarkersList(this, myFile);
      }
      return isDisposed() ? null : myDelegate;
    }

    // Subclass hook: build the real marker for the (now loaded) document; may return null.
    @Nullable
    protected abstract RangeMarker createDelegate(@Nonnull VirtualFile file, @Nonnull Document document);

    @Override
    @Nonnull
    public Document getDocument() {
      RangeMarker delegate = getOrCreateDelegate();
      if (delegate == null) {
        //noinspection ConstantConditions
        return FileDocumentManager.getInstance().getDocument(myFile);
      }
      return delegate.getDocument();
    }

    // Falls back to the initial offset while not materialized (or after disposal).
    @Override
    public int getStartOffset() {
      return myDelegate == null || isDisposed() ? myInitialOffset : myDelegate.getStartOffset();
    }

    public boolean isDisposed() {
      return myDelegate == this;
    }

    @Override
    public int getEndOffset() {
      return myDelegate == null || isDisposed() ? myInitialOffset : myDelegate.getEndOffset();
    }

    @Override
    public boolean isValid() {
      RangeMarker delegate = getOrCreateDelegate();
      return delegate != null && !isDisposed() && delegate.isValid();
    }

    // NOTE(review): the greedy accessors below dereference getOrCreateDelegate() without a null
    // check; they assume the document is loadable at call time.
    @Override
    public void setGreedyToLeft(boolean greedy) {
      getOrCreateDelegate().setGreedyToLeft(greedy);
    }

    @Override
    public void setGreedyToRight(boolean greedy) {
      getOrCreateDelegate().setGreedyToRight(greedy);
    }

    @Override
    public boolean isGreedyToRight() {
      return getOrCreateDelegate().isGreedyToRight();
    }

    @Override
    public boolean isGreedyToLeft() {
      return getOrCreateDelegate().isGreedyToLeft();
    }

    @Override
    public void dispose() {
      assert !isDisposed();
      RangeMarker delegate = myDelegate;
      if (delegate == null) {
        // Never materialized: just unregister and flip to the disposed sentinel.
        removeFromLazyMarkersList(this, myFile);
        myDelegate = this; // mark of disposed marker
      }
      else {
        delegate.dispose();
      }
    }
  }

  /**
   * Lazy marker addressed by absolute offset; offset is clamped to the document length on
   * materialization.
   */
  private static class OffsetLazyMarker extends LazyMarker {
    private OffsetLazyMarker(@Nonnull VirtualFile file, int offset) {
      super(file, offset);
    }

    @Override
    public boolean isValid() {
      RangeMarker delegate = myDelegate;
      if (delegate == null) {
        // Not materialized yet: valid as long as the document can still be loaded.
        Document document = FileDocumentManager.getInstance().getDocument(myFile);
        return document != null;
      }

      return super.isValid();
    }

    @Override
    @Nonnull
    public RangeMarker createDelegate(@Nonnull VirtualFile file, @Nonnull final Document document) {
      final int offset = Math.min(myInitialOffset, document.getTextLength());
      return document.createRangeMarker(offset, offset);
    }
  }

  /**
   * Lazy marker addressed by line/column; converts to an offset (tab-aware) when the document
   * becomes available. The inherited myInitialOffset is unused (-1).
   */
  private static class LineColumnLazyMarker extends LazyMarker {
    private final int myLine;
    private final int myColumn;
    private final int myTabSize;

    private LineColumnLazyMarker(@Nonnull Project project, @Nonnull VirtualFile file, int line, int column) {
      super(file, -1);
      myLine = line;
      myColumn = column;
      myTabSize = CodeStyleFacade.getInstance(project).getTabSize(file.getFileType());
    }

    @Override
    @Nullable
    public RangeMarker createDelegate(@Nonnull VirtualFile file, @Nonnull Document document) {
      // An empty document can only host position (0,0); any other target is unrepresentable.
      if (document.getTextLength() == 0 && !(myLine == 0 && myColumn == 0)) {
        return null;
      }

      int offset = calculateOffset(document, myLine, myColumn, myTabSize);
      return document.createRangeMarker(offset, offset);
    }

    @Override
    public boolean isValid() {
      RangeMarker delegate = myDelegate;
      if (delegate == null) {
        Document document = FileDocumentManager.getInstance().getDocument(myFile);
        return document != null && (document.getTextLength() != 0 || myLine == 0 && myColumn == 0);
      }

      return super.isValid();
    }

    // Offsets are only meaningful after materialization, so force it before delegating.
    @Override
    public int getStartOffset() {
      getOrCreateDelegate();
      return super.getStartOffset();
    }

    @Override
    public int getEndOffset() {
      getOrCreateDelegate();
      return super.getEndOffset();
    }
  }

  // Maps a (line, column) pair to a document offset, counting a tab as tabSize columns.
  // Columns beyond the line end clamp to the line end; lines beyond the document clamp to the
  // document end.
  private static int calculateOffset(@Nonnull Document document, final int line, final int column, int tabSize) {
    int offset;
    if (0 <= line && line < document.getLineCount()) {
      final int lineStart = document.getLineStartOffset(line);
      final int lineEnd = document.getLineEndOffset(line);
      final CharSequence docText = document.getCharsSequence();

      offset = lineStart;
      int col = 0;
      while (offset < lineEnd && col < column) {
        col += docText.charAt(offset) == '\t' ? tabSize : 1;
        offset++;
      }
    }
    else {
      offset = document.getTextLength();
    }
    return offset;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tamaya.core; import org.apache.tamaya.Configuration; import org.apache.tamaya.TypeLiteral; import org.apache.tamaya.core.internal.CoreConfigurationBuilder; import org.apache.tamaya.spi.ConfigurationBuilder; import org.apache.tamaya.spi.ConfigurationContext; import org.apache.tamaya.spi.PropertyConverter; import org.apache.tamaya.spi.PropertyFilter; import org.apache.tamaya.spi.PropertySource; import org.junit.Test; import java.util.Arrays; import java.util.Comparator; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link ConfigurationBuilder} by atsticks on 06.09.16. 
*/ public class ConfigurationBuilderTest { private TestPropertySource testPropertySource = new TestPropertySource(){}; @Test public void setContext() throws Exception { Configuration cfg = Configuration.current(); ConfigurationBuilder b = Configuration.createConfigurationBuilder() .setConfiguration(cfg); assertThat(b.build()).isEqualTo(cfg); } @Test public void addPropertySources_Array() throws Exception { PropertySource testPS2 = new TestPropertySource("addPropertySources_Array", 1); ConfigurationBuilder b = new CoreConfigurationBuilder() .addPropertySources(testPropertySource, testPS2); Configuration cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(2).contains(testPropertySource, testPS2); // Ensure no sorting happens during addPropertyValue, so switch ordinals! testPS2 = new TestPropertySource("addPropertySources_Array", 1); b = Configuration.createConfigurationBuilder() .addPropertySources(testPS2, testPropertySource); cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(2).contains(testPropertySource, testPS2); assertThat("TestPropertySource").isEqualTo(cfg.getContext().getPropertySources().get(1).getName()); assertThat("addPropertySources_Array").isEqualTo(cfg.getContext().getPropertySources().get(0).getName()); } @Test public void addPropertySources_Collection() throws Exception { PropertySource testPS2 = new TestPropertySource("addPropertySources_Collection", 1); ConfigurationBuilder b = new CoreConfigurationBuilder() .addPropertySources(Arrays.asList(new PropertySource[]{testPropertySource, testPS2})); Configuration cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(2).contains(testPropertySource, testPS2); assertThat("TestPropertySource").isEqualTo(cfg.getContext().getPropertySources().get(0).getName()); assertThat("addPropertySources_Collection").isEqualTo(cfg.getContext().getPropertySources().get(1).getName()); // Ensure no sorting happens during addPropertyValue, so switch 
ordinals! testPS2 = new TestPropertySource("addPropertySources_Collection", 1); b = Configuration.createConfigurationBuilder() .addPropertySources(Arrays.asList(new PropertySource[]{testPS2, testPropertySource})); cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(2).contains(testPropertySource, testPS2); assertThat("TestPropertySource").isEqualTo(cfg.getContext().getPropertySources().get(1).getName()); assertThat("addPropertySources_Collection").isEqualTo(cfg.getContext().getPropertySources().get(0).getName()); } @Test public void removePropertySources_Array() throws Exception { PropertySource testPS2 = new TestPropertySource("removePropertySources_Array", 1); ConfigurationBuilder b = Configuration.createConfigurationBuilder() .addPropertySources(testPropertySource, testPS2); Configuration cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(2).contains(testPropertySource, testPS2); b = Configuration.createConfigurationBuilder() .addPropertySources(testPropertySource, testPS2); b.removePropertySources(testPropertySource); cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(1).contains(testPS2).doesNotContain(testPropertySource); } @Test public void removePropertySources_Collection() throws Exception { PropertySource testPS2 = new TestPropertySource("removePropertySources_Array", 1); ConfigurationBuilder b = Configuration.createConfigurationBuilder() .addPropertySources(testPropertySource, testPS2); Configuration cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(2).contains(testPropertySource, testPS2); b = Configuration.createConfigurationBuilder() .addPropertySources(testPropertySource, testPS2); b.removePropertySources(testPropertySource); cfg = b.build(); assertThat(cfg.getContext().getPropertySources()).hasSize(1).doesNotContain(testPropertySource).contains(testPS2); } @Test public void addPropertyFilters_Array() throws Exception { PropertyFilter filter1 = (value, 
ctx) -> value; PropertyFilter filter2 = (value, ctx) -> value; ConfigurationBuilder b = Configuration.createConfigurationBuilder(); b.addPropertyFilters(filter1, filter2); Configuration cfg = b.build(); ConfigurationContext ctx = cfg.getContext(); assertThat(ctx.getPropertyFilters()).hasSize(2).contains(filter1, filter2); b = Configuration.createConfigurationBuilder(); b.addPropertyFilters(filter1, filter2); b.addPropertyFilters(filter1, filter2); assertThat(ctx.getPropertyFilters()).hasSize(2); } @Test public void addPropertyFilters_Collection() throws Exception { PropertyFilter filter1 = (value, ctx) -> value; PropertyFilter filter2 = (value, ctx) -> value; ConfigurationBuilder b = Configuration.createConfigurationBuilder(); b.addPropertyFilters(Arrays.asList(new PropertyFilter[]{filter1, filter2})); Configuration cfg = b.build(); ConfigurationContext ctx = cfg.getContext(); assertThat(ctx.getPropertyFilters()).hasSize(2).contains(filter1, filter2); b = Configuration.createConfigurationBuilder(); b.addPropertyFilters(filter1, filter2); b.addPropertyFilters(filter1, filter2); assertThat(ctx.getPropertyFilters()).hasSize(2); } @Test public void removePropertyFilters_Array() throws Exception { PropertyFilter filter1 = (value, ctx) -> value; PropertyFilter filter2 = (value, ctx) -> value; ConfigurationBuilder b = Configuration.createConfigurationBuilder() .addPropertyFilters(filter1, filter2); Configuration cfg = b.build(); ConfigurationContext ctx = cfg.getContext(); assertThat(ctx.getPropertyFilters()).hasSize(2).contains(filter1, filter2); b = Configuration.createConfigurationBuilder() .addPropertyFilters(filter1, filter2); b.removePropertyFilters(filter1); cfg = b.build(); ctx = cfg.getContext(); assertThat(ctx.getPropertyFilters()).hasSize(1).doesNotContain(filter1).contains(filter2); } @Test public void removePropertyFilters_Collection() throws Exception { PropertyFilter filter1 = (value, ctx) -> value; PropertyFilter filter2 = (value, ctx) -> value; 
// NOTE(review): this chunk is the tail of a Tamaya ConfigurationBuilder test class whose
// header (class declaration, imports, and the filter1/filter2 fixtures) lies outside this
// view. The first statements below are the second half of a remove-property-filters test.
        ConfigurationBuilder b = Configuration.createConfigurationBuilder()
                .addPropertyFilters(Arrays.asList(new PropertyFilter[]{filter1, filter2}));
        Configuration cfg = b.build();
        ConfigurationContext ctx = cfg.getContext();
        assertThat(ctx.getPropertyFilters()).hasSize(2).contains(filter1, filter2);
        b = Configuration.createConfigurationBuilder()
                .addPropertyFilters(Arrays.asList(new PropertyFilter[]{filter1, filter2}));
        b.removePropertyFilters(filter1);
        cfg = b.build();
        ctx = cfg.getContext();
        assertThat(ctx.getPropertyFilters()).hasSize(1).doesNotContain(filter1).contains(filter2);
    }

    // Adding the same converter twice for the same TypeLiteral must not create a duplicate entry.
    @Test
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void addPropertyConverters_Array() throws Exception {
        PropertyConverter converter = (value, ctx) -> value.toLowerCase();
        ConfigurationBuilder b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class), converter);
        Configuration cfg = b.build();
        ConfigurationContext ctx = cfg.getContext();
        assertThat(ctx.getPropertyConverters(TypeLiteral.of(String.class))).contains(converter);
        assertThat(ctx.getPropertyConverters()).hasSize(1);
        b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class), converter);
        b.addPropertyConverters(TypeLiteral.of(String.class), converter);
        assertThat(ctx.getPropertyConverters()).hasSize(1);
    }

    // Same dedup guarantee when converters are registered via a Collection instead of varargs.
    @Test
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void addPropertyConverters_Collection() throws Exception {
        PropertyConverter converter = (value, ctx) -> value.toLowerCase();
        ConfigurationBuilder b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class),
                        Arrays.<PropertyConverter<Object>>asList(new PropertyConverter[]{converter}));
        Configuration cfg = b.build();
        ConfigurationContext ctx = cfg.getContext();
        assertThat(ctx.getPropertyConverters(TypeLiteral.of(String.class))).contains(converter);
        assertThat(ctx.getPropertyConverters()).hasSize(1);
        b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class),
                        Arrays.<PropertyConverter<Object>>asList(new PropertyConverter[]{converter}));
        b.addPropertyConverters(TypeLiteral.of(String.class), converter);
        assertThat(ctx.getPropertyConverters()).hasSize(1);
    }

    // Removing a converter registered via varargs leaves no converter for that type.
    @Test
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void removePropertyConverters_Array() throws Exception {
        PropertyConverter converter = (value, ctx) -> value.toLowerCase();
        ConfigurationBuilder b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class), converter);
        Configuration cfg = b.build();
        ConfigurationContext ctx = cfg.getContext();
        assertThat(ctx.getPropertyConverters(TypeLiteral.of(String.class))).hasSize(1).contains(converter);
        b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class), converter);
        b.removePropertyConverters(TypeLiteral.of(String.class), converter);
        cfg = b.build();
        ctx = cfg.getContext();
        assertThat(ctx.getPropertyConverters(TypeLiteral.of(String.class))).isEmpty();
    }

    // Removing a converter registered via a Collection leaves no converter for that type.
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Test
    public void removePropertyConverters_Collection() throws Exception {
        PropertyConverter converter = (value, ctx) -> value.toLowerCase();
        ConfigurationBuilder b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class),
                        Arrays.<PropertyConverter<Object>>asList(new PropertyConverter[]{converter}));
        Configuration cfg = b.build();
        ConfigurationContext ctx = cfg.getContext();
        assertThat(ctx.getPropertyConverters(TypeLiteral.of(String.class))).hasSize(1).contains(converter);
        b = Configuration.createConfigurationBuilder()
                .addPropertyConverters(TypeLiteral.of(String.class),
                        Arrays.<PropertyConverter<Object>>asList(new PropertyConverter[]{converter}));
        b.removePropertyConverters(TypeLiteral.of(String.class),
                Arrays.<PropertyConverter<Object>>asList(new PropertyConverter[]{converter}));
        cfg = b.build();
        ctx = cfg.getContext();
        assertThat(ctx.getPropertyConverters(TypeLiteral.of(String.class))).isEmpty();
    }

    // increasePriority on the already-highest source is a no-op; otherwise it swaps the
    // source with its higher-priority neighbour.
    @Test
    public void increasePriority(){
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        TestPropertySource[] propertySources = new TestPropertySource[10];
        for(int i=0;i<propertySources.length;i++){
            propertySources[i] = new TestPropertySource("ps"+i,i);
        }
        b.addPropertySources(propertySources);
        b.increasePriority(propertySources[propertySources.length-1]);
        for(int i=0;i<propertySources.length;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        b.increasePriority(propertySources[propertySources.length-2]);
        for(int i=0;i<propertySources.length-2;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        assertThat(b.getPropertySources().get(propertySources.length-2)).isEqualTo(propertySources[propertySources.length-1]);
        assertThat(b.getPropertySources().get(propertySources.length-1)).isEqualTo(propertySources[propertySources.length-2]);
    }

    // decreasePriority on the already-lowest source is a no-op; otherwise it swaps the
    // source with its lower-priority neighbour.
    @Test
    public void decreasePriority(){
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        TestPropertySource[] propertySources = new TestPropertySource[10];
        for(int i=0;i<propertySources.length;i++){
            propertySources[i] = new TestPropertySource("ps"+i,i);
        }
        b.addPropertySources(propertySources);
        b.decreasePriority(propertySources[0]);
        for(int i=0;i<propertySources.length;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        b.decreasePriority(propertySources[1]);
        for(int i=2;i<propertySources.length;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        assertThat(b.getPropertySources().get(1)).isEqualTo(propertySources[0]);
        assertThat(b.getPropertySources().get(0)).isEqualTo(propertySources[1]);
    }

    // lowestPriority moves the given source to index 0 (the lowest-priority slot).
    @Test
    public void lowestPriority(){
        // setup
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        TestPropertySource[] propertySources = new TestPropertySource[10];
        for(int i=0;i<propertySources.length;i++){
            propertySources[i] = new TestPropertySource("ps"+i,i);
        }
        b.addPropertySources(propertySources);
        // test
        b.lowestPriority(propertySources[0]);
        for(int i=0;i<propertySources.length;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        b.lowestPriority(propertySources[1]);
        for(int i=2;i<propertySources.length;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        assertThat(b.getPropertySources().get(1)).isEqualTo(propertySources[0]);
        assertThat(b.getPropertySources().get(0)).isEqualTo(propertySources[1]);
        b.lowestPriority(propertySources[5]);
        assertThat(b.getPropertySources().get(0)).isEqualTo(propertySources[5]);
    }

    // highestPriority moves the given source to the last index (the highest-priority slot).
    @Test
    public void highestPriority(){
        // setup
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        TestPropertySource[] propertySources = new TestPropertySource[10];
        for(int i=0;i<propertySources.length;i++){
            propertySources[i] = new TestPropertySource("ps"+i,i);
        }
        b.addPropertySources(propertySources);
        // test
        b.highestPriority(propertySources[propertySources.length-1]);
        for(int i=0;i<propertySources.length;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        b.highestPriority(propertySources[propertySources.length-2]);
        for(int i=0;i<propertySources.length-2;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
        assertThat(b.getPropertySources().get(propertySources.length-1)).isEqualTo(propertySources[propertySources.length-2]);
        assertThat(b.getPropertySources().get(propertySources.length-2)).isEqualTo(propertySources[propertySources.length-1]);
        b.highestPriority(propertySources[5]);
        assertThat(b.getPropertySources().get(propertySources.length-1)).isEqualTo(propertySources[5]);
    }

    // sortPropertySources must order the builder's sources exactly like Arrays.sort
    // with the same comparator.
    @Test
    public void sortPropertySources(){
        // setup
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        TestPropertySource[] propertySources = new TestPropertySource[10];
        for(int i=0;i<propertySources.length;i++){
            propertySources[i] = new TestPropertySource("ps"+i,i);
        }
        b.addPropertySources(propertySources);
        Comparator<PropertySource> psComp = (o1, o2) -> o1.toString().compareTo(o2.toString());
        // test
        b.sortPropertySources(psComp);
        Arrays.sort(propertySources, psComp);
        for(int i=0;i<propertySources.length;i++){
            assertThat(b.getPropertySources().get(i)).isEqualTo(propertySources[i]);
        }
    }

    // Same ordering guarantee for property filters.
    @Test
    public void sortPropertyFilter(){
        // setup
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        PropertyFilter[] propertyFilters = new PropertyFilter[10];
        for(int i=0;i<propertyFilters.length;i++){
            propertyFilters[i] = (value, ctx) -> value.setValue(toString() + " - ");
        }
        b.addPropertyFilters(propertyFilters);
        Comparator<PropertyFilter> pfComp = (o1, o2) -> o1.toString().compareTo(o2.toString());
        // test
        b.sortPropertyFilter(pfComp);
        Arrays.sort(propertyFilters, pfComp);
        for(int i=0;i<propertyFilters.length;i++){
            assertThat(b.getPropertyFilters().get(i)).isEqualTo(propertyFilters[i]);
        }
    }

    // An untouched builder must produce an empty (but non-null) context.
    @Test
    public void build() throws Exception {
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        Configuration cfg = b.build();
        ConfigurationContext ctx = cfg.getContext();
        assertThat(ctx).isNotNull();
        assertThat(ctx.getPropertySources()).isEmpty();
        assertThat(ctx.getPropertyFilters()).isEmpty();
    }

    // Removing the builder's own filter list empties it.
    @Test
    public void testRemoveAllFilters() throws Exception {
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        b.addPropertyFilters((value, ctx) -> value.setValue(toString() + " - "));
        assertThat(b.getPropertyFilters()).isNotEmpty();
        b.removePropertyFilters(b.getPropertyFilters());
        assertThat(b.getPropertyFilters()).isEmpty();
    }

    // Removing the builder's own source list empties it.
    @Test
    public void testRemoveAllSources() throws Exception {
        ConfigurationBuilder b = Configuration.createConfigurationBuilder();
        b.addPropertySources(new TestPropertySource());
        assertThat(b.getPropertySources()).isNotEmpty();
        b.removePropertySources(b.getPropertySources());
        // FIX(review): the original asserted getPropertyFilters() here (copy-paste from
        // testRemoveAllFilters), which is trivially empty and never verified the source
        // removal. Assert the collection this test actually mutates.
        assertThat(b.getPropertySources()).isEmpty();
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/session.proto
//
// NOTE(review): this is protoc-generated code. Do not hand-edit; change the
// .proto definition and regenerate instead. Only formatting/comments adjusted here.

package com.google.cloud.dialogflow.cx.v3beta1;

/**
 * <pre>
 * Represents the input for dtmf event.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.DtmfInput}
 */
public final class DtmfInput extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.DtmfInput)
    DtmfInputOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use DtmfInput.newBuilder() to construct.
  private DtmfInput(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private DtmfInput() {
    digits_ = "";
    finishDigit_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DtmfInput();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor (pre protobuf-java 3.16 style): reads tags until EOF,
  // preserving unrecognized fields in unknownFields.
  private DtmfInput(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              digits_ = s;
              break;
            }
          case 18:
            {
              java.lang.String s = input.readStringRequireUtf8();
              finishDigit_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_DtmfInput_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_DtmfInput_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3beta1.DtmfInput.class,
            com.google.cloud.dialogflow.cx.v3beta1.DtmfInput.Builder.class);
  }

  public static final int DIGITS_FIELD_NUMBER = 1;
  // Lazily-decoded String/ByteString; volatile for the unsynchronized caching below.
  private volatile java.lang.Object digits_;

  /**
   * <pre>
   * The dtmf digits.
   * </pre>
   *
   * <code>string digits = 1;</code>
   *
   * @return The digits.
   */
  @java.lang.Override
  public java.lang.String getDigits() {
    java.lang.Object ref = digits_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      digits_ = s;
      return s;
    }
  }

  /**
   * <pre>
   * The dtmf digits.
   * </pre>
   *
   * <code>string digits = 1;</code>
   *
   * @return The bytes for digits.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDigitsBytes() {
    java.lang.Object ref = digits_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      digits_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FINISH_DIGIT_FIELD_NUMBER = 2;
  private volatile java.lang.Object finishDigit_;

  /**
   * <pre>
   * The finish digit (if any).
   * </pre>
   *
   * <code>string finish_digit = 2;</code>
   *
   * @return The finishDigit.
   */
  @java.lang.Override
  public java.lang.String getFinishDigit() {
    java.lang.Object ref = finishDigit_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      finishDigit_ = s;
      return s;
    }
  }

  /**
   * <pre>
   * The finish digit (if any).
   * </pre>
   *
   * <code>string finish_digit = 2;</code>
   *
   * @return The bytes for finishDigit.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFinishDigitBytes() {
    java.lang.Object ref = finishDigit_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      finishDigit_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(digits_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, digits_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(finishDigit_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, finishDigit_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(digits_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, digits_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(finishDigit_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, finishDigit_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.DtmfInput)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3beta1.DtmfInput other =
        (com.google.cloud.dialogflow.cx.v3beta1.DtmfInput) obj;

    if (!getDigits().equals(other.getDigits())) return false;
    if (!getFinishDigit().equals(other.getFinishDigit())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DIGITS_FIELD_NUMBER;
    hash = (53 * hash) + getDigits().hashCode();
    hash = (37 * hash) + FINISH_DIGIT_FIELD_NUMBER;
    hash = (53 * hash) + getFinishDigit().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.dialogflow.cx.v3beta1.DtmfInput prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * Represents the input for dtmf event.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.DtmfInput}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.DtmfInput)
      com.google.cloud.dialogflow.cx.v3beta1.DtmfInputOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_DtmfInput_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_DtmfInput_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3beta1.DtmfInput.class,
              com.google.cloud.dialogflow.cx.v3beta1.DtmfInput.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.cx.v3beta1.DtmfInput.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      digits_ = "";

      finishDigit_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_DtmfInput_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.DtmfInput getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.DtmfInput.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.DtmfInput build() {
      com.google.cloud.dialogflow.cx.v3beta1.DtmfInput result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.DtmfInput buildPartial() {
      com.google.cloud.dialogflow.cx.v3beta1.DtmfInput result =
          new com.google.cloud.dialogflow.cx.v3beta1.DtmfInput(this);
      result.digits_ = digits_;
      result.finishDigit_ = finishDigit_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.DtmfInput) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.DtmfInput) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.DtmfInput other) {
      if (other == com.google.cloud.dialogflow.cx.v3beta1.DtmfInput.getDefaultInstance())
        return this;
      if (!other.getDigits().isEmpty()) {
        digits_ = other.digits_;
        onChanged();
      }
      if (!other.getFinishDigit().isEmpty()) {
        finishDigit_ = other.finishDigit_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.dialogflow.cx.v3beta1.DtmfInput parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.dialogflow.cx.v3beta1.DtmfInput) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object digits_ = "";

    /**
     * <pre>
     * The dtmf digits.
     * </pre>
     *
     * <code>string digits = 1;</code>
     *
     * @return The digits.
     */
    public java.lang.String getDigits() {
      java.lang.Object ref = digits_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        digits_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * The dtmf digits.
     * </pre>
     *
     * <code>string digits = 1;</code>
     *
     * @return The bytes for digits.
     */
    public com.google.protobuf.ByteString getDigitsBytes() {
      java.lang.Object ref = digits_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        digits_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * The dtmf digits.
     * </pre>
     *
     * <code>string digits = 1;</code>
     *
     * @param value The digits to set.
     * @return This builder for chaining.
     */
    public Builder setDigits(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      digits_ = value;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * The dtmf digits.
     * </pre>
     *
     * <code>string digits = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDigits() {
      digits_ = getDefaultInstance().getDigits();
      onChanged();
      return this;
    }

    /**
     * <pre>
     * The dtmf digits.
     * </pre>
     *
     * <code>string digits = 1;</code>
     *
     * @param value The bytes for digits to set.
     * @return This builder for chaining.
     */
    public Builder setDigitsBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      digits_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object finishDigit_ = "";

    /**
     * <pre>
     * The finish digit (if any).
     * </pre>
     *
     * <code>string finish_digit = 2;</code>
     *
     * @return The finishDigit.
     */
    public java.lang.String getFinishDigit() {
      java.lang.Object ref = finishDigit_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        finishDigit_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * <pre>
     * The finish digit (if any).
     * </pre>
     *
     * <code>string finish_digit = 2;</code>
     *
     * @return The bytes for finishDigit.
     */
    public com.google.protobuf.ByteString getFinishDigitBytes() {
      java.lang.Object ref = finishDigit_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        finishDigit_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * <pre>
     * The finish digit (if any).
     * </pre>
     *
     * <code>string finish_digit = 2;</code>
     *
     * @param value The finishDigit to set.
     * @return This builder for chaining.
     */
    public Builder setFinishDigit(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      finishDigit_ = value;
      onChanged();
      return this;
    }

    /**
     * <pre>
     * The finish digit (if any).
     * </pre>
     *
     * <code>string finish_digit = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFinishDigit() {
      finishDigit_ = getDefaultInstance().getFinishDigit();
      onChanged();
      return this;
    }

    /**
     * <pre>
     * The finish digit (if any).
     * </pre>
     *
     * <code>string finish_digit = 2;</code>
     *
     * @param value The bytes for finishDigit to set.
     * @return This builder for chaining.
     */
    public Builder setFinishDigitBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      finishDigit_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.DtmfInput)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.DtmfInput)
  private static final com.google.cloud.dialogflow.cx.v3beta1.DtmfInput DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.DtmfInput();
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.DtmfInput getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<DtmfInput> PARSER =
      new com.google.protobuf.AbstractParser<DtmfInput>() {
        @java.lang.Override
        public DtmfInput parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DtmfInput(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<DtmfInput> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DtmfInput> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.DtmfInput getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
package org.robolectric.shadows;

import android.view.ViewGroup.LayoutParams;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.Shadows;
import org.robolectric.TestRunners;

import java.util.HashMap;
import java.util.Map;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.robolectric.Shadows.shadowOf;

/**
 * Tests that {@link ShadowWebView} records the interactions performed on a
 * {@link WebView} (loaded URLs/data, clients, lifecycle calls, cache/history
 * clearing, etc.) so they can be asserted on in unit tests.
 */
@RunWith(TestRunners.MultiApiWithDefaults.class)
public class ShadowWebViewTest {

  private WebView webView;
  private ShadowWebView shadowWebView;

  @Before
  public void setUp() throws Exception {
    webView = new WebView(RuntimeEnvironment.application);
    shadowWebView = Shadows.shadowOf(webView);
  }

  @Test
  public void shouldRecordLastLoadedUrl() {
    webView.loadUrl("http://example.com");
    assertThat(shadowOf(webView).getLastLoadedUrl()).isEqualTo("http://example.com");
  }

  @Test
  public void shouldRecordLastLoadedUrlForRequestWithAdditionalHeaders() {
    // A null header map must be recorded as null, not as an empty map.
    webView.loadUrl("http://example.com", null);
    assertThat(shadowOf(webView).getLastLoadedUrl()).isEqualTo("http://example.com");
    assertThat(shadowOf(webView).getLastAdditionalHttpHeaders()).isNull();

    Map<String, String> additionalHttpHeaders = new HashMap<>(1);
    additionalHttpHeaders.put("key1", "value1");
    webView.loadUrl("http://example.com", additionalHttpHeaders);
    assertThat(shadowOf(webView).getLastLoadedUrl()).isEqualTo("http://example.com");
    assertThat(shadowOf(webView).getLastAdditionalHttpHeaders()).isNotNull();
    assertThat(shadowOf(webView).getLastAdditionalHttpHeaders()).containsKey("key1");
    assertThat(shadowOf(webView).getLastAdditionalHttpHeaders().get("key1")).isEqualTo("value1");
  }

  @Test
  public void shouldRecordLastLoadedData() {
    webView.loadData("<html><body><h1>Hi</h1></body></html>", "text/html", "utf-8");
    ShadowWebView.LoadData lastLoadData = shadowOf(webView).getLastLoadData();
    assertThat(lastLoadData.data).isEqualTo("<html><body><h1>Hi</h1></body></html>");
    assertThat(lastLoadData.mimeType).isEqualTo("text/html");
    assertThat(lastLoadData.encoding).isEqualTo("utf-8");
  }

  @Test
  public void shouldRecordLastLoadDataWithBaseURL() throws Exception {
    webView.loadDataWithBaseURL("base/url", "<html><body><h1>Hi</h1></body></html>", "text/html", "utf-8", "history/url");
    ShadowWebView.LoadDataWithBaseURL lastLoadData = shadowOf(webView).getLastLoadDataWithBaseURL();
    assertThat(lastLoadData.baseUrl).isEqualTo("base/url");
    assertThat(lastLoadData.data).isEqualTo("<html><body><h1>Hi</h1></body></html>");
    assertThat(lastLoadData.mimeType).isEqualTo("text/html");
    assertThat(lastLoadData.encoding).isEqualTo("utf-8");
    assertThat(lastLoadData.historyUrl).isEqualTo("history/url");
  }

  @Test
  public void shouldReturnSettings() {
    WebSettings webSettings = webView.getSettings();
    assertThat(webSettings).isNotNull();
  }

  @Test
  public void shouldRecordWebViewClient() {
    WebViewClient webViewClient = new WebViewClient();
    assertThat(shadowWebView.getWebViewClient()).isNull();
    webView.setWebViewClient(webViewClient);
    assertThat(shadowWebView.getWebViewClient()).isSameAs(webViewClient);
  }

  @Test
  public void shouldRecordWebChromeClient() {
    WebChromeClient webChromeClient = new WebChromeClient();
    assertThat(shadowWebView.getWebChromeClient()).isNull();
    webView.setWebChromeClient(webChromeClient);
    assertThat(shadowWebView.getWebChromeClient()).isSameAs(webChromeClient);
  }

  // Renamed from shouldRecordJavascriptInteraces (typo: "Interaces").
  @Test
  public void shouldRecordJavascriptInterfaces() {
    String[] names = {"name1", "name2"};
    for (String name : names) {
      Object obj = new Object();
      assertThat(shadowWebView.getJavascriptInterface(name)).isNull();
      webView.addJavascriptInterface(obj, name);
      assertThat(shadowWebView.getJavascriptInterface(name)).isSameAs(obj);
    }
  }

  @Test
  public void shouldStartPostRun() {
    Runnable testRun = new Runnable() {
      public void run() {
        // Do something...
      }
    };
    assertThat(shadowWebView.getRunFlag()).isFalse();
    shadowWebView.post(testRun);
    assertThat(shadowWebView.getRunFlag()).isTrue();
  }

  @Test
  public void shouldStoreCanGoBack() throws Exception {
    shadowWebView.setCanGoBack(false);
    assertFalse(webView.canGoBack());
    shadowWebView.setCanGoBack(true);
    assertTrue(webView.canGoBack());
  }

  @Test
  public void shouldStoreTheNumberOfTimesGoBackWasCalled() throws Exception {
    assertEquals(0, shadowWebView.getGoBackInvocations());
    webView.goBack();
    assertEquals(1, shadowWebView.getGoBackInvocations());
    webView.goBack();
    webView.goBack();
    assertEquals(3, shadowWebView.getGoBackInvocations());
  }

  @Test
  public void shouldRecordClearCacheWithoutDiskFiles() {
    assertThat(shadowWebView.wasClearCacheCalled()).isFalse();
    webView.clearCache(false);
    assertThat(shadowWebView.wasClearCacheCalled()).isTrue();
    assertThat(shadowWebView.didClearCacheIncludeDiskFiles()).isFalse();
  }

  @Test
  public void shouldRecordClearCacheWithDiskFiles() {
    assertThat(shadowWebView.wasClearCacheCalled()).isFalse();
    webView.clearCache(true);
    assertThat(shadowWebView.wasClearCacheCalled()).isTrue();
    assertThat(shadowWebView.didClearCacheIncludeDiskFiles()).isTrue();
  }

  @Test
  public void shouldRecordClearFormData() {
    assertThat(shadowWebView.wasClearFormDataCalled()).isFalse();
    webView.clearFormData();
    assertThat(shadowWebView.wasClearFormDataCalled()).isTrue();
  }

  @Test
  public void shouldRecordClearHistory() {
    assertThat(shadowWebView.wasClearHistoryCalled()).isFalse();
    webView.clearHistory();
    assertThat(shadowWebView.wasClearHistoryCalled()).isTrue();
  }

  @Test
  public void shouldRecordClearView() {
    assertThat(shadowWebView.wasClearViewCalled()).isFalse();
    webView.clearView();
    assertThat(shadowWebView.wasClearViewCalled()).isTrue();
  }

  @Test
  public void shouldRecordDestroy() {
    assertThat(shadowWebView.wasDestroyCalled()).isFalse();
    webView.destroy();
    assertThat(shadowWebView.wasDestroyCalled()).isTrue();
  }

  @Test
  public void shouldRecordOnPause() {
    assertThat(shadowWebView.wasOnPauseCalled()).isFalse();
    webView.onPause();
    assertThat(shadowWebView.wasOnPauseCalled()).isTrue();
  }

  @Test
  public void shouldRecordOnResume() {
    assertThat(shadowWebView.wasOnResumeCalled()).isFalse();
    webView.onResume();
    assertThat(shadowWebView.wasOnResumeCalled()).isTrue();
  }

  @Test
  public void shouldReturnPreviouslySetLayoutParams() {
    assertThat(webView.getLayoutParams()).isNull();
    LayoutParams params = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    webView.setLayoutParams(params);
    assertThat(webView.getLayoutParams()).isSameAs(params);
  }
}
/*
 * This file is released under terms of BSD license
 * See LICENSE file for more information
 */
package claw.wani.serialization;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import claw.tatsu.xcodeml.abstraction.FunctionCall;
import claw.tatsu.xcodeml.xnode.common.Xattr;
import claw.tatsu.xcodeml.xnode.common.Xcode;
import claw.tatsu.xcodeml.xnode.common.XcodeProgram;
import claw.tatsu.xcodeml.xnode.common.Xnode;
import claw.tatsu.xcodeml.xnode.common.Xscope;
import claw.tatsu.xcodeml.xnode.fortran.FfunctionDefinition;
import claw.tatsu.xcodeml.xnode.fortran.FfunctionType;
import claw.tatsu.xcodeml.xnode.fortran.FortranType;
import claw.wani.x2t.configuration.Configuration;

/**
 * Helper class to insert serialization call in XcodeML/F
 *
 * @author clementval
 */
public class Serialization {

  private static final String SER_PPSER_SAVEPOINT = "ppser_savepoint";
  private static final String SER_PPSER_SERIALIZER = "ppser_serializer";
  private static final String SER_PPSER_SERIALIZER_REF = "ppser_serializer_ref";
  private static final String SER_PPSER_ZPERTURB = "ppser_zrperturb";
  private static final String SER_FS_CREATE_SAVEPOINT = "fs_create_savepoint";
  private static final String SER_FS_ADD_SP_METAINFO = "fs_add_savepoint_metainfo";
  private static final String SER_FS_WRITE_FIELD = "fs_write_field";
  private static final String SER_FS_READ_FIELD = "fs_read_field";
  private static final String SER_MODULE_M_SERIALIZE = "m_serialize";
  private static final String SER_MODULE_UTILS_PPSER = "utils_ppser";
  private static final String SAVEPOINT_IN_SUFFIX = "in";
  private static final String SAVEPOINT_OUT_SUFFIX = "out";

  // Avoid potential instantiation of this class
  private Serialization() {
  }

  /** Kinds of serialization library calls this helper can generate. */
  private enum SerializationCall {
    SER_ADD_METAINFO, SER_READ, SER_WRITE, SER_READ_PERTURB
  }

  /**
   * Create function call to fs_create_savepoint
   *
   * @param xcodeml       Current XcodeML translation unit.
   * @param savepointName Name of the savepoint.
   * @return Newly create functionCall node.
   */
  private static Xnode createSavepoint(XcodeProgram xcodeml, String savepointName) {
    FfunctionType serType = xcodeml.createSubroutineType();

    // Create the char constant type
    Xnode nameArg = xcodeml.createCharConstant(savepointName);
    Xnode savepointArg = xcodeml.createVar(FortranType.STRUCT, SER_PPSER_SAVEPOINT, Xscope.GLOBAL);
    Xnode serCall = xcodeml.createFctCall(serType, SER_FS_CREATE_SAVEPOINT);
    serCall.matchDescendant(Xcode.ARGUMENTS).append(nameArg).append(savepointArg);
    return xcodeml.createNode(Xcode.EXPR_STATEMENT).insert(serCall);
  }

  /**
   * Create a write field function call to the serialization library.
   *
   * @param xcodeml       Current XcodeML translation unit.
   * @param savepointName Name of the savepoint.
   * @param field         Representation of the field.
   * @param fieldName     Cleaned-up name used for the serialized field.
   * @return Newly created exprStmt node encapsulating the function call.
   */
  private static Xnode createWriteFieldCall(XcodeProgram xcodeml, String savepointName, String field,
      String fieldName) {
    return createReadWriteFctCall(xcodeml, savepointName, field, fieldName, SerializationCall.SER_WRITE);
  }

  /**
   * Create a read field function call to the serialization library.
   *
   * @param xcodeml       Current XcodeML translation unit.
   * @param savepointName Name of the savepoint.
   * @param field         Representation of the field.
   * @param fieldName     Cleaned-up name used for the serialized field.
   * @return Newly created exprStmt node encapsulating the function call.
   */
  private static Xnode createReadFieldCall(XcodeProgram xcodeml, String savepointName, String field,
      String fieldName) {
    return createReadWriteFctCall(xcodeml, savepointName, field, fieldName, SerializationCall.SER_READ);
  }

  /**
   * Create the skeletion of a function call for serialization functions.
   *
   * @param xcodeml  Current XcodeML translation unit.
   * @param callType Type of call for the
   * @return Newly create functionCall node.
   */
  private static FunctionCall createBaseSerFctCall(XcodeProgram xcodeml, SerializationCall callType) {
    FfunctionType serType = xcodeml.createSubroutineType();
    Xnode savepointArg = xcodeml.createVar(FortranType.STRUCT, SER_PPSER_SAVEPOINT, Xscope.GLOBAL);

    // Map the call kind to the library entry point name.
    String serFctName;
    switch (callType) {
    case SER_READ:
    case SER_READ_PERTURB:
      serFctName = SER_FS_READ_FIELD;
      break;
    case SER_ADD_METAINFO:
      serFctName = SER_FS_ADD_SP_METAINFO;
      break;
    case SER_WRITE:
    default:
      serFctName = SER_FS_WRITE_FIELD;
    }
    FunctionCall serCall = xcodeml.createFctCall(serType, serFctName);

    // Writes go through the serializer; reads go through the reference serializer.
    if (callType == SerializationCall.SER_WRITE) {
      Xnode serializerArg = xcodeml.createVar(FortranType.STRUCT, SER_PPSER_SERIALIZER, Xscope.GLOBAL);
      serCall.addArguments(serializerArg);
    } else if (callType == SerializationCall.SER_READ || callType == SerializationCall.SER_READ_PERTURB) {
      Xnode serializerArg = xcodeml.createVar(FortranType.STRUCT, SER_PPSER_SERIALIZER_REF, Xscope.GLOBAL);
      serCall.addArguments(serializerArg);
    }
    serCall.addArguments(savepointArg);
    return serCall;
  }

  /**
   * Create a read or write field call to the serialization library.
   *
   * @param xcodeml       Current XcodeML translation unit.
   * @param savepointName Name of the savepoint (will be part of the serialized
   *                      name)
   * @param field         Representation of the field.
   * @param fieldName     Cleaned-up name used for the serialized field.
   * @param callType      Type of serialization call from the enum.
   * @return exprStmt node created with the specific function call inside.
   */
  private static Xnode createReadWriteFctCall(XcodeProgram xcodeml, String savepointName, String field,
      String fieldName, SerializationCall callType) {
    // Create the char constant type
    Xnode nameArg = xcodeml.createCharConstant(savepointName + "_" + fieldName);
    Xnode varArg = xcodeml.createVar(FortranType.REAL, field, Xscope.GLOBAL);
    FunctionCall serCall = createBaseSerFctCall(xcodeml, callType);
    serCall.addArguments(nameArg);
    serCall.addArguments(varArg);
    if (callType == SerializationCall.SER_READ_PERTURB) {
      Xnode perturbArg = xcodeml.createVar(FortranType.REAL, SER_PPSER_ZPERTURB, Xscope.GLOBAL);
      serCall.addArguments(perturbArg);
    }
    Xnode exprStmt = xcodeml.createNode(Xcode.EXPR_STATEMENT);
    exprStmt.insert(serCall);
    return exprStmt;
  }

  /**
   * Create a fs_add_savepoint_metainfo call to the serialization library.
   *
   * @param xcodeml Current XcodeML translation unit.
   * @param key     Metadata key.
   * @param value   Metadata value.
   * @return exprStmt node created with the specific function call inside.
   */
  private static Xnode createAddMetaInfoCall(XcodeProgram xcodeml, String key, String value) {
    FunctionCall serCall = createBaseSerFctCall(xcodeml, SerializationCall.SER_ADD_METAINFO);

    // Create the char constant type
    Xnode metadataName = xcodeml.createCharConstant(key);
    serCall.addArguments(metadataName);
    if (value.contains("%")) {
      // "a%b" is a Fortran derived-type member access: build memberRef(varRef(a), b).
      String[] values = value.split("%");
      serCall.addArguments(xcodeml.createNode(Xcode.F_MEMBER_REF).setAttribute(Xattr.MEMBER, values[1]).append(
          xcodeml.createNode(Xcode.VAR_REF).append(xcodeml.createNode(Xcode.VAR).setValue(values[0]))));
    } else {
      serCall.addArguments(xcodeml.createNode(Xcode.VAR).setValue(value));
    }
    return xcodeml.createNode(Xcode.EXPR_STATEMENT).insert(serCall);
  }

  /**
   * Create function calls to the serialization library to write a savepoint.
   *
   * @param cfg           Current configuration (serialization switches).
   * @param xcodeml       Current XcodeML translation unit.
   * @param hook          Hook for node insertion.
   * @param metadata      Key=value information for metadata.
   * @param fields        List of fields to be written.
   * @param savepointName Name of the savepoint.
   * @param step          Serialization step (input or output).
   * @return Last inserted node.
   */
  public static Xnode generateWriteSavepoint(Configuration cfg, XcodeProgram xcodeml, Xnode hook,
      Map<String, String> metadata, List<String> fields, String savepointName, SerializationStep step) {
    return generateSavepoint(cfg, xcodeml, hook, metadata, fields, savepointName, step, SerializationMode.WRITE);
  }

  /**
   * Create function calls to the serialization library to read a savepoint.
   *
   * @param cfg           Current configuration (serialization switches).
   * @param xcodeml       Current XcodeML translation unit.
   * @param hook          Hook for node insertion.
   * @param metadata      Key=value information for metadata.
   * @param fields        List of fields to be written.
   * @param savepointName Name of the savepoint.
   * @param step          Serialization step (input or output).
   * @return Last inserted node.
   */
  public static Xnode generateReadSavepoint(Configuration cfg, XcodeProgram xcodeml, Xnode hook,
      Map<String, String> metadata, List<String> fields, String savepointName, SerializationStep step) {
    return generateSavepoint(cfg, xcodeml, hook, metadata, fields, savepointName, step, SerializationMode.READ);
  }

  /**
   * Create function calls to the serialization library to write or read a
   * savepoint.
   *
   * @param cfg           Current configuration (serialization switches).
   * @param xcodeml       Current XcodeML translation unit.
   * @param hook          Hook for node insertion.
   * @param metadata      Key=value information for metadata.
   * @param fields        List of fields.
   * @param savepointName Name of the savepoint.
   * @param step          Serialization step (input or output).
   * @param mode          Read or write mode.
   * @return Last inserted node.
   */
  private static Xnode generateSavepoint(Configuration cfg, XcodeProgram xcodeml, Xnode hook,
      Map<String, String> metadata, List<String> fields, String savepointName, SerializationStep step,
      SerializationMode mode) {
    if (!cfg.getBooleanParameter(Configuration.SCA_SERIALIZATION_ENABLED)) {
      return hook;
    }

    // NOTE: seriliazeRead/seriliazeWrite are (misspelled) Configuration API
    // methods; renaming them would break the external interface.
    if ((cfg.seriliazeRead() && mode != SerializationMode.READ)
        || (cfg.seriliazeWrite() && mode != SerializationMode.WRITE)) {
      return hook;
    }

    savepointName = String.format("%s_%s", savepointName,
        step == SerializationStep.SER_IN ? SAVEPOINT_IN_SUFFIX : SAVEPOINT_OUT_SUFFIX);

    List<Xnode> nodes = new ArrayList<>();
    nodes.add(createSavepoint(xcodeml, savepointName));
    for (Map.Entry<String, String> entry : metadata.entrySet()) {
      nodes.add(createAddMetaInfoCall(xcodeml, entry.getKey(), entry.getValue()));
    }

    // LinkedHashSet keeps the first-seen field order so the generated
    // serialization calls are deterministic across runs (a plain HashSet
    // produced JVM-dependent ordering in the generated code).
    Set<String> uniqueFields = new LinkedHashSet<>(fields);
    Map<String, Integer> fieldNames = new HashMap<>();
    for (String field : uniqueFields) {
      String fieldName = cleanUpFieldName(field);
      if (fieldNames.containsKey(fieldName)) {
        // Disambiguate duplicated cleaned-up names with a numeric suffix.
        int counter = fieldNames.get(fieldName) + 1;
        fieldNames.replace(fieldName, counter);
        fieldName = String.format("%s_%d", fieldName, counter);
      } else {
        fieldNames.put(fieldName, 0);
      }
      if (mode == SerializationMode.WRITE) {
        nodes.add(createWriteFieldCall(xcodeml, savepointName, field, fieldName));
      } else if (mode == SerializationMode.READ) {
        nodes.add(createReadFieldCall(xcodeml, savepointName, field, fieldName));
      }
    }
    return insertNodes(step, hook, nodes);
  }

  /**
   * Insert nodes for an input or output serialization.
   *
   * @param step  Serialization step information.
   * @param hook  Hook node to start insertion.
   * @param nodes List of nodes to be inserted.
   * @return Last inserted node (the hook itself if nodes is empty).
   */
  private static Xnode insertNodes(SerializationStep step, Xnode hook, List<Xnode> nodes) {
    Xnode crtHook = hook;
    for (Xnode node : nodes) {
      if (step == SerializationStep.SER_OUT) {
        crtHook.insertAfter(node);
        crtHook = node;
      } else if (step == SerializationStep.SER_IN) {
        // First node goes before the hook; the rest chain after it so the
        // generated statements keep their original order.
        if (crtHook.equals(hook)) {
          crtHook.insertBefore(node);
        } else {
          crtHook.insertAfter(node);
        }
        crtHook = node;
      }
    }
    return crtHook;
  }

  /**
   * Insert the correct USE statements for using the serialization library.
   *
   * @param cfg     Current configuration (serialization switches).
   * @param xcodeml Current XcodeML/F translation unit.
   * @param fctDef  Function definition.
   */
  public static void insertImports(Configuration cfg, XcodeProgram xcodeml, FfunctionDefinition fctDef) {
    if (!cfg.getBooleanParameter(Configuration.SCA_SERIALIZATION_ENABLED)) {
      return;
    }
    fctDef.getDeclarationTable().insertUseDecl(xcodeml, SER_MODULE_M_SERIALIZE);
    fctDef.getDeclarationTable().insertUseDecl(xcodeml, SER_MODULE_UTILS_PPSER);
  }

  /**
   * Remove illegal character in the field name.
   *
   * @param fieldName Original field name.
   * @return Cleaned up field name.
   */
  private static String cleanUpFieldName(String fieldName) {
    return fieldName.replaceAll("%", "_").replaceAll("\\(.*\\)", "").replaceAll(":", "").replaceAll(",", "");
  }
}
/**
 * Copyright (c) 2000-present Liferay, Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or modify it under
 * the terms of the GNU Lesser General Public License as published by the Free
 * Software Foundation; either version 2.1 of the License, or (at your option)
 * any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
 * details.
 */

package org.oep.core.datamgt.dictionary.service.base;

import com.liferay.portal.kernel.bean.BeanReference;
import com.liferay.portal.kernel.bean.IdentifiableBean;
import com.liferay.portal.kernel.dao.jdbc.SqlUpdate;
import com.liferay.portal.kernel.dao.jdbc.SqlUpdateFactoryUtil;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.service.BaseServiceImpl;
import com.liferay.portal.service.persistence.UserPersistence;

import org.oep.core.datamgt.dictionary.model.DictAttribute;
import org.oep.core.datamgt.dictionary.service.DictAttributeService;
import org.oep.core.datamgt.dictionary.service.persistence.DictAttributePersistence;
import org.oep.core.datamgt.dictionary.service.persistence.DictCollectionFinder;
import org.oep.core.datamgt.dictionary.service.persistence.DictCollectionPersistence;
import org.oep.core.datamgt.dictionary.service.persistence.DictDataPersistence;
import org.oep.core.datamgt.dictionary.service.persistence.DictMetaDataPersistence;

import javax.sql.DataSource;

/**
 * Provides the base implementation for the dictionary attribute remote service.
 *
 * <p>
 * This implementation exists only as a container for the default service methods generated by ServiceBuilder. All custom service methods should be put in {@link org.oep.core.datamgt.dictionary.service.impl.DictAttributeServiceImpl}.
 * </p>
 *
 * @author TrungDK
 * @see org.oep.core.datamgt.dictionary.service.impl.DictAttributeServiceImpl
 * @see org.oep.core.datamgt.dictionary.service.DictAttributeServiceUtil
 * @generated
 */
public abstract class DictAttributeServiceBaseImpl extends BaseServiceImpl
	implements DictAttributeService, IdentifiableBean {
	/*
	 * NOTE FOR DEVELOPERS:
	 *
	 * Never modify or reference this class directly. Always use {@link org.oep.core.datamgt.dictionary.service.DictAttributeServiceUtil} to access the dictionary attribute remote service.
	 */

	/**
	 * Returns the dictionary attribute local service.
	 *
	 * @return the dictionary attribute local service
	 */
	public org.oep.core.datamgt.dictionary.service.DictAttributeLocalService getDictAttributeLocalService() {
		return dictAttributeLocalService;
	}

	/**
	 * Sets the dictionary attribute local service.
	 *
	 * @param dictAttributeLocalService the dictionary attribute local service
	 */
	public void setDictAttributeLocalService(
		org.oep.core.datamgt.dictionary.service.DictAttributeLocalService dictAttributeLocalService) {
		this.dictAttributeLocalService = dictAttributeLocalService;
	}

	/**
	 * Returns the dictionary attribute remote service.
	 *
	 * @return the dictionary attribute remote service
	 */
	public org.oep.core.datamgt.dictionary.service.DictAttributeService getDictAttributeService() {
		return dictAttributeService;
	}

	/**
	 * Sets the dictionary attribute remote service.
	 *
	 * @param dictAttributeService the dictionary attribute remote service
	 */
	public void setDictAttributeService(
		org.oep.core.datamgt.dictionary.service.DictAttributeService dictAttributeService) {
		this.dictAttributeService = dictAttributeService;
	}

	/**
	 * Returns the dictionary attribute persistence.
	 *
	 * @return the dictionary attribute persistence
	 */
	public DictAttributePersistence getDictAttributePersistence() {
		return dictAttributePersistence;
	}

	/**
	 * Sets the dictionary attribute persistence.
	 *
	 * @param dictAttributePersistence the dictionary attribute persistence
	 */
	public void setDictAttributePersistence(
		DictAttributePersistence dictAttributePersistence) {
		this.dictAttributePersistence = dictAttributePersistence;
	}

	/**
	 * Returns the dictionary collection local service.
	 *
	 * @return the dictionary collection local service
	 */
	public org.oep.core.datamgt.dictionary.service.DictCollectionLocalService getDictCollectionLocalService() {
		return dictCollectionLocalService;
	}

	/**
	 * Sets the dictionary collection local service.
	 *
	 * @param dictCollectionLocalService the dictionary collection local service
	 */
	public void setDictCollectionLocalService(
		org.oep.core.datamgt.dictionary.service.DictCollectionLocalService dictCollectionLocalService) {
		this.dictCollectionLocalService = dictCollectionLocalService;
	}

	/**
	 * Returns the dictionary collection remote service.
	 *
	 * @return the dictionary collection remote service
	 */
	public org.oep.core.datamgt.dictionary.service.DictCollectionService getDictCollectionService() {
		return dictCollectionService;
	}

	/**
	 * Sets the dictionary collection remote service.
	 *
	 * @param dictCollectionService the dictionary collection remote service
	 */
	public void setDictCollectionService(
		org.oep.core.datamgt.dictionary.service.DictCollectionService dictCollectionService) {
		this.dictCollectionService = dictCollectionService;
	}

	/**
	 * Returns the dictionary collection persistence.
	 *
	 * @return the dictionary collection persistence
	 */
	public DictCollectionPersistence getDictCollectionPersistence() {
		return dictCollectionPersistence;
	}

	/**
	 * Sets the dictionary collection persistence.
	 *
	 * @param dictCollectionPersistence the dictionary collection persistence
	 */
	public void setDictCollectionPersistence(
		DictCollectionPersistence dictCollectionPersistence) {
		this.dictCollectionPersistence = dictCollectionPersistence;
	}

	/**
	 * Returns the dictionary collection finder.
	 *
	 * @return the dictionary collection finder
	 */
	public DictCollectionFinder getDictCollectionFinder() {
		return dictCollectionFinder;
	}

	/**
	 * Sets the dictionary collection finder.
	 *
	 * @param dictCollectionFinder the dictionary collection finder
	 */
	public void setDictCollectionFinder(
		DictCollectionFinder dictCollectionFinder) {
		this.dictCollectionFinder = dictCollectionFinder;
	}

	/**
	 * Returns the dictionary data local service.
	 *
	 * @return the dictionary data local service
	 */
	public org.oep.core.datamgt.dictionary.service.DictDataLocalService getDictDataLocalService() {
		return dictDataLocalService;
	}

	/**
	 * Sets the dictionary data local service.
	 *
	 * @param dictDataLocalService the dictionary data local service
	 */
	public void setDictDataLocalService(
		org.oep.core.datamgt.dictionary.service.DictDataLocalService dictDataLocalService) {
		this.dictDataLocalService = dictDataLocalService;
	}

	/**
	 * Returns the dictionary data remote service.
	 *
	 * @return the dictionary data remote service
	 */
	public org.oep.core.datamgt.dictionary.service.DictDataService getDictDataService() {
		return dictDataService;
	}

	/**
	 * Sets the dictionary data remote service.
	 *
	 * @param dictDataService the dictionary data remote service
	 */
	public void setDictDataService(
		org.oep.core.datamgt.dictionary.service.DictDataService dictDataService) {
		this.dictDataService = dictDataService;
	}

	/**
	 * Returns the dictionary data persistence.
	 *
	 * @return the dictionary data persistence
	 */
	public DictDataPersistence getDictDataPersistence() {
		return dictDataPersistence;
	}

	/**
	 * Sets the dictionary data persistence.
	 *
	 * @param dictDataPersistence the dictionary data persistence
	 */
	public void setDictDataPersistence(DictDataPersistence dictDataPersistence) {
		this.dictDataPersistence = dictDataPersistence;
	}

	/**
	 * Returns the dictionary meta data local service.
	 *
	 * @return the dictionary meta data local service
	 */
	public org.oep.core.datamgt.dictionary.service.DictMetaDataLocalService getDictMetaDataLocalService() {
		return dictMetaDataLocalService;
	}

	/**
	 * Sets the dictionary meta data local service.
	 *
	 * @param dictMetaDataLocalService the dictionary meta data local service
	 */
	public void setDictMetaDataLocalService(
		org.oep.core.datamgt.dictionary.service.DictMetaDataLocalService dictMetaDataLocalService) {
		this.dictMetaDataLocalService = dictMetaDataLocalService;
	}

	/**
	 * Returns the dictionary meta data remote service.
	 *
	 * @return the dictionary meta data remote service
	 */
	public org.oep.core.datamgt.dictionary.service.DictMetaDataService getDictMetaDataService() {
		return dictMetaDataService;
	}

	/**
	 * Sets the dictionary meta data remote service.
	 *
	 * @param dictMetaDataService the dictionary meta data remote service
	 */
	public void setDictMetaDataService(
		org.oep.core.datamgt.dictionary.service.DictMetaDataService dictMetaDataService) {
		this.dictMetaDataService = dictMetaDataService;
	}

	/**
	 * Returns the dictionary meta data persistence.
	 *
	 * @return the dictionary meta data persistence
	 */
	public DictMetaDataPersistence getDictMetaDataPersistence() {
		return dictMetaDataPersistence;
	}

	/**
	 * Sets the dictionary meta data persistence.
	 *
	 * @param dictMetaDataPersistence the dictionary meta data persistence
	 */
	public void setDictMetaDataPersistence(
		DictMetaDataPersistence dictMetaDataPersistence) {
		this.dictMetaDataPersistence = dictMetaDataPersistence;
	}

	/**
	 * Returns the counter local service.
	 *
	 * @return the counter local service
	 */
	public com.liferay.counter.service.CounterLocalService getCounterLocalService() {
		return counterLocalService;
	}

	/**
	 * Sets the counter local service.
	 *
	 * @param counterLocalService the counter local service
	 */
	public void setCounterLocalService(
		com.liferay.counter.service.CounterLocalService counterLocalService) {
		this.counterLocalService = counterLocalService;
	}

	/**
	 * Returns the resource local service.
	 *
	 * @return the resource local service
	 */
	public com.liferay.portal.service.ResourceLocalService getResourceLocalService() {
		return resourceLocalService;
	}

	/**
	 * Sets the resource local service.
	 *
	 * @param resourceLocalService the resource local service
	 */
	public void setResourceLocalService(
		com.liferay.portal.service.ResourceLocalService resourceLocalService) {
		this.resourceLocalService = resourceLocalService;
	}

	/**
	 * Returns the user local service.
	 *
	 * @return the user local service
	 */
	public com.liferay.portal.service.UserLocalService getUserLocalService() {
		return userLocalService;
	}

	/**
	 * Sets the user local service.
	 *
	 * @param userLocalService the user local service
	 */
	public void setUserLocalService(
		com.liferay.portal.service.UserLocalService userLocalService) {
		this.userLocalService = userLocalService;
	}

	/**
	 * Returns the user remote service.
	 *
	 * @return the user remote service
	 */
	public com.liferay.portal.service.UserService getUserService() {
		return userService;
	}

	/**
	 * Sets the user remote service.
	 *
	 * @param userService the user remote service
	 */
	public void setUserService(
		com.liferay.portal.service.UserService userService) {
		this.userService = userService;
	}

	/**
	 * Returns the user persistence.
	 *
	 * @return the user persistence
	 */
	public UserPersistence getUserPersistence() {
		return userPersistence;
	}

	/**
	 * Sets the user persistence.
	 *
	 * @param userPersistence the user persistence
	 */
	public void setUserPersistence(UserPersistence userPersistence) {
		this.userPersistence = userPersistence;
	}

	// Spring lifecycle hook: capture this bean's classloader so invokeMethod
	// can restore it around cross-classloader calls.
	public void afterPropertiesSet() {
		Class<?> clazz = getClass();

		_classLoader = clazz.getClassLoader();
	}

	public void destroy() {
	}

	/**
	 * Returns the Spring bean ID for this bean.
	 *
	 * @return the Spring bean ID for this bean
	 */
	@Override
	public String getBeanIdentifier() {
		return _beanIdentifier;
	}

	/**
	 * Sets the Spring bean ID for this bean.
	 *
	 * @param beanIdentifier the Spring bean ID for this bean
	 */
	@Override
	public void setBeanIdentifier(String beanIdentifier) {
		_beanIdentifier = beanIdentifier;
	}

	@Override
	public Object invokeMethod(String name, String[] parameterTypes,
		Object[] arguments) throws Throwable {
		// Swap the thread context classloader to this bean's classloader for the
		// duration of the reflective call, then restore it (needed when the
		// service is invoked from a different web-app classloader).
		Thread currentThread = Thread.currentThread();

		ClassLoader contextClassLoader = currentThread.getContextClassLoader();

		if (contextClassLoader != _classLoader) {
			currentThread.setContextClassLoader(_classLoader);
		}

		try {
			return _clpInvoker.invokeMethod(name, parameterTypes, arguments);
		}
		finally {
			if (contextClassLoader != _classLoader) {
				currentThread.setContextClassLoader(contextClassLoader);
			}
		}
	}

	protected Class<?> getModelClass() {
		return DictAttribute.class;
	}

	protected String getModelClassName() {
		return DictAttribute.class.getName();
	}

	/**
	 * Performs an SQL query.
	 *
	 * @param sql the sql query
	 */
	protected void runSQL(String sql) throws SystemException {
		try {
			// Reuse the persistence layer's DataSource for ad-hoc SQL updates.
			DataSource dataSource = dictAttributePersistence.getDataSource();

			SqlUpdate sqlUpdate = SqlUpdateFactoryUtil.getSqlUpdate(dataSource,
					sql, new int[0]);

			sqlUpdate.update();
		}
		catch (Exception e) {
			throw new SystemException(e);
		}
	}

	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictAttributeLocalService.class)
	protected org.oep.core.datamgt.dictionary.service.DictAttributeLocalService dictAttributeLocalService;
	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictAttributeService.class)
	protected org.oep.core.datamgt.dictionary.service.DictAttributeService dictAttributeService;
	@BeanReference(type = DictAttributePersistence.class)
	protected DictAttributePersistence dictAttributePersistence;
	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictCollectionLocalService.class)
	protected org.oep.core.datamgt.dictionary.service.DictCollectionLocalService dictCollectionLocalService;
	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictCollectionService.class)
	protected org.oep.core.datamgt.dictionary.service.DictCollectionService dictCollectionService;
	@BeanReference(type = DictCollectionPersistence.class)
	protected DictCollectionPersistence dictCollectionPersistence;
	@BeanReference(type = DictCollectionFinder.class)
	protected DictCollectionFinder dictCollectionFinder;
	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictDataLocalService.class)
	protected org.oep.core.datamgt.dictionary.service.DictDataLocalService dictDataLocalService;
	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictDataService.class)
	protected org.oep.core.datamgt.dictionary.service.DictDataService dictDataService;
	@BeanReference(type = DictDataPersistence.class)
	protected DictDataPersistence dictDataPersistence;
	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictMetaDataLocalService.class)
	protected org.oep.core.datamgt.dictionary.service.DictMetaDataLocalService dictMetaDataLocalService;
	@BeanReference(type = org.oep.core.datamgt.dictionary.service.DictMetaDataService.class)
	protected org.oep.core.datamgt.dictionary.service.DictMetaDataService dictMetaDataService;
	@BeanReference(type = DictMetaDataPersistence.class)
	protected DictMetaDataPersistence dictMetaDataPersistence;
	@BeanReference(type = com.liferay.counter.service.CounterLocalService.class)
	protected com.liferay.counter.service.CounterLocalService counterLocalService;
	@BeanReference(type = com.liferay.portal.service.ResourceLocalService.class)
	protected com.liferay.portal.service.ResourceLocalService resourceLocalService;
	@BeanReference(type = com.liferay.portal.service.UserLocalService.class)
	protected com.liferay.portal.service.UserLocalService userLocalService;
	@BeanReference(type = com.liferay.portal.service.UserService.class)
	protected com.liferay.portal.service.UserService userService;
	@BeanReference(type = UserPersistence.class)
	protected UserPersistence userPersistence;
	private String _beanIdentifier;
	private ClassLoader _classLoader;
	private DictAttributeServiceClpInvoker _clpInvoker = new DictAttributeServiceClpInvoker();
}
/**
 * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.strata.pricer.impl.volatility.smile;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;

import java.util.Arrays;
import java.util.BitSet;
import java.util.function.Function;

import org.slf4j.Logger;
import org.testng.annotations.Test;

import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.collect.array.DoubleArray;
import com.opengamma.strata.collect.array.DoubleMatrix;
import com.opengamma.strata.math.impl.differentiation.VectorFieldFirstOrderDifferentiator;
import com.opengamma.strata.math.impl.statistics.leastsquare.LeastSquareResults;
import com.opengamma.strata.math.impl.statistics.leastsquare.LeastSquareResultsWithTransform;

import cern.jet.random.engine.MersenneTwister;
import cern.jet.random.engine.RandomEngine;

/**
 * Abstract test case for smile model fitters.
 * <p>
 * Concrete subclasses supply a volatility model, reference model data and a fitter factory;
 * this base class then checks that the fitter recovers the model parameters from clean and
 * noisy synthetic volatilities, and that the analytic Jacobian matches a finite-difference one.
 *
 * @param <T> the smile model data
 */
@Test
public abstract class SmileModelFitterTest<T extends SmileModelData> {

  protected static double TIME_TO_EXPIRY = 7.0;
  protected static double F = 0.03;
  // private and never reassigned, so safe to make final
  private static final RandomEngine UNIFORM = new MersenneTwister();
  protected static double[] STRIKES = new double[] {0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1};

  protected double[] _cleanVols;
  protected double[] _noisyVols;
  protected double[] _errors;
  protected VolatilityFunctionProvider<T> _model;
  protected SmileModelFitter<T> _fitter;
  // NOTE(review): field name is a long-standing typo ("nosiy"); kept because subclasses may reference it
  protected SmileModelFitter<T> _nosiyFitter;
  protected double _chiSqEps = 1e-6;
  protected double _paramValueEps = 1e-6;

  /** Returns the logger used for benchmark reporting. */
  abstract Logger getlogger();

  /** Returns the volatility model under test. */
  abstract VolatilityFunctionProvider<T> getModel();

  /** Returns the reference model data used to generate the synthetic volatilities. */
  abstract T getModelData();

  /** Creates a fitter for the given market inputs and model. */
  abstract SmileModelFitter<T> getFitter(
      double forward,
      double[] strikes,
      double timeToExpiry,
      double[] impliedVols,
      double[] error,
      VolatilityFunctionProvider<T> model);

  /** Returns the deterministic start points; one fit is attempted from each. */
  abstract double[][] getStartValues();

  /** Returns a random start point in the model's parameter domain. */
  abstract double[] getRandomStartValues();

  /** Returns the fixed-parameter masks, parallel to {@link #getStartValues()}. */
  abstract BitSet[] getFixedValues();

  /**
   * Builds clean and noisy volatility samples from the reference model data and
   * constructs one fitter for each sample.
   */
  public SmileModelFitterTest() {
    VolatilityFunctionProvider<T> model = getModel();
    T data = getModelData();
    int n = STRIKES.length;
    _noisyVols = new double[n];
    _errors = new double[n];
    _cleanVols = new double[n];
    Arrays.fill(_errors, 1e-4);
    for (int i = 0; i < n; i++) {
      _cleanVols[i] = model.volatility(F, STRIKES[i], TIME_TO_EXPIRY, data);
      // noise is non-negative and scaled by the quoted error
      _noisyVols[i] = _cleanVols[i] + UNIFORM.nextDouble() * _errors[i];
    }
    _fitter = getFitter(F, STRIKES, TIME_TO_EXPIRY, _cleanVols, _errors, model);
    _nosiyFitter = getFitter(F, STRIKES, TIME_TO_EXPIRY, _noisyVols, _errors, model);
  }

  /**
   * Fitting noise-free data must recover the reference parameters to {@code _paramValueEps}
   * with essentially zero chi-square, from every start point.
   */
  public void testExactFit() {
    double[][] start = getStartValues();
    BitSet[] fixed = getFixedValues();
    int nStartPoints = start.length;
    ArgChecker.isTrue(fixed.length == nStartPoints);
    for (int trys = 0; trys < nStartPoints; trys++) {
      LeastSquareResultsWithTransform results = _fitter.solve(DoubleArray.copyOf(start[trys]), fixed[trys]);
      DoubleArray res = results.getModelParameters();
      assertEquals(0.0, results.getChiSq(), _chiSqEps);
      int n = res.size();
      T data = getModelData();
      assertEquals(data.getNumberOfParameters(), n);
      for (int i = 0; i < n; i++) {
        assertEquals(data.getParameter(i), res.get(i), _paramValueEps);
      }
    }
  }

  /**
   * Fitting noisy data must stay close to the reference parameters (tolerance 1e-2)
   * with a bounded chi-square, from every start point.
   */
  public void testNoisyFit() {
    double[][] start = getStartValues();
    BitSet[] fixed = getFixedValues();
    int nStartPoints = start.length;
    ArgChecker.isTrue(fixed.length == nStartPoints);
    for (int trys = 0; trys < nStartPoints; trys++) {
      LeastSquareResultsWithTransform results = _nosiyFitter.solve(DoubleArray.copyOf(start[trys]), fixed[trys]);
      DoubleArray res = results.getModelParameters();
      double eps = 1e-2;
      assertTrue(results.getChiSq() < 7);
      int n = res.size();
      T data = getModelData();
      assertEquals(data.getNumberOfParameters(), n);
      for (int i = 0; i < n; i++) {
        assertEquals(data.getParameter(i), res.get(i), eps);
      }
    }
  }

  /**
   * Rough benchmark: warms up the JIT, then reports the average wall-clock time per fit.
   */
  public void timeTest() {
    long start = 0;
    int hotspotWarmupCycles = 200;
    int benchmarkCycles = 1000;
    int nStarts = getStartValues().length;
    for (int i = 0; i < hotspotWarmupCycles; i++) {
      testNoisyFit();
    }
    start = System.nanoTime();
    for (int i = 0; i < benchmarkCycles; i++) {
      testNoisyFit();
    }
    long time = System.nanoTime() - start;
    // BUG FIX: the elapsed time is in nanoseconds but was previously logged raw with an "ms"
    // label; convert ns -> ms before reporting
    getlogger().info("time per fit: " + ((double) time) / 1e6 / benchmarkCycles / nStarts + "ms");
  }

  /**
   * Fits deliberately pathological market data from several random start points and
   * checks the best chi-square stays under a loose bound.
   */
  public void horribleMarketDataTest() {
    double forward = 0.0059875;
    double[] strikes = new double[] {0.0012499999999999734, 0.0024999999999999467, 0.003750000000000031,
        0.0050000000000000044, 0.006249999999999978, 0.007499999999999951, 0.008750000000000036,
        0.010000000000000009, 0.011249999999999982, 0.012499999999999956, 0.01375000000000004,
        0.015000000000000013, 0.016249999999999987, 0.01749999999999996, 0.018750000000000044,
        0.020000000000000018, 0.02124999999999999, 0.022499999999999964, 0.02375000000000005,
        0.025000000000000022, 0.026249999999999996, 0.02749999999999997, 0.028750000000000053,
        0.030000000000000027};
    double expiry = 0.09041095890410959;
    double[] vols = new double[] {2.7100433855959642, 1.5506135190088546, 0.9083977239618538, 0.738416513934868,
        0.8806973450124451, 1.0906290439592792, 1.2461975189027226, 1.496275983572826, 1.5885915338673156,
        1.4842142974195722, 1.7667347426399058, 1.4550288621444052, 1.0651798188736166, 1.143318270172714,
        1.216215092528441, 1.2845258218014657, 1.3488224665755535, 1.9259326343836376, 1.9868728791190922,
        2.0441767092857317, 2.0982583238541026, 2.1494622372820675, 2.198020785622251, 2.244237863291375};
    int n = strikes.length;
    double[] errors = new double[n];
    Arrays.fill(errors, 0.01); //1% error
    SmileModelFitter<T> fitter = getFitter(forward, strikes, expiry, vols, errors, getModel());
    LeastSquareResults best = null;
    BitSet fixed = new BitSet();
    for (int i = 0; i < 5; i++) {
      double[] start = getRandomStartValues();
      LeastSquareResults lsRes = fitter.solve(DoubleArray.copyOf(start), fixed);
      if (best == null) {
        best = lsRes;
      } else {
        if (lsRes.getChiSq() < best.getChiSq()) {
          best = lsRes;
        }
      }
    }
    if (best != null) {
      assertTrue(best.getChiSq() < 24000); //average error 31.6% - not a good fit, but the data is horrible
    }
  }

  /**
   * Checks the analytic Jacobian at the reference model parameters.
   */
  public void testJacobian() {
    T data = getModelData();
    int n = data.getNumberOfParameters();
    double[] temp = new double[n];
    for (int i = 0; i < n; i++) {
      temp[i] = data.getParameter(i);
    }
    DoubleArray x = DoubleArray.copyOf(temp);
    testJacobian(x);
  }

  // random test to be turned off
  @Test(enabled = false)
  public void testRandomJacobian() {
    for (int i = 0; i < 10; i++) {
      double[] temp = getRandomStartValues();
      DoubleArray x = DoubleArray.copyOf(temp);
      try {
        testJacobian(x);
      } catch (AssertionError e) {
        System.out.println("Jacobian test failed at " + x.toString());
        throw e;
      }
    }
  }

  /**
   * Compares the model's analytic Jacobian against a finite-difference approximation
   * at the given parameter point (tolerance 2e-2 per entry).
   */
  private void testJacobian(DoubleArray x) {
    int n = x.size();
    Function<DoubleArray, DoubleArray> func = _fitter.getModelValueFunction();
    Function<DoubleArray, DoubleMatrix> jacFunc = _fitter.getModelJacobianFunction();
    VectorFieldFirstOrderDifferentiator differ = new VectorFieldFirstOrderDifferentiator();
    Function<DoubleArray, DoubleMatrix> jacFuncFD = differ.differentiate(func);
    DoubleMatrix jac = jacFunc.apply(x);
    DoubleMatrix jacFD = jacFuncFD.apply(x);
    int rows = jacFD.rowCount();
    int cols = jacFD.columnCount();
    assertEquals(_cleanVols.length, rows);
    assertEquals(n, cols);
    assertEquals(rows, jac.rowCount());
    assertEquals(cols, jac.columnCount());
    for (int i = 0; i < rows; i++) {
      for (int j = 0; j < cols; j++) {
        assertEquals(jacFD.get(i, j), jac.get(i, j), 2e-2);
      }
    }
  }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package sandeshvahak.helper;

import java.util.HashMap;

/**
 * A simple mutable pair of values.
 *
 * @author Vijay
 * @param <T> the type of the first value
 * @param <U> the type of the second value
 */
public class TokenMap<T, U> {

    private T first;
    private U second;

    public TokenMap() {
    }

    public TokenMap(T first, U second) {
        this.first = first;
        this.second = second;
    }

    /**
     * Returns the first value.
     */
    public T getFirst() {
        return this.first;
    }

    /**
     * Returns the first value.
     *
     * @deprecated misspelled accessor kept for backward compatibility; use {@link #getFirst()}
     */
    @Deprecated
    public T getFisrt() {
        return getFirst();
    }

    /**
     * Returns the second value.
     */
    public U getSecond() {
        return this.second;
    }

    public void setFirst(T first) {
        this.first = first;
    }

    public void setSecond(U second) {
        this.second = second;
    }
}

/**
 * Two-level token dictionary mapping well-known protocol strings to small integer
 * tokens and back. Tokens below {@code SECONDARY_STRINGS_START} index the primary
 * table; tokens at or above it select a secondary sub-dictionary.
 */
class TokenDictionary {

    private static TokenDictionary tokenDictionary;

    // first token value reserved for secondary dictionaries
    private static final int SECONDARY_STRINGS_START = 236;

    private static final String[] primaryStrings = new String[]{"", "", "", "account", "ack", "action",
        "active", "add", "after", "all", "allow", "apple", "auth", "author", "available", "bad-protocol",
        "bad-request", "before", "body", "broadcast", "cancel", "category", "challenge", "chat", "clean",
        "code", "composing", "config", "contacts", "count", "create", "creation", "debug", "default",
        "delete", "delivery", "delta", "deny", "digest", "dirty", "duplicate", "elapsed", "enable",
        "encoding", "error", "event", "expiration", "expired", "fail", "failure", "false", "favorites",
        "feature", "features", "feature-not-implemented", "field", "first", "free", "from", "g.us", "get",
        "google", "group", "groups", "http://etherx.jabber.org/streams",
        "http://jabber.org/protocol/chatstates", "ib", "id", "image", "img", "index",
        "internal-server-error", "ip", "iq", "item-not-found", "item", "jabber:iq:last",
        "jabber:iq:privacy", "jabber:x:event", "jid", "kind", "last", "leave", "list", "max", "mechanism",
        "media", "message_acks", "message", "method", "microsoft", "missing", "modify", "mute", "name",
        "nokia", "none", "not-acceptable", "not-allowed", "not-authorized", "notification", "notify",
        "off", "offline", "order", "owner", "owning", "p_o", "p_t", "paid", "participant", "participants",
        "participating", "paused", "picture", "pin", "ping", "platform", "port", "presence", "preview",
        "probe", "prop", "props", "query", "raw", "read", "reason", "receipt", "received", "relay",
        "remote-server-timeout", "remove", "request", "required", "resource-constraint", "resource",
        "response", "result", "retry", "rim", "s_o", "s_t", "s.us", "s.whatsapp.net", "seconds",
        "server-error", "server", "service-unavailable", "set", "show", "silent", "stat", "status",
        "stream:error", "stream:features", "subject", "subscribe", "success", "sync", "t", "text",
        "timeout", "timestamp", "to", "true", "type", "unavailable", "unsubscribe", "uri", "url",
        "urn:ietf:params:xml:ns:xmpp-sasl", "urn:ietf:params:xml:ns:xmpp-stanzas",
        "urn:ietf:params:xml:ns:xmpp-streams", "urn:xmpp:ping", "urn:xmpp:receipts",
        "urn:xmpp:whatsapp:account", "urn:xmpp:whatsapp:dirty", "urn:xmpp:whatsapp:mms",
        "urn:xmpp:whatsapp:push", "urn:xmpp:whatsapp", "user", "user-not-found", "value", "version",
        "w:g", "w:p:r", "w:p", "w:profile:picture", "w", "wait", "WAUTH-2", "x", "xmlns:stream", "xmlns",
        "1", "chatstate", "crypto", "enc", "class", "off_cnt", "w:g2", "promote", "demote", "creator"
    };

    private static final String[][] secondaryStrings = new String[][]{
        new String[]{
            "Bell.caf", "Boing.caf", "Glass.caf", "Harp.caf", "TimePassing.caf", "Tri-tone.caf",
            "Xylophone.caf", "background", "backoff", "chunked", "context", "full", "in", "interactive",
            "out", "registration", "sid", "urn:xmpp:whatsapp:sync", "flt", "s16", "u8", "adpcm", "amrnb",
            "amrwb", "mp3", "pcm", "qcelp", "wma", "h263", "h264", "jpeg", "mpeg4", "wmv", "audio/3gpp",
            "audio/aac", "audio/amr", "audio/mp4", "audio/mpeg", "audio/ogg", "audio/qcelp", "audio/wav",
            "audio/webm", "audio/x-caf", "audio/x-ms-wma", "image/gif", "image/jpeg", "image/png",
            "video/3gpp", "video/avi", "video/mp4", "video/mpeg", "video/quicktime", "video/x-flv",
            "video/x-ms-asf", "302", "400", "401", "402", "403", "404", "405", "406", "407", "409",
            "500", "501", "503", "504", "abitrate", "acodec", "app_uptime", "asampfmt", "asampfreq",
            "audio", "bb_db", "clear", "conflict", "conn_no_nna", "cost", "currency", "duration",
            "extend", "file", "fps", "g_notify", "g_sound", "gcm", "google_play", "hash", "height",
            "invalid", "jid-malformed", "latitude", "lc", "lg", "live", "location", "log", "longitude",
            "max_groups", "max_participants", "max_subject", "mimetype", "mode", "napi_version",
            "normalize", "orighash", "origin", "passive", "password", "played", "policy-violation",
            "pop_mean_time", "pop_plus_minus", "price", "pricing", "redeem",
            "Replaced by new connection", "resume", "signature", "size", "sound", "source",
            "system-shutdown", "username", "vbitrate", "vcard", "vcodec", "video", "width",
            "xml-not-well-formed", "checkmarks", "image_max_edge", "image_max_kbytes", "image_quality",
            "ka", "ka_grow", "ka_shrink", "newmedia", "library", "caption", "forward", "c0", "c1",
            "c2", "c3", "clock_skew", "cts", "k0", "k1", "login_rtt", "m_id", "nna_msg_rtt",
            "nna_no_off_count", "nna_offline_ratio", "nna_push_rtt", "no_nna_con_count", "off_msg_rtt",
            "on_msg_rtt", "stat_name", "sts", "suspect_conn", "lists", "self", "qr", "web", "w:b",
            "recipient", "w:stats", "forbidden", "aurora.m4r", "bamboo.m4r", "chord.m4r", "circles.m4r",
            "complete.m4r", "hello.m4r", "input.m4r", "keys.m4r", "note.m4r", "popcorn.m4r",
            "pulse.m4r", "synth.m4r", "filehash"
        }
    };

    // reverse lookup: primary string -> primary token
    private final HashMap<String, Integer> primaryStringDict = new HashMap<>();
    // reverse lookup: secondary string -> (subdictionary token, index within it)
    private final HashMap<String, TokenMap<Integer, Integer>> secondaryStringDict = new HashMap<>();

    /**
     * Returns the singleton instance.
     * NOTE(review): lazy init is not thread-safe; assumed to be called from a single thread — verify.
     */
    public static TokenDictionary getInstance() {
        if (tokenDictionary == null) {
            tokenDictionary = new TokenDictionary();
        }
        return tokenDictionary;
    }

    /** Builds the reverse-lookup maps from the static token tables. */
    private TokenDictionary() {
        for (int i = 0; i < primaryStrings.length; i++) {
            String text = primaryStrings[i];
            if (text != null) {
                this.primaryStringDict.put(text, i);
            }
        }
        for (int j = 0; j < secondaryStrings.length; j++) {
            String[] array = secondaryStrings[j];
            for (int k = 0; k < array.length; k++) {
                String text2 = array[k];
                if (text2 != null) {
                    this.secondaryStringDict.put(text2, new TokenMap<>(j + SECONDARY_STRINGS_START, k));
                }
            }
        }
    }

    /**
     * Looks up the token for a string.
     *
     * @param str     the string to encode
     * @param subdict out: set to the sub-dictionary token when the string lives in a secondary table
     * @param token   out: set to the token index
     * @return {@code true} if the string is known
     */
    public boolean tryGetToken(String str, ByRef<Integer> subdict, ByRef<Integer> token) {
        Integer i = this.primaryStringDict.get(str);
        if (i != null && i >= 0) {
            token.set(i);
            return true;
        }
        TokenMap<Integer, Integer> tokenMap = this.secondaryStringDict.get(str);
        if (tokenMap != null) {
            subdict.set(tokenMap.getFirst());
            token.set(tokenMap.getSecond());
            return true;
        }
        return false;
    }

    /**
     * Resolves a token back to its string.
     * <p>
     * If {@code subdict} holds a non-negative value, the token indexes that secondary table.
     * Otherwise a token in the secondary range only selects the sub-dictionary (written back
     * through {@code subdict}) and {@code str} is left untouched; any other token indexes
     * the primary table.
     *
     * @param subdict in/out: the active sub-dictionary, or negative for none
     * @param str     out: receives the resolved string
     * @throws UnsupportedOperationException if the sub-dictionary or token is out of range,
     *         or the table entry is missing
     */
    public void getToken(int token, ByRef<Integer> subdict, ByRef<String> str) {
        String[] array = null;
        if (subdict.get() >= 0) {
            if (subdict.get() >= secondaryStrings.length) {
                throw new UnsupportedOperationException("Invalid subdictionary " + subdict);
            }
            array = secondaryStrings[subdict.get()];
        } else {
            if (token >= SECONDARY_STRINGS_START && token < SECONDARY_STRINGS_START + secondaryStrings.length) {
                subdict.set(token - SECONDARY_STRINGS_START);
            } else {
                array = primaryStrings;
            }
        }
        if (array != null) {
            // BUG FIX: was 'token > array.length', which let token == array.length through to an
            // unchecked ArrayIndexOutOfBoundsException
            if (token < 0 || token >= array.length) {
                throw new UnsupportedOperationException("Invalid token " + token);
            }
            String value = array[token];
            if (value == null) {
                // mirrors the reference implementation; current tables contain no nulls
                throw new UnsupportedOperationException("invalid token/length in getToken");
            }
            str.set(value);
        }
    }
}
/*
 * Copyright 2017 Axway Software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.axway.ats.uiengine.utilities.mobile;

import java.util.function.BooleanSupplier;

import org.openqa.selenium.WebElement;
import org.openqa.selenium.interactions.touch.TouchActions;
import org.openqa.selenium.remote.UnreachableBrowserException;

import com.axway.ats.common.PublicAtsApi;
import com.axway.ats.uiengine.MobileDriver;
import com.axway.ats.uiengine.configuration.UiEngineConfigurator;
import com.axway.ats.uiengine.elements.UiElement;
import com.axway.ats.uiengine.elements.UiElementProperties;
import com.axway.ats.uiengine.elements.mobile.MobileElementFinder;
import com.axway.ats.uiengine.exceptions.MobileOperationException;
import com.axway.ats.uiengine.exceptions.VerificationException;
import com.axway.ats.uiengine.internal.driver.InternalObjectsEnum;
import com.axway.ats.uiengine.utilities.UiEngineUtilities;

import io.appium.java_client.AppiumDriver;
import io.appium.java_client.MobileElement;

/**
 * Utilities for checking and waiting on the state (existence/visibility) of a mobile UI element.
 */
public class MobileElementState {

    private static final int SLEEP_PERIOD = 500; // polling interval between state checks, in ms

    private AppiumDriver<? extends WebElement> appiumDriver;

    private UiElementProperties elementProperties;

    private UiElement element;

    /**
     * @param uiElement the element of interest
     */
    @SuppressWarnings("unchecked")
    public MobileElementState( UiElement uiElement ) {

        this.element = uiElement;
        this.elementProperties = uiElement.getElementProperties();
        this.appiumDriver = (AppiumDriver<? extends WebElement>) ((MobileDriver) uiElement.getUiDriver()).getInternalObject(InternalObjectsEnum.WebDriver.name());
    }

    /**
     * Moves the focus to the specified element. Currently issued with tap
     */
    @PublicAtsApi
    public void focus() {

        try {
            MobileElement mobileElement = (MobileElement) MobileElementFinder.findElement(appiumDriver,
                                                                                          element);
            // use tap to focus
            new TouchActions(appiumDriver).singleTap(mobileElement).perform();
        } catch (Exception se) {
            throw new MobileOperationException("Error trying to set the focus to "
                                               + getElementDescription(), se);
        }
    }

    /**
     * Verifies the element exist
     *
     * throws an exception if verification fail
     */
    @PublicAtsApi
    public void verifyExist() {

        boolean exists = isElementPresent();
        if (!exists) {
            throw new VerificationException(getElementDescription()
                                            + " does not exist while it is expected to exist");
        }
    }

    /**
     * Verifies the element does NOT exist
     *
     * throws an exception if verification fail
     */
    @PublicAtsApi
    public void verifyNotExist() {

        boolean exists = isElementPresent();
        if (exists) {
            throw new VerificationException(getElementDescription()
                                            + " exists while it is expected to not exist");
        }
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become existing
     *
     * throws an exception if it does not become existing
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeExisting() {

        waitToBecomeExisting(UiEngineConfigurator.getInstance().getElementStateChangeDelay());
    }

    /**
     * Waits for a period of time the element to become existing
     *
     * throws an exception if it does not become existing for the specified period
     */
    @PublicAtsApi
    public void waitToBecomeExisting( int millis ) {

        waitForState(this::isElementPresent, millis, "Failed to verify the element exist within ");
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to became non-existing
     *
     * throws an exception if it does not become non-existing
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeNotExisting() {

        waitToBecomeNotExisting(UiEngineConfigurator.getInstance().getElementStateChangeDelay());
    }

    /**
     * Waits for a period of time the element to became non-existing
     *
     * throws an exception if it does not become non-existing for the specified period
     */
    @PublicAtsApi
    public void waitToBecomeNotExisting( int millis ) {

        waitForState(() -> !isElementPresent(), millis,
                     "Failed to verify the element is not existing within ");
    }

    /**
     * Waits for a period of time (check the 'elementStateChangeDelay' property) the element to become displayed
     *
     * throws an exception if it does not become displayed
     * for the default waiting period (check the 'elementStateChangeDelay' property)
     */
    @PublicAtsApi
    public void waitToBecomeDisplayed() {

        waitToBecomeDisplayed(UiEngineConfigurator.getInstance().getElementStateChangeDelay());
    }

    /**
     * Waits for a period of time the element to become displayed
     *
     * throws an exception if it does not become displayed for the specified period
     */
    @PublicAtsApi
    public void waitToBecomeDisplayed( int millis ) {

        waitForState(this::isElementDisplayed, millis,
                     "Failed to verify the element become displayed within ");
    }

    @PublicAtsApi
    public boolean isElementPresent() {

        try {
            return MobileElementFinder.findElement(appiumDriver, element) != null;
        } catch (UnreachableBrowserException ube) {
            throw new MobileOperationException("Check if there is connection to the target device and the Appium server is running",
                                               ube);
        } catch (Exception e) {
            // element is not present or got error checking if it is present
            return false;
        }
    }

    @PublicAtsApi
    public boolean isElementDisplayed() {

        try {
            WebElement webElement = MobileElementFinder.findElement(appiumDriver, element);
            if (webElement == null) {
                return false;
            } else {
                return webElement.isDisplayed();
            }
        } catch (UnreachableBrowserException ube) {
            throw new MobileOperationException("Check if there is connection to the target device and the Appium server is running",
                                               ube);
        } catch (Exception e) {
            // element is not present or got error checking if it is present
            return false;
        }
    }

    /**
     * Shared poll loop: checks the condition every {@link #SLEEP_PERIOD} ms until it holds
     * or the deadline passes.
     *
     * @param condition            the state to wait for
     * @param millis               maximum time to wait, in ms
     * @param failureMessagePrefix prefix of the VerificationException message on timeout
     */
    private void waitForState( BooleanSupplier condition, int millis, String failureMessagePrefix ) {

        long endTime = System.currentTimeMillis() + millis;
        do {
            if (condition.getAsBoolean()) {
                return;
            }
            UiEngineUtilities.sleep(SLEEP_PERIOD);
        } while (endTime - System.currentTimeMillis() > 0);

        throw new VerificationException(failureMessagePrefix + millis + " ms" + getElementDescription());
    }

    /** Builds a human-readable description of the element for error messages. */
    private String getElementDescription() {

        StringBuilder desc = new StringBuilder();
        desc.append(" '");
        if (element != null) {
            desc.append(element.toString());
        } else {
            desc.append("Element ").append(elementProperties.toString());
        }

        // append 'context' if not specified thru the element properties
        if (elementProperties.getProperty("context") == null) {
            desc.append(", context=");
            desc.append(MobileElementFinder.defaultContext);
        }
        desc.append("'");
        return desc.toString();
    }
}
/*========================================================================= * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * one or more patents listed at http://www.pivotal.io/patents. *========================================================================= */ package com.gemstone.gemfire.internal.cache; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.apache.logging.log4j.Logger; import com.gemstone.gemfire.cache.AttributesFactory; import com.gemstone.gemfire.cache.DataPolicy; import com.gemstone.gemfire.cache.DuplicatePrimaryPartitionException; import com.gemstone.gemfire.cache.EvictionAttributes; import com.gemstone.gemfire.cache.FixedPartitionAttributes; import com.gemstone.gemfire.cache.PartitionAttributes; import com.gemstone.gemfire.cache.RegionAttributes; import com.gemstone.gemfire.cache.Scope; import com.gemstone.gemfire.internal.Assert; import com.gemstone.gemfire.internal.i18n.LocalizedStrings; import com.gemstone.gemfire.internal.logging.LogService; import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage; /** * * @author ymahajan * */ public class PartitionRegionConfigValidator { private static final Logger logger = LogService.getLogger(); private final PartitionedRegion pr; // Incompatible LRU memory eviction attributes maximum message fragment, for // tests public static final String EVICTION_ATTRIBUTE_MAXIMUM_MEMORY_MESSAGE = " the Eviction Attribute for maximum memory, "; // Incompatible LRU entry eviction attributes maximum message fragment, for // tests public static final String EVICTION_ATTRIBUTE_MAXIMUM_ENTRIES_MESSAGE = " the Eviction Attribute for maximum entries, "; // Incompatible eviction attributes exception message fragment, for tests public static final String 
EVICTION_ATTRIBUTES_ARE_INCOMPATIBLE_MESSAGE = " is incompatible with other VMs which have EvictionAttributes "; public PartitionRegionConfigValidator(PartitionedRegion pr) { this.pr = pr; } /** * This method validates the PartitionedAttributes that user provided PR * Attributes with PR Attributes set in PR Config obtained from global * meta-data allPartitionedRegion region */ void validatePartitionAttrsFromPRConfig(PartitionRegionConfig prconf) { final PartitionAttributes prconfPA = prconf.getPartitionAttrs(); final PartitionAttributes userPA = pr.getAttributes() .getPartitionAttributes(); if (userPA.getTotalSize() != prconfPA.getTotalSize()) { throw new IllegalStateException( LocalizedStrings.PartitionedRegion_TOTAL_SIZE_IN_PARTITIONATTRIBUTES_IS_INCOMPATIBLE_WITH_GLOBALLY_SET_TOTAL_SIZE_SET_THE_TOTAL_SIZE_TO_0MB .toLocalizedString(Long.valueOf(prconfPA.getTotalSize()))); } if (userPA.getRedundantCopies() != prconfPA.getRedundantCopies()) { throw new IllegalStateException( LocalizedStrings.PartitionedRegion_REQUESTED_REDUNDANCY_0_IS_INCOMPATIBLE_WITH_EXISTING_REDUNDANCY_1 .toLocalizedString(new Object[] { Integer.valueOf(userPA.getRedundantCopies()), Integer.valueOf(prconfPA.getRedundantCopies()) })); } if (prconf.isFirstDataStoreCreated() && pr.isDataStore()) { validateDistributedEvictionAttributes(prconf.getEvictionAttributes()); } Scope prconfScope = prconf.getScope(); Scope myScope = pr.getScope(); if (!myScope.equals(prconfScope)) { throw new IllegalStateException( LocalizedStrings.PartitionedRegion_SCOPE_IN_PARTITIONATTRIBUTES_IS_INCOMPATIBLE_WITH_ALREADY_SET_SCOPESET_THE_SCOPE_TO_0 .toLocalizedString(prconfScope)); } final int prconfTotalNumBuckets = prconfPA.getTotalNumBuckets(); if (userPA.getTotalNumBuckets() != prconfTotalNumBuckets) { throw new IllegalStateException( 
LocalizedStrings.PartitionedRegion_THE_TOTAL_NUMBER_OF_BUCKETS_FOUND_IN_PARTITIONATTRIBUTES_0_IS_INCOMPATIBLE_WITH_THE_TOTAL_NUMBER_OF_BUCKETS_USED_BY_OTHER_DISTRIBUTED_MEMBERS_SET_THE_NUMBER_OF_BUCKETS_TO_1 .toLocalizedString(new Object[] { Integer.valueOf(userPA.getTotalNumBuckets()), Integer.valueOf(prconfTotalNumBuckets) })); } validatePartitionListeners(prconf, userPA); validatePartitionResolver(prconf, userPA); validateColocatedWith(prconf, userPA); validateExpirationAttributes(pr.getAttributes(), prconf); } private void validatePartitionListeners(final PartitionRegionConfig prconf, final PartitionAttributes userPA) { ArrayList<String> prconfList = prconf.getPartitionListenerClassNames(); if (userPA.getPartitionListeners() == null && userPA.getPartitionListeners().length == 0 && prconfList != null) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_PARTITION_LISTENER .toLocalizedString(new Object[] { null, prconfList })); } if (userPA.getPartitionListeners() != null && prconfList != null) { ArrayList<String> userPRList = new ArrayList<String>(); for (int i = 0; i < userPA.getPartitionListeners().length; i++) { userPRList.add(userPA.getPartitionListeners()[i].getClass().getName()); } if (userPA.getPartitionListeners().length != prconfList.size()) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_PARTITION_LISTENER .toLocalizedString(new Object[] { userPRList, prconfList })); } for (String listener : prconfList) { if (!(userPRList.contains(listener))) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_PARTITION_LISTENER .toLocalizedString(new Object[] { userPRList, prconfList })); } } } } private void validatePartitionResolver(final PartitionRegionConfig prconf, final PartitionAttributes userPA) { /* if (userPA.getPartitionResolver() == null && prconf.getPartitionResolverClassName() != null) { throw new IllegalStateException( 
LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_PARTITION_RESOLVER .toLocalizedString(new Object[] { "null", prconf.getPartitionResolverClassName() })); }*/ if (userPA.getPartitionResolver() != null && prconf.getResolverClassName() != null) { if (!(prconf.getResolverClassName().equals(userPA .getPartitionResolver().getClass().getName()))) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_PARTITION_RESOLVER .toLocalizedString(new Object[] { userPA.getPartitionResolver().getClass().getName(), prconf.getResolverClassName() })); } } } private void validateColocatedWith(final PartitionRegionConfig prconf, final PartitionAttributes userPA) { if (userPA.getColocatedWith() == null && prconf.getColocatedWith() != null) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_COLOCATED_WITH .toLocalizedString(new Object[] { "null", prconf.getColocatedWith() })); } if (userPA.getColocatedWith() != null && prconf.getColocatedWith() != null) { if (!(prconf.getColocatedWith().equals(userPA.getColocatedWith()))) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_COLOCATED_WITH .toLocalizedString(new Object[] { userPA.getColocatedWith(), prconf.getColocatedWith() })); } } } private void validateExpirationAttributes(final RegionAttributes userRA, final PartitionRegionConfig prconf) { if (!userRA.getRegionIdleTimeout().equals(prconf.getRegionIdleTimeout())) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_EXPIRATION_ATTRIBUETS .toLocalizedString(new Object[] { " region idle timout " })); } if (!userRA.getRegionTimeToLive().equals(prconf.getRegionTimeToLive())) { throw new IllegalStateException( LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_EXPIRATION_ATTRIBUETS .toLocalizedString(new Object[] { " region time to live " })); } if 
(!userRA.getEntryIdleTimeout().equals(prconf.getEntryIdleTimeout())) {
      throw new IllegalStateException(
          LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_EXPIRATION_ATTRIBUETS
              .toLocalizedString(new Object[] { " entry idle timout " }));
    }
    // Entry time-to-live must also agree with the attributes already
    // registered by other members for this partitioned region.
    if (!userRA.getEntryTimeToLive().equals(prconf.getEntryTimeToLive())) {
      throw new IllegalStateException(
          LocalizedStrings.PartitionRegionConfigValidator_INCOMPATIBLE_EXPIRATION_ATTRIBUETS
              .toLocalizedString(new Object[] { " entry time to live " }));
    }
  }

  /**
   * The 2nd step of Eviction Attributes validation to ensure that all VMs are
   * reasonably similar to prevent weird config. issues.
   *
   * @param prconfEa
   *          the eviction attributes currently used by other VMs
   * @see AttributesFactory#validateAttributes(RegionAttributes)
   * @see #validateEvictionAttributesAgainstLocalMaxMemory()
   */
  private void validateDistributedEvictionAttributes(
      final EvictionAttributes prconfEa) {
    final EvictionAttributes ea = pr.getAttributes().getEvictionAttributes();
    // there is no such thing as null EvictionAttributes, assert that is true
    Assert.assertTrue(ea != null);
    Assert.assertTrue(prconfEa != null);

    // Enforce that all VMs with this PR have the same Eviction Attributes.
    // Even an accessor should do this to stay consistent with all other
    // accessor validation/enforcement *and* because an accessor can set the
    // first/global eviction attributes.
    // If this is an accessor with EvictionAttributes, an info message is
    // logged (elsewhere) indicating that no eviction will occur in this VM.
    // Further validation should occur for datastores to ensure consistent
    // behavior wrt local max memory and total number of buckets.
    final boolean equivAlgoAndAction = ea.getAlgorithm().equals(
        prconfEa.getAlgorithm())
        && ea.getAction().equals(prconfEa.getAction());

    if (!equivAlgoAndAction) {
      throw new IllegalStateException("For Partitioned Region "
          + pr.getFullPath() + " the configured EvictionAttributes " + ea
          + EVICTION_ATTRIBUTES_ARE_INCOMPATIBLE_MESSAGE + prconfEa);
    } else {
      // Same algo, action...
      // It is ok to have disparate heap or memory sizes since different
      // VMs may have different heap or memory sizes, particularly if
      // the action is overflow, but...
      // It *is* dangerous to locally destroy entries if all VMs don't have
      // the same maximum: basically the VM with the smallest maximum may
      // cause erroneous misses to occur. Warn the user, but allow the
      // configuration.
      if (ea.getAction().isLocalDestroy()) {
        // LRUHeap doesn't support maximum, but other eviction algos do
        if (! ea.getAlgorithm().isLRUHeap()
            && ea.getMaximum() != prconfEa.getMaximum()) {
          logger.warn(LocalizedMessage.create(
              LocalizedStrings.PartitionedRegion_0_EVICTIONATTRIBUTES_1_DO_NOT_MATCH_WITH_OTHER_2,
              new Object[] {pr.getFullPath(), ea, prconfEa}));
        }
      }
    } // end Same algo, action...
  }

  /**
   * The 3rd step of EvictionAttributes validation, where mutation is
   * acceptable. This should be done before buckets are created. Validate
   * EvictionAttributes with respect to localMaxMemory potentially changing
   * the eviction attributes.
   *
   * @see AttributesFactory#validateAttributes(RegionAttributes)
   * @see #validateDistributedEvictionAttributes(EvictionAttributes)
   */
  void validateEvictionAttributesAgainstLocalMaxMemory() {
    final EvictionAttributes ea = pr.getEvictionAttributes();
    if (pr.getLocalMaxMemory()==0 && !ea.getAction().isNone()) {
      // This is an accessor which won't ever do eviction, say so
      logger.info(LocalizedMessage.create(
          LocalizedStrings.PartitionedRegion_EVICTIONATTRIBUTES_0_WILL_HAVE_NO_EFFECT_1_2,
          new Object[] { ea, pr.getFullPath(), Integer.valueOf(pr.localMaxMemory)}));
    }
  }

  /**
   * validates the persistence for datastores should match
   * between members
   */
  void validatePersistentMatchBetweenDataStores(PartitionRegionConfig prconf) {
    // NOTE(review): isPersistent is computed but never read; the loop below
    // recomputes the same comparison inline — confirm before removing.
    final boolean isPersistent = pr.getAttributes().getDataPolicy() == DataPolicy.PERSISTENT_PARTITION;

    // Accessors (localMaxMemory == 0) host no data, and with no published
    // config there is nothing to compare against.
    if (pr.getLocalMaxMemory()==0 || prconf==null) {
      return;
    }
    Set<Node> nodes = prconf.getNodes();
    Iterator itor = nodes.iterator();
    while (itor.hasNext()) {
      Node n = (Node)itor.next();
      // Only datastore members are required to agree on persistence.
      if (n.getPRType() != Node.ACCESSOR_DATASTORE) {
        continue;
      } else {
        if (n.isPersistent() != (pr.getAttributes().getDataPolicy() == DataPolicy.PERSISTENT_PARTITION)) {
          throw new IllegalStateException(
              "DataPolicy for Datastore members should all be persistent or not.");
        }
      }
    }
  }

  /**
   * Validates this region's colocation settings against the region it is
   * colocated with: total-num-buckets, redundant-copies, accessor placement
   * and persistence must all be compatible.
   */
  void validateColocation() {
    final PartitionAttributesImpl userPA = (PartitionAttributesImpl) pr.getAttributes()
        .getPartitionAttributes();

    userPA.validateColocation(); // do this here to fix bug 47197

    PartitionedRegion colocatedPR = ColocationHelper.getColocatedRegion(pr);
    if (colocatedPR != null) {
      if (colocatedPR.getPartitionAttributes().getTotalNumBuckets()!=
          userPA.getTotalNumBuckets()){
        throw new IllegalStateException(
            "Colocated regions should have same number of total-num-buckets");
      }
      if (colocatedPR.getPartitionAttributes().getRedundantCopies()!=
          userPA.getRedundantCopies()){
        throw new IllegalStateException(
            "Colocated regions should have same number of redundant-copies");
      }
      // Either both regions are accessors here, or neither is.
      if ((colocatedPR.getPartitionAttributes().getLocalMaxMemory() == 0)
          && (userPA.getLocalMaxMemory() != 0)) {
        throw new IllegalStateException(
            "Colocated regions should have accessors at the same node");
      }
      if ((colocatedPR.getLocalMaxMemory() != 0)
          && (userPA.getLocalMaxMemory() == 0)) {
        throw new IllegalStateException(
            "Colocated regions should have accessors at the same node");
      }
      if (!pr.isShadowPR()) {
        if (pr.getAttributes().getDataPolicy().withPersistence()) {
          if (!colocatedPR.getDataPolicy().withPersistence()) {
            throw new IllegalStateException(
                "Cannot colocate a persistent region with a non persistent region");
          }
        }
      }
    }
  }

  /**
   * Validates that this datastore's cache loader/writer presence matches the
   * loader/writer presence published by the other datastore members.
   *
   * @param prconf the shared configuration published by other members; no-op
   *          if null or if this member is an accessor
   */
  public void validateCacheLoaderWriterBetweenDataStores(
      PartitionRegionConfig prconf) {
    if (pr.getLocalMaxMemory() == 0 || prconf == null) {
      return;
    }
    Set<Node> nodes = prconf.getNodes();
    Iterator itor = nodes.iterator();
    while (itor.hasNext()) {
      Node n = (Node)itor.next();
      if (n.getPRType() != Node.ACCESSOR_DATASTORE) {
        continue;
      } else {
        if (n.isCacheLoaderAttached()
            && pr.getAttributes().getCacheLoader() == null) {
          throw new IllegalStateException(
              LocalizedStrings.PartitionRegionConfigValidator_CACHE_LOADER_IS_NOTNULL_IN_PARTITIONED_REGION_0_ON_OTHER_DATASTORE
                  .toLocalizedString(new Object[] { this.pr.getName() }));
        }
        if (!n.isCacheLoaderAttached()
            && pr.getAttributes().getCacheLoader() != null) {
          throw new IllegalStateException(
              LocalizedStrings.PartitionRegionConfigValidator_CACHE_LOADER_IS_NULL_IN_PARTITIONED_REGION_0_ON_OTHER_DATASTORE
                  .toLocalizedString(new Object[] { this.pr.getName() }));
        }
        if (n.isCacheWriterAttached()
            && pr.getAttributes().getCacheWriter() == null) {
          throw new IllegalStateException(
              LocalizedStrings.PartitionRegionConfigValidator_CACHE_WRITER_IS_NOTNULL_IN_PARTITIONED_REGION_0_ON_OTHER_DATASTORE
                  .toLocalizedString(new Object[] { this.pr.getName() }));
        }
        if (!n.isCacheWriterAttached()
            && pr.getAttributes().getCacheWriter() != null) {
          throw new IllegalStateException(
              LocalizedStrings.PartitionRegionConfigValidator_CACHE_WRITER_IS_NULL_IN_PARTITIONED_REGION_0_ON_OTHER_DATASTORE
                  .toLocalizedString(new Object[] { this.pr.getName() }));
        }
      }
    }
  }

  /**
   * Entry point for fixed-partition validation; runs the primary, redundancy
   * and bucket-count checks only when this region declares fixed partitions.
   */
  void validateFixedPartitionAttributes() {
    if (this.pr.getFixedPartitionAttributesImpl() != null) {
      validatePrimaryFixedPartitionAttributes();
      validateFixedPartitionAttributesAgainstRedundantCopies();
      validateFixedPartitionAttributesAgainstTotalNumberBuckets();
    }
  }

  /**
   * validate that for all partitions defined across all datastores, sum of
   * num-buckets is not more than total-num-buckets defined
   */
  private void validateFixedPartitionAttributesAgainstTotalNumberBuckets() {
    for (FixedPartitionAttributesImpl fpa : this.pr
        .getFixedPartitionAttributesImpl()) {
      int numBuckets = 0;
      // Set semantics de-duplicate identical attributes advised by peers.
      Set<FixedPartitionAttributesImpl> allFPAs = new HashSet<FixedPartitionAttributesImpl>(
          this.pr.getRegionAdvisor().adviseAllFixedPartitionAttributes());
      allFPAs.add(fpa);

      for (FixedPartitionAttributes samefpa : allFPAs) {
        numBuckets = numBuckets + samefpa.getNumBuckets();
      }
      if (numBuckets > this.pr.getTotalNumberOfBuckets()) {
        Object[] prms = new Object[] { this.pr.getName(), numBuckets,
            this.pr.getTotalNumberOfBuckets() };
        throw new IllegalStateException(
            LocalizedStrings.PartitionedRegionConfigValidator_FOR_REGION_0_SUM_OF_NUM_BUCKETS_1_FOR_DIFFERENT_PRIMARY_PARTITIONS_SHOULD_NOT_BE_GREATER_THAN_TOTAL_NUM_BUCKETS_2
                .toString(prms));
      }
    }
  }

  /**
   * Validate that for the given partition, the number of secondaries never
   * exceeds the redundant copies defined, and that the num-buckets defined
   * for a partition are the same across all datastores.
   */
  private void validateFixedPartitionAttributesAgainstRedundantCopies() {
    for (FixedPartitionAttributesImpl fpa : this.pr.getFixedPartitionAttributesImpl()) {
      List<FixedPartitionAttributesImpl> allSameFPAs = this.pr.getRegionAdvisor().adviseSameFPAs(fpa);
      allSameFPAs.add(fpa);

      if (!allSameFPAs.isEmpty()) {
        int numSecondaries = 0;
        for (FixedPartitionAttributes otherfpa : allSameFPAs) {
          // All nodes hosting the same named partition must agree on its
          // bucket count.
          if (fpa.getNumBuckets() != otherfpa.getNumBuckets()) {
            Object[] prms = new Object[] { this.pr.getName(),
                fpa.getPartitionName(), fpa.getNumBuckets(),
                otherfpa.getNumBuckets() };
            throw new IllegalStateException(
                LocalizedStrings.PartitionedRegionConfigValidator_FOR_REGION_0_FOR_PARTITION_1_NUM_BUCKETS_ARE_NOT_SAME_ACROSS_NODES
                    .toString(prms));
          }

          if (!otherfpa.isPrimary()) {
            if (++numSecondaries > (this.pr.getRedundantCopies())) {
              Object[] prms = new Object[] { this.pr.getName(),
                  numSecondaries, fpa.getPartitionName(),
                  this.pr.getRedundantCopies() };
              throw new IllegalStateException(
                  LocalizedStrings.PartitionedRegionConfigValidator_FOR_REGION_0_NUMBER_OF_SECONDARY_PARTITIONS_1_OF_A_PARTITION_2_SHOULD_NEVER_EXCEED_NUMBER_OF_REDUNDANT_COPIES_3
                      .toString(prms));
            }
          }
        }
      }
    }
  }

  /**
   * Validate that the same partition is not defined as primary on more than
   * one datastore.
   */
  private void validatePrimaryFixedPartitionAttributes() {
    List<FixedPartitionAttributesImpl> remotePrimaryFPAs = this.pr.getRegionAdvisor().adviseRemotePrimaryFPAs();

    for (FixedPartitionAttributes fpa : this.pr.getFixedPartitionAttributesImpl()) {
      if (fpa.isPrimary() && remotePrimaryFPAs.contains(fpa)) {
        Object[] prms = new Object[]{this.pr.getName(), fpa.getPartitionName()};
        throw new DuplicatePrimaryPartitionException(
            LocalizedStrings.PartitionedRegionConfigValidator_FOR_REGION_0_SAME_PARTITION_NAME_1_CANNOT_BE_DEFINED_AS_PRIMARY_ON_MORE_THAN_ONE_NODE
                .toString(prms));
      }
    }
  }

  /**
   * Validates that either all datastore members of this region use fixed
   * partition attributes, or none of them do — mixing FPA and non-FPA
   * datastores is rejected.
   *
   * @param prconf the shared configuration published by other members
   */
  public void validateFixedPABetweenDataStores(PartitionRegionConfig prconf) {
    boolean isDataStore = this.pr.localMaxMemory > 0;
    boolean isFixedPR = this.pr.fixedPAttrs != null;

    Set<Node> nodes = prconf.getNodes();
    Iterator<Node> itr = nodes.iterator();
    while (itr.hasNext()) {
      Node n = itr.next();
      if (isFixedPR) {
        // This member declares FPAs: no plain (non-FPA) datastore may exist.
        if (n.getPRType() == Node.DATASTORE
            || n.getPRType() == Node.ACCESSOR_DATASTORE) {
          // throw exception
          Object[] prms = new Object[] { pr.getName() };
          throw new IllegalStateException(
              LocalizedStrings.PartitionedRegionConfigValidator_FIXED_PARTITION_REGION_ONE_DATASTORE_IS_WITHOUTFPA
                  .toLocalizedString(prms));
        }
      } else {
        // This member is a plain datastore: no FPA member may exist.
        if (isDataStore) {
          if (n.getPRType() == Node.FIXED_PR_ACCESSOR
              || n.getPRType() == Node.FIXED_PR_DATASTORE) {
            // throw Exception
            Object[] prms = new Object[] { pr.getName() };
            throw new IllegalStateException(
                LocalizedStrings.PartitionedRegionConfigValidator_FIXED_PARTITION_REGION_ONE_DATASTORE_IS_WITHOUTFPA
                    .toLocalizedString(prms));
          }
        }
      }
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tez.dag.api;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Nullable;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.tez.dag.api.event.VertexState;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.InputSpecUpdate;
import org.apache.tez.runtime.api.VertexStatistics;
import org.apache.tez.runtime.api.events.CustomProcessorEvent;
import org.apache.tez.runtime.api.events.InputDataInformationEvent;

import org.apache.tez.common.Preconditions;

/**
 * Object with API's to interact with the Tez execution engine
 */
@Unstable
@Public
public interface VertexManagerPluginContext {

  /**
   * Identifies a task to schedule, optionally paired with a placement hint.
   */
  public class ScheduleTaskRequest {
    int taskIndex;
    TaskLocationHint locationHint;

    public static ScheduleTaskRequest create(int taskIndex,
        @Nullable TaskLocationHint locationHint) {
      return new ScheduleTaskRequest(taskIndex, locationHint);
    }

    private ScheduleTaskRequest(int taskIndex,
        @Nullable TaskLocationHint locationHint) {
      // Task indices are 0-based positions within the vertex's task list.
      Preconditions.checkState(taskIndex >= 0);
      this.taskIndex = taskIndex;
      this.locationHint = locationHint;
    }

    public int getTaskIndex() {
      return taskIndex;
    }

    public TaskLocationHint getTaskLocationHint() {
      return locationHint;
    }
  }

  /**
   * Replaced by {@link ScheduleTaskRequest}; kept for API compatibility.
   */
  @Deprecated
  public class TaskWithLocationHint {
    Integer taskIndex;
    TaskLocationHint locationHint;

    public TaskWithLocationHint(Integer taskIndex,
        @Nullable TaskLocationHint locationHint) {
      Preconditions.checkState(taskIndex != null);
      this.taskIndex = taskIndex;
      this.locationHint = locationHint;
    }

    public Integer getTaskIndex() {
      return taskIndex;
    }

    public TaskLocationHint getTaskLocationHint() {
      return locationHint;
    }
  }

  /**
   * Get the edge properties on the input edges of this vertex. The input edge
   * is represented by the source vertex name
   * @return Map of source vertex name and edge property
   */
  public Map<String, EdgeProperty> getInputVertexEdgeProperties();

  /**
   * Get the edge properties on the output edges of this vertex. The output edge
   * is represented by the destination vertex name
   * @return Map of destination vertex name and edge property
   */
  public Map<String, EdgeProperty> getOutputVertexEdgeProperties();

  /**
   * Get a {@link VertexStatistics} object to find out execution statistics
   * about the given {@link Vertex}.
   * <br>This only provides point in time values for statistics (completed tasks)
   * and must be called again to get updated values.
   *
   * @param vertexName
   *          Name of the {@link Vertex}
   * @return {@link VertexStatistics} for the given vertex
   */
  public VertexStatistics getVertexStatistics(String vertexName);

  /**
   * Get the name of the vertex
   * @return Vertex name
   */
  public String getVertexName();

  /**
   * Get the payload set for the plugin
   * @return user payload
   */
  public UserPayload getUserPayload();

  /**
   * Get the number of tasks in the given vertex
   * @param vertexName name of the vertex to look up
   * @return Total number of tasks in this vertex
   */
  public int getVertexNumTasks(String vertexName);

  /**
   * Get the resource allocated to a task of this vertex
   * @return Resource
   */
  Resource getVertexTaskResource();

  /**
   * Get the total resource allocated to this vertex. If the DAG is running in
   * a busy cluster then it may have no resources available dedicated to it. The
   * DAG may divide its available resource among member vertices.
   * @return Resource
   */
  Resource getTotalAvailableResource();

  /**
   * Get the number of nodes in the cluster
   * @return Number of nodes
   */
  int getNumClusterNodes();

  /**
   * Set the new parallelism (number of tasks) of this vertex,
   * Map of source (input) vertices and edge managers to change the event routing
   * between the source tasks and the new destination tasks and the number of physical inputs for root inputs.
   * This API can change the parallelism only once. Subsequent attempts will be
   * disallowed
   * @param parallelism New number of tasks in the vertex
   * @param locationHint the placement policy for tasks.
   * @param sourceEdgeManagers Edge Managers to be updated
   * @param rootInputSpecUpdate Updated Root Input specifications, if any.
   *        If none specified, a default of 1 physical input is used
   */
  @Deprecated
  public void setVertexParallelism(int parallelism,
      @Nullable VertexLocationHint locationHint,
      @Nullable Map<String, EdgeManagerPluginDescriptor> sourceEdgeManagers,
      @Nullable Map<String, InputSpecUpdate> rootInputSpecUpdate);

  /**
   * API to reconfigure a {@link Vertex} that is reading root inputs based on
   * the data read from the root inputs. Root inputs are external data sources
   * that provide the initial data for the DAG and are added to the
   * {@link Vertex} using the
   * {@link Vertex#addDataSource(String, DataSourceDescriptor)} API. Typically,
   * the parallelism of such vertices is determined at runtime by gathering
   * information about the data source. This API may be used to set the
   * parallelism of the vertex at runtime based on the data sources, as well as
   * changing the specification for those inputs. In addition, changing
   * parallelism is often accompanied by changing the {@link EdgeProperty} of
   * the source {@link Edge} because event routing between source and
   * destination tasks may need to be updated to account for the new task
   * parallelism. This method can be called to update the parallelism multiple
   * times until any of the tasks of the vertex have been scheduled (by invoking
   * {@link #scheduleTasks(List)}). If needed, the original source edge
   * properties may be obtained via {@link #getInputVertexEdgeProperties()}
   *
   * @param parallelism
   *          New number of tasks in the vertex
   * @param locationHint
   *          the placement policy for tasks specified at
   *          {@link VertexLocationHint}s
   * @param sourceEdgeProperties
   *          Map with Key=name of {@link Edge} to be updated and Value=
   *          {@link EdgeProperty}. The name of the Edge will be the
   *          corresponding source vertex name.
   * @param rootInputSpecUpdate
   *          The key of the map is the name of the data source and the value is
   *          the updated {@link InputSpecUpdate} for that data source. If none
   *          specified, a default value is used. See {@link InputSpecUpdate}
   *          for details.
   */
  public void reconfigureVertex(int parallelism,
      @Nullable VertexLocationHint locationHint,
      @Nullable Map<String, EdgeProperty> sourceEdgeProperties,
      @Nullable Map<String, InputSpecUpdate> rootInputSpecUpdate);

  /**
   * API to reconfigure a {@link Vertex} by changing its task parallelism. Task
   * parallelism is often accompanied by changing the {@link EdgeProperty} of
   * the source {@link Edge} because event routing between source and
   * destination tasks may need to be updated to account for the new task
   * parallelism. This method can be called to update the parallelism multiple
   * times until any of the tasks of the vertex have been scheduled (by invoking
   * {@link #scheduleTasks(List)}). If needed, the original source edge
   * properties may be obtained via {@link #getInputVertexEdgeProperties()}
   *
   * @param parallelism
   *          New number of tasks in the vertex
   * @param locationHint
   *          the placement policy for tasks specified at
   *          {@link VertexLocationHint}s
   * @param sourceEdgeProperties
   *          Map with Key=name of {@link Edge} to be updated and Value=
   *          {@link EdgeProperty}. The name of the Edge will be the
   *          corresponding source vertex name.
   */
  public void reconfigureVertex(int parallelism,
      @Nullable VertexLocationHint locationHint,
      @Nullable Map<String, EdgeProperty> sourceEdgeProperties);

  /**
   * API to reconfigure a {@link Vertex} that is reading root inputs based on
   * the data read from the root inputs. Root inputs are external data sources
   * that provide the initial data for the DAG and are added to the
   * {@link Vertex} using the
   * {@link Vertex#addDataSource(String, DataSourceDescriptor)} API. Typically,
   * the parallelism of such vertices is determined at runtime by gathering
   * information about the data source. This API may be used to set the
   * parallelism of the vertex at runtime based on the data sources, as well as
   * changing the specification for those inputs.
   * @param rootInputSpecUpdate
   *          The key of the map is the name of the data source and the value is
   *          the updated {@link InputSpecUpdate} for that data source. If none
   *          specified, a default value is used. See {@link InputSpecUpdate}
   *          for details.
   * @param locationHint
   *          the placement policy for tasks specified at
   *          {@link VertexLocationHint}s
   * @param parallelism
   *          New number of tasks in the vertex
   */
  public void reconfigureVertex(@Nullable Map<String, InputSpecUpdate> rootInputSpecUpdate,
      @Nullable VertexLocationHint locationHint,
      int parallelism);

  /**
   * Allows a VertexManagerPlugin to assign Events for Root Inputs
   *
   * For regular Event Routing changes - the EdgeManager should be configured
   * via the setVertexParallelism method
   *
   * @param inputName
   *          The input name associated with the event
   * @param events
   *          The list of Events to be assigned to various tasks belonging to
   *          the Vertex. The target index on individual events represents the
   *          task to which events need to be sent.
   */
  public void addRootInputEvents(String inputName,
      Collection<InputDataInformationEvent> events);

  /**
   * Allows a VertexManagerPlugin to send events of custom payload to processor
   * of a specific task of managed vertex
   *
   * It's up to the user to make sure taskId is valid
   *
   * @param events events to be sent
   * @param taskId id of a task of managed vertex
   */
  public void sendEventToProcessor(Collection<CustomProcessorEvent> events,
      int taskId);

  @Deprecated
  /**
   * Replaced by {@link #scheduleTasks(List)}
   * Notify the vertex to start the given tasks
   * @param tasks Indices of the tasks to be started
   */
  public void scheduleVertexTasks(List<TaskWithLocationHint> tasks);

  /**
   * Notify the vertex to schedule the given tasks
   * @param tasks Identifier and metadata for the tasks to schedule
   */
  public void scheduleTasks(List<ScheduleTaskRequest> tasks);

  /**
   * Get the names of the non-vertex inputs of this vertex. These are primary
   * sources of data.
   * @return Names of inputs to this vertex. May be null if there are no inputs
   */
  @Nullable
  public Set<String> getVertexInputNames();

  /**
   * Set the placement hint for tasks in this vertex
   *
   * @param locationHint the per-task placement policy to apply
   */
  public void setVertexLocationHint(VertexLocationHint locationHint);

  /**
   * @return DAG Attempt number
   */
  public int getDAGAttemptNumber();

  /**
   * Register to get notifications on updates to the specified vertex. Notifications will be sent
   * via {@link VertexManagerPlugin#onVertexStateUpdated(org.apache.tez.dag.api.event.VertexStateUpdate)}
   *
   * This method can only be invoked once. Duplicate invocations will result in an error.
   *
   * @param vertexName the vertex name for which notifications are required.
   * @param stateSet the set of states for which notifications are required. null implies all
   */
  void registerForVertexStateUpdates(String vertexName, @Nullable Set<VertexState> stateSet);

  /**
   * Optional API. No need to call this when the vertex is not fully defined to
   * start with. E.g. vertex parallelism is not defined, or edges are not
   * configured. In that case, Tez will assume that the vertex needs
   * reconfiguration. If the vertex is already fully defined, but the
   * {@link VertexManagerPlugin} wants to reconfigure the vertex, then it must
   * use this API to inform Tez about its intention. Without invoking this
   * method, it is invalid to re-configure the vertex if
   * the vertex is already fully defined. This can be invoked at any time until
   * {@link VertexManagerPlugin#initialize()} has completed. It is invalid to
   * invoke this method after {@link VertexManagerPlugin#initialize()} has
   * completed<br>
   * If this API is invoked, then {@link #doneReconfiguringVertex()} must be
   * invoked after the {@link VertexManagerPlugin} is done reconfiguring the
   * vertex. Actions like scheduling tasks or sending events do not count as
   * reconfiguration.
   */
  public void vertexReconfigurationPlanned();

  /**
   * Optional API. This needs to be called only if {@link #vertexReconfigurationPlanned()} has been
   * invoked. This must be called after {@link #vertexReconfigurationPlanned()} is called.
   */
  public void doneReconfiguringVertex();

  /**
   * Optional API. This API can be invoked to declare that the
   * {@link VertexManagerPlugin} is done with its work. After this the system
   * will not invoke the plugin methods any more. It is invalid for the plugin to
   * make further invocations of the context APIs after this. This can be used
   * to stop receiving further {@link VertexState} notifications after the
   * plugin has made all changes.
   */
  // TODO must be done later after TEZ-1714
  //public void vertexManagerDone();

  /**
   * Get input vertex groups of this vertex, including vertex group name and
   * all members vertex name
   *
   * @return map whose key is vertex group name and value is list of members' name,
   *         or empty map if there is no input vertex group.
   */
  Map<String, List<String>> getInputVertexGroups();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.flink.translation.wrappers.streaming.io;

import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
import org.apache.beam.runners.flink.FlinkPipelineOptions;
import org.apache.beam.runners.flink.metrics.FlinkMetricContainer;
import org.apache.beam.runners.flink.metrics.ReaderInvocationUtil;
import org.apache.beam.runners.flink.translation.types.CoderTypeInformation;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.ValueWithRecordId;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.functions.StoppableFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.OperatorStateStore;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.DefaultOperatorStateBackend;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Wrapper for executing {@link UnboundedSource UnboundedSources} as a Flink Source. */
public class UnboundedSourceWrapper<OutputT, CheckpointMarkT extends UnboundedSource.CheckpointMark>
    extends RichParallelSourceFunction<WindowedValue<ValueWithRecordId<OutputT>>>
    implements ProcessingTimeCallback, StoppableFunction, CheckpointListener, CheckpointedFunction {

  private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceWrapper.class);

  private final String stepName;

  /** Keep the options so that we can initialize the localReaders. */
  private final SerializablePipelineOptions serializedOptions;

  /** For snapshot and restore. */
  private final KvCoder<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>
      checkpointCoder;

  /**
   * The split sources. We split them in the constructor to ensure that all parallel sources are
   * consistent about the split sources.
   */
  private final List<? extends UnboundedSource<OutputT, CheckpointMarkT>> splitSources;

  /** The local split sources. Assigned at runtime when the wrapper is executed in parallel. */
  private transient List<UnboundedSource<OutputT, CheckpointMarkT>> localSplitSources;

  /**
   * The local split readers. Assigned at runtime when the wrapper is executed in parallel. Make it
   * a field so that we can access it in {@link #onProcessingTime(long)} for emitting watermarks.
   */
  private transient List<UnboundedSource.UnboundedReader<OutputT>> localReaders;

  /**
   * Flag to indicate whether the source is running. Initialize here and not in run() to prevent
   * races where we cancel a job before run() is ever called or run() is called after cancel().
   */
  private volatile boolean isRunning = true;

  /**
   * Make it a field so that we can access it in {@link #onProcessingTime(long)} for registering new
   * triggers.
   */
  private transient StreamingRuntimeContext runtimeContext;

  /**
   * Make it a field so that we can access it in {@link #onProcessingTime(long)} for emitting
   * watermarks.
   */
  private transient SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> context;

  /** Pending checkpoints which have not been acknowledged yet. */
  private transient LinkedHashMap<Long, List<CheckpointMarkT>> pendingCheckpoints;

  /** Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}. */
  private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32;

  private transient ListState<
          KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>>
      stateForCheckpoint;

  /** false if checkpointCoder is null or no restore state by starting first. */
  private transient boolean isRestored = false;

  @SuppressWarnings("unchecked")
  public UnboundedSourceWrapper(
      String stepName,
      PipelineOptions pipelineOptions,
      UnboundedSource<OutputT, CheckpointMarkT> source,
      int parallelism)
      throws Exception {
    this.stepName = stepName;
    this.serializedOptions = new SerializablePipelineOptions(pipelineOptions);

    if (source.requiresDeduping()) {
      LOG.warn("Source {} requires deduping but Flink runner doesn't support this yet.", source);
    }

    Coder<CheckpointMarkT> checkpointMarkCoder = source.getCheckpointMarkCoder();
    if (checkpointMarkCoder == null) {
      LOG.info("No CheckpointMarkCoder specified for this source. Won't create snapshots.");
      checkpointCoder = null;
    } else {

      Coder<? extends UnboundedSource<OutputT, CheckpointMarkT>> sourceCoder =
          (Coder) SerializableCoder.of(new TypeDescriptor<UnboundedSource>() {});

      checkpointCoder = KvCoder.of(sourceCoder, checkpointMarkCoder);
    }

    // get the splits early. we assume that the generated splits are stable,
    // this is necessary so that the mapping of state to source is correct
    // when restoring
    splitSources = source.split(parallelism, pipelineOptions);
  }

  /** Initialize and restore state before starting execution of the source. */
  @Override
  public void open(Configuration parameters) throws Exception {
    runtimeContext = (StreamingRuntimeContext) getRuntimeContext();

    // figure out which split sources we're responsible for
    int subtaskIndex = runtimeContext.getIndexOfThisSubtask();
    int numSubtasks = runtimeContext.getNumberOfParallelSubtasks();

    localSplitSources = new ArrayList<>();
    localReaders = new ArrayList<>();
    pendingCheckpoints = new LinkedHashMap<>();

    if (isRestored) {
      // restore the splitSources from the checkpoint to ensure consistent ordering
      for (KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> restored :
          stateForCheckpoint.get()) {
        localSplitSources.add(restored.getKey());
        localReaders.add(
            restored.getKey().createReader(serializedOptions.get(), restored.getValue()));
      }
    } else {
      // initialize localReaders and localSources from scratch
      // round-robin assignment: this subtask owns every numSubtasks-th split
      for (int i = 0; i < splitSources.size(); i++) {
        if (i % numSubtasks == subtaskIndex) {
          UnboundedSource<OutputT, CheckpointMarkT> source = splitSources.get(i);
          UnboundedSource.UnboundedReader<OutputT> reader =
              source.createReader(serializedOptions.get(), null);
          localSplitSources.add(source);
          localReaders.add(reader);
        }
      }
    }

    LOG.info(
        "Unbounded Flink Source {}/{} is reading from sources: {}",
        subtaskIndex,
        numSubtasks,
        localSplitSources);
  }

  /**
   * Main read loop: starts every local reader, then keeps advancing them and
   * emitting elements until cancelled/stopped. All reader interaction happens
   * under the checkpoint lock so emission stays atomic w.r.t. snapshots.
   */
  @Override
  public void run(SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx) throws Exception {

    context = ctx;

    FlinkMetricContainer metricContainer = new FlinkMetricContainer(getRuntimeContext());

    ReaderInvocationUtil<OutputT, UnboundedSource.UnboundedReader<OutputT>> readerInvoker =
        new ReaderInvocationUtil<>(stepName, serializedOptions.get(), metricContainer);

    if (localReaders.isEmpty()) {
      // It can happen when value of parallelism is greater than number of IO readers (for example,
      // parallelism is 2 and number of Kafka topic partitions is 1). In this case, we just fall
      // through to idle this executor.
      LOG.info("Number of readers is 0 for this task executor, idle");
      // set this, so that the later logic will emit a final watermark and then decide whether
      // to idle or not
      isRunning = false;
    } else if (localReaders.size() == 1) {
      // the easy case, we just read from one reader
      UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(0);

      synchronized (ctx.getCheckpointLock()) {
        boolean dataAvailable = readerInvoker.invokeStart(reader);
        if (dataAvailable) {
          emitElement(ctx, reader);
        }
      }

      setNextWatermarkTimer(this.runtimeContext);

      while (isRunning) {
        boolean dataAvailable;
        synchronized (ctx.getCheckpointLock()) {
          dataAvailable = readerInvoker.invokeAdvance(reader);
          if (dataAvailable) {
            emitElement(ctx, reader);
          }
        }
        if (!dataAvailable) {
          // back off briefly when the source has nothing to offer
          Thread.sleep(50);
        }
      }
    } else {
      // a bit more complicated, we are responsible for several localReaders
      // loop through them and sleep if none of them had any data

      int numReaders = localReaders.size();
      int currentReader = 0;

      // start each reader and emit data if immediately available
      for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) {
        synchronized (ctx.getCheckpointLock()) {
          boolean dataAvailable = readerInvoker.invokeStart(reader);
          if (dataAvailable) {
            emitElement(ctx, reader);
          }
        }
      }

      setNextWatermarkTimer(this.runtimeContext);

      // a flag telling us whether any of the localReaders had data
      // if no reader had data, sleep for bit
      boolean hadData = false;
      while (isRunning) {
        UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(currentReader);

        synchronized (ctx.getCheckpointLock()) {
          boolean dataAvailable = readerInvoker.invokeAdvance(reader);
          if (dataAvailable) {
            emitElement(ctx, reader);
            hadData = true;
          }
        }

        currentReader = (currentReader + 1) % numReaders;
        // only sleep after a full round-robin pass produced no data
        if (currentReader == 0 && !hadData) {
          Thread.sleep(50);
        } else if (currentReader == 0) {
          hadData = false;
        }
      }
    }

    // signal completion of this subtask's event time
    ctx.emitWatermark(new Watermark(Long.MAX_VALUE));

    FlinkPipelineOptions options = serializedOptions.get().as(FlinkPipelineOptions.class);
    if (!options.isShutdownSourcesOnFinalWatermark()) {
      // do nothing, but still look busy ...
      // we can't return here since Flink requires that all operators stay up,
      // otherwise checkpointing would not work correctly anymore
      //
      // See https://issues.apache.org/jira/browse/FLINK-2491 for progress on this issue

      // wait until this is canceled
      final Object waitLock = new Object();
      while (isRunning) {
        try {
          // Flink will interrupt us at some point
          //noinspection SynchronizationOnLocalVariableOrMethodParameter
          synchronized (waitLock) {
            // don't wait indefinitely, in case something goes horribly wrong
            waitLock.wait(1000);
          }
        } catch (InterruptedException e) {
          if (!isRunning) {
            // restore the interrupted state, and fall through the loop
            Thread.currentThread().interrupt();
          }
        }
      }
    }
  }

  /** Emit the current element from the given Reader. The reader is guaranteed to have data. */
  private void emitElement(
      SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx,
      UnboundedSource.UnboundedReader<OutputT> reader) {
    // make sure that reader state update and element emission are atomic
    // with respect to snapshots
    OutputT item = reader.getCurrent();
    byte[] recordId = reader.getCurrentRecordId();
    Instant timestamp = reader.getCurrentTimestamp();

    WindowedValue<ValueWithRecordId<OutputT>> windowedValue =
        WindowedValue.of(
            new ValueWithRecordId<>(item, recordId),
            timestamp,
            GlobalWindow.INSTANCE,
            PaneInfo.NO_FIRING);
    ctx.collectWithTimestamp(windowedValue, timestamp.getMillis());
  }

  @Override
  public void close() throws Exception {
    super.close();
    if (localReaders != null) {
      for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) {
        reader.close();
      }
    }
  }

  @Override
  public void cancel() {
    isRunning = false;
  }

  @Override
  public void stop() {
    isRunning = false;
  }

  // ------------------------------------------------------------------------
  //  Checkpoint and restore
  // ------------------------------------------------------------------------

  @Override
  public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception {
    if (!isRunning) {
      LOG.debug("snapshotState() called on closed source");
    } else {

      if (checkpointCoder == null) {
        // no checkpoint coder available in this source
        return;
      }

      stateForCheckpoint.clear();

      long checkpointId = functionSnapshotContext.getCheckpointId();

      // we checkpoint the sources along with the CheckpointMarkT to ensure
      // than we have a correct mapping of checkpoints to sources when
      // restoring
      List<CheckpointMarkT> checkpointMarks = new ArrayList<>(localSplitSources.size());

      for (int i = 0; i < localSplitSources.size(); i++) {
        UnboundedSource<OutputT, CheckpointMarkT> source = localSplitSources.get(i);
        UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(i);

        @SuppressWarnings("unchecked")
        CheckpointMarkT mark = (CheckpointMarkT) reader.getCheckpointMark();
        checkpointMarks.add(mark);
        KV<UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> kv = KV.of(source, mark);
        stateForCheckpoint.add(kv);
      }

      // cleanup old pending checkpoints and add new checkpoint
      // (bounded at MAX_NUMBER_PENDING_CHECKPOINTS; oldest entries evicted first)
      int diff = pendingCheckpoints.size() - MAX_NUMBER_PENDING_CHECKPOINTS;
      if (diff >= 0) {
        for (Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); diff >= 0; diff--) {
          iterator.next();
          iterator.remove();
        }
      }
      pendingCheckpoints.put(checkpointId, checkpointMarks);
    }
  }

  @Override
  public void initializeState(FunctionInitializationContext context) throws Exception {
    if (checkpointCoder == null) {
      // no checkpoint coder available in this source
      return;
    }

    OperatorStateStore stateStore = context.getOperatorStateStore();
    CoderTypeInformation<KV<?
extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> typeInformation = (CoderTypeInformation) new CoderTypeInformation<>(checkpointCoder); stateForCheckpoint = stateStore.getOperatorState( new ListStateDescriptor<>( DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME, typeInformation.createSerializer(new ExecutionConfig()))); if (context.isRestored()) { isRestored = true; LOG.info("Having restore state in the UnbounedSourceWrapper."); } else { LOG.info("No restore state for UnbounedSourceWrapper."); } } @Override public void onProcessingTime(long timestamp) { if (this.isRunning) { synchronized (context.getCheckpointLock()) { // find minimum watermark over all localReaders long watermarkMillis = Long.MAX_VALUE; for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) { Instant watermark = reader.getWatermark(); if (watermark != null) { watermarkMillis = Math.min(watermark.getMillis(), watermarkMillis); } } context.emitWatermark(new Watermark(watermarkMillis)); if (watermarkMillis >= BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis()) { this.isRunning = false; } } setNextWatermarkTimer(this.runtimeContext); } } // the callback is ourselves so there is nothing meaningful we can do with the ScheduledFuture @SuppressWarnings("FutureReturnValueIgnored") private void setNextWatermarkTimer(StreamingRuntimeContext runtime) { if (this.isRunning) { long watermarkInterval = runtime.getExecutionConfig().getAutoWatermarkInterval(); synchronized (context.getCheckpointLock()) { long timeToNextWatermark = runtime.getProcessingTimeService().getCurrentProcessingTime() + watermarkInterval; runtime.getProcessingTimeService().registerTimer(timeToNextWatermark, this); } } } /** Visible so that we can check this in tests. Must not be used for anything else. */ @VisibleForTesting public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getSplitSources() { return splitSources; } /** Visible so that we can check this in tests. 
Must not be used for anything else. */ @VisibleForTesting public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getLocalSplitSources() { return localSplitSources; } @Override public void notifyCheckpointComplete(long checkpointId) throws Exception { List<CheckpointMarkT> checkpointMarks = pendingCheckpoints.get(checkpointId); if (checkpointMarks != null) { // remove old checkpoints including the current one Iterator<Long> iterator = pendingCheckpoints.keySet().iterator(); long currentId; do { currentId = iterator.next(); iterator.remove(); } while (currentId != checkpointId); // confirm all marks for (CheckpointMarkT mark : checkpointMarks) { mark.finalizeCheckpoint(); } } } }
package com.mysticplanet.virtualperry; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.Vector; import javax.xml.parsers.ParserConfigurationException; import org.ispeech.MarkerHolder; import org.ispeech.SpeechSynthesis; import org.ispeech.SpeechSynthesisEvent; import org.ispeech.VisemeHolder; import org.ispeech.error.BusyException; import org.ispeech.error.InvalidApiKeyException; import org.ispeech.error.NoNetworkException; import org.xml.sax.SAXException; import android.app.Activity; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.SharedPreferences; import android.media.AudioManager; import android.media.MediaPlayer; import android.net.Uri; import android.net.Uri.Builder; import android.os.Bundle; import android.preference.PreferenceManager; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.widget.EditText; import android.widget.Toast; import com.mysticplanet.virtualperry.R; import com.mysticplanet.virtualperry.R.id; import com.mysticplanet.virtualperry.R.layout; import com.mysticplanet.virtualperry.R.menu; /** * * */ public class TTSActivity extends ActionBase { private static final String TAG = "VP"; SpeechSynthesis synthesis; Context _context; public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); _context = this.getApplicationContext(); getActionBar().setDisplayHomeAsUpEnabled(true); setContentView(R.layout.tts); ((EditText) findViewById(R.id.text)).setText(R.string.tts_sample_text); findViewById(R.id.speak).setOnClickListener(new OnSpeakListener()); findViewById(R.id.stop).setOnClickListener(new OnStopListener()); prepareTTSEngine(); synthesis.setStreamType(AudioManager.STREAM_MUSIC); } private void prepareTTSEngine() { try { SharedPreferences sharedPref = 
PreferenceManager.getDefaultSharedPreferences(getBaseContext()); String voiceType = sharedPref.getString("vp_lang_resp_type", "").replace(' ', '_'); synthesis = SpeechSynthesis.getInstance(this); synthesis.setVoiceType(voiceType); synthesis.addOptionalCommand("speed", sharedPref.getString("vp_voiceSpeed", "")); synthesis.addOptionalCommand("pitch", sharedPref.getString("vp_voicePitch", "")); synthesis.addOptionalCommand("bitrate", sharedPref.getString("vp_bitRate", "")); synthesis.addOptionalCommand("format","mp3"); synthesis.setSpeechSynthesisEvent(new SpeechSynthesisEvent() { public void onPlaySuccessful() { Log.i(TAG, "onPlaySuccessful"); } public void onPlayStopped() { Log.i(TAG, "onPlayStopped"); } public void onPlayFailed(Exception e) { Log.e(TAG, "onPlayFailed"); AlertDialog.Builder builder = new AlertDialog.Builder(TTSActivity.this); builder.setMessage("Error[TTSActivity]: " + e.toString()) .setCancelable(false) .setPositiveButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { } }); AlertDialog alert = builder.create(); alert.show(); } public void onPlayStart() { Log.i(TAG, "onPlayStart"); } @Override public void onPlayCanceled() { Log.i(TAG, "onPlayCanceled"); } }); } catch (InvalidApiKeyException e) { Log.e(TAG, "Invalid API key\n" + e.getStackTrace()); Toast.makeText(_context, "ERROR: Invalid API key", Toast.LENGTH_LONG).show(); } } private class OnSpeakListener implements OnClickListener { public void onClick(View v) { // try { // String ttsText = ((EditText) findViewById(R.id.text)).getText().toString(); // byte [] b = synthesis.downloadByteArray(ttsText); // // if (b!=null){ // Log.d("DEBUG", "SUCESSSSSSSS!!!!!"); // MediaPlayer mediaPlayer; // mediaPlayer = new MediaPlayer(); // // File tempMp3 = File.createTempFile("test", ".mp3", getCacheDir()); // FileOutputStream fos = new FileOutputStream(tempMp3); // fos.write(b); // fos.close(); // // mediaPlayer = 
MediaPlayer.create(getApplicationContext(),Uri.fromFile(tempMp3)); // mediaPlayer.start(); // // //// mediaPlayer.setDataSource(); // }else{ // Log.d("DEBUG", "FAILURE :( "); // } // // } catch (BusyException e) { // Log.e(TAG, "SDK is busy"); // e.printStackTrace(); // Toast.makeText(_context, "ERROR: SDK is busy", Toast.LENGTH_LONG).show(); // } catch (NoNetworkException e) { // Log.e(TAG, "Network is not available\n" + e.getStackTrace()); // Toast.makeText(_context, "ERROR: Network is not available", Toast.LENGTH_LONG).show(); // } catch (IOException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } try { String ttsText = ((EditText) findViewById(R.id.text)).getText().toString(); synthesis.speak(ttsText); } catch (BusyException e) { Log.e(TAG, "SDK is busy"); e.printStackTrace(); Toast.makeText(_context, "ERROR: SDK is busy", Toast.LENGTH_LONG).show(); } catch (NoNetworkException e) { Log.e(TAG, "Network is not available\n" + e.getStackTrace()); Toast.makeText(_context, "ERROR: Network is not available", Toast.LENGTH_LONG).show(); } } } public class OnStopListener implements OnClickListener { public void onClick(View v) { if (synthesis != null) { synthesis.stop(); } // VisemeHolder vh; // final String TAG = "VISEME INFO"; // // try { // vh = synthesis.getVisemeInfo("Hello World", "usenglishfemale", "0", "mp3"); // int frames = vh.getFrames(); // Log.d("DEBUG", "FRAMES:" + frames); // Log.d("DEBUG", "TOTAL LENGTH (in ms):" + vh.getTotalLength()); // for (int i = 0; i<frames; i++){ // // Log.d(TAG, "Start (in ms):" + vh.getStart(i)); // Log.d(TAG, "End (in ms):" + vh.getEnd(i)); // Log.d(TAG, "Length (in ms):" + vh.getLength(i)); // Log.d(TAG, "Frame:" + (i)); // Log.d(TAG, "Mouth:" + vh.getMouth(i)); // Log.d(TAG, " "); // } // } catch (Exception e) { // e.printStackTrace(); // } // MarkerHolder mh; // final String TAG = "MARKER INFO"; // // try { // mh = synthesis.getMarkerInfo("Hello World", "usenglishfemale", "0", "mp3"); // int words = 
mh.getWords(); // Log.d("DEBUG", "WORDS:" + words); // Log.d("DEBUG", "TOTAL LENGTH (in ms):" + mh.getTotalLength()); // for (int i = 0; i<words; i++){ // // Log.d("DEBUG", "Start (in ms):" + mh.getStart(i)); // Log.d("DEBUG", "End (in ms):" + mh.getEnd(i)); // Log.d("DEBUG", "Length (in ms):" + mh.getLength(i)); // Log.d("DEBUG", "Word:" + (i)); // Log.d("DEBUG", "Text:" + mh.getText(i)); // Log.d("DEBUG", " "); // } // } catch (Exception e) { // e.printStackTrace(); // } } } @Override protected void onPause() { synthesis.stop(); //Optional to stop the playback when the activity is paused super.onPause(); } }
/*
 * #%L
 * OW2 Chameleon - Core
 * %%
 * Copyright (C) 2009 - 2014 OW2 Chameleon
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.ow2.chameleon.core.activators;

import org.osgi.framework.*;
import org.osgi.framework.wiring.FrameworkWiring;
import org.ow2.chameleon.core.services.AbstractDeployer;
import org.ow2.chameleon.core.services.Deployer;
import org.ow2.chameleon.core.utils.BundleHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.*;

/**
 * Deployer installing and starting bundles.
 *
 * @author The OW2 Chameleon Team
 * @version $Id: 1.0.4 $Id
 */
public class BundleDeployer extends AbstractDeployer implements BundleActivator {

    /**
     * The URL prefix to enable 'reference'.
     */
    public static final String REFERENCE_URL_PREFIX = "reference:";
    /**
     * Flag indicating whether we use the {@literal reference://} protocol.
     */
    private final boolean useReference;
    /**
     * Flag indicating whether we refresh bundles after un-installations and updates.
     */
    private final boolean autoRefresh;

    /**
     * The managed bundles, keyed by the file they were installed from.
     * Guarded by {@code synchronized (this)} in the file-event callbacks.
     */
    Map<File, Bundle> bundles = new HashMap<File, Bundle>();
    /**
     * The bundle context.
     */
    private BundleContext context;
    /**
     * The logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(BundleDeployer.class);

    /**
     * Creates a bundle deployer.
     *
     * @param useReferences whether bundles are installed using the {@code reference:} URL
     *                      prefix (the framework reads the file in place instead of copying it)
     * @param autoRefresh   whether a framework refresh is triggered after updates and
     *                      un-installations to clean up stale wirings
     */
    public BundleDeployer(boolean useReferences, boolean autoRefresh) {
        this.autoRefresh = autoRefresh;
        this.useReference = useReferences;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void start(BundleContext context) {
        this.context = context;
        context.registerService(Deployer.class, this, null);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void stop(BundleContext context) {
        // The services are withdrawn automatically by the framework.
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean accept(File file) {
        // If the file does not exist anymore, isFile returns false.
        // Deleted files must still be accepted so onFileDelete can uninstall them.
        return file.getName().endsWith(".jar") && (!file.isFile() || BundleHelper.isBundle(file));
    }

    /**
     * {@inheritDoc}
     * <p>
     * Updates the bundle when the file is already tracked, installs (and starts, unless it is
     * a fragment) it otherwise. After either path, attempts to start any other managed bundles
     * that are not yet ACTIVE, since the new/updated bundle may have satisfied their dependencies.
     */
    @Override
    public void onFileCreate(File file) {
        LOGGER.debug("File creation event received for {}", file.getAbsoluteFile());
        synchronized (this) {
            if (bundles.containsKey(file)) {
                Bundle bundle = bundles.get(file);
                LOGGER.info("Updating bundle {} - {}", bundle.getSymbolicName(), file.getAbsoluteFile());
                try {
                    bundle.update();
                    refresh();
                    // Then try to start other not started bundles.
                    tryToStartUnstartedBundles(bundle);
                    // If the bundle we just update is not started, try to start it.
                    // Obviously, this action is not done on fragment.
                    if (bundle.getState() != Bundle.ACTIVE && !BundleHelper.isFragment(bundle)) {
                        bundle.start();
                    }
                } catch (BundleException e) {
                    LOGGER.error("Error during bundle update {} from {}", bundle.getSymbolicName(),
                            file.getAbsoluteFile(), e);
                } catch (IllegalStateException e) {
                    LOGGER.error("Cannot update the bundle " + file.getAbsolutePath() + " - the framework is either " +
                            "stopping or restarting");
                    LOGGER.debug("Invalid bundle context", e);
                }
            } else {
                LOGGER.info("Installing bundle from {}", file.getAbsoluteFile());
                try {
                    Bundle bundle;
                    if (useReference) {
                        bundle = context.installBundle(REFERENCE_URL_PREFIX + file.toURI().toURL()
                                .toExternalForm());
                    } else {
                        bundle = context.installBundle(file.toURI().toURL().toExternalForm());
                    }
                    bundles.put(file, bundle);
                    if (!BundleHelper.isFragment(bundle)) {
                        LOGGER.info("Starting bundle {} - {}", bundle.getSymbolicName(), file.getAbsoluteFile());
                        bundle.start();
                    }
                    // We have installed a new bundle, let's see if it has an impact on the other one.
                    tryToStartUnstartedBundles(bundle);
                } catch (Exception e) {
                    LOGGER.error("Error during bundle installation of {}", file.getAbsoluteFile(), e);
                }
            }
        }
    }

    /**
     * Iterates over the set of bundles and try to start unstarted bundles.
     * This method is called when holding the monitor lock.
     *
     * @param bundle the installed bundle triggering this attempt.
     */
    private void tryToStartUnstartedBundles(Bundle bundle) {
        for (Bundle b : bundles.values()) {
            if (!bundle.equals(b) && b.getState() != Bundle.ACTIVE && !BundleHelper.isFragment(b)) {
                LOGGER.debug("Trying to start bundle {} after having installed bundle {}",
                        b.getSymbolicName(), bundle.getSymbolicName());
                try {
                    b.start();
                } catch (BundleException e) {
                    // Best effort: the dependency may still be unresolved; debug level only.
                    LOGGER.debug("Failed to start bundle {} after having installed bundle {}",
                            b.getSymbolicName(), bundle.getSymbolicName(), e);
                }
            }
        }
    }

    /**
     * {@inheritDoc}
     * <p>
     * It's a good practice to install all bundles and then start them.
     * This method cannot be interrupted.
     */
    @Override
    public void open(Collection<File> files) {
        // NOTE(review): unlike onFileCreate/onFileDelete, this method mutates 'bundles'
        // without synchronizing on 'this' — presumably open() runs before file monitoring
        // starts; verify against the caller.
        List<Bundle> toStart = new ArrayList<Bundle>();
        for (File file : files) {
            try {
                Bundle bundle;
                // Compute the url. if we use 'reference' prepend 'reference:'
                if (useReference) {
                    bundle = context.installBundle(REFERENCE_URL_PREFIX + file.toURI().toURL().toExternalForm());
                } else {
                    bundle = context.installBundle(file.toURI().toURL().toExternalForm());
                }
                bundles.put(file, bundle);
                if (!BundleHelper.isFragment(bundle)) {
                    toStart.add(bundle);
                }
            } catch (Exception e) {
                // We catch any exception has it may be runtime exception (IllegalStateException).
                LOGGER.error("Error during bundle installation of {}", file.getAbsoluteFile(), e);
            }
        }

        // toStart contains only regular bundles (not the fragments).
        for (Bundle bundle : toStart) {
            try {
                bundle.start();
            } catch (BundleException e) {
                LOGGER.error("Error during the starting of {}", bundle.getSymbolicName(), e);
            }
        }
    }

    /**
     * {@inheritDoc}
     * <p>
     * Uninstalls the bundle tracked for the deleted file (if any) and triggers a refresh.
     * The map removal happens under the monitor lock; the uninstall itself does not.
     */
    @Override
    public void onFileDelete(File file) {
        Bundle bundle;
        synchronized (this) {
            bundle = bundles.remove(file);
        }

        if (bundle != null) {
            try {
                LOGGER.info("Uninstalling bundle {}", bundle.getSymbolicName());
                bundle.uninstall();
                refresh();
            } catch (BundleException e) {
                LOGGER.error("Error during the un-installation of {}", bundle.getSymbolicName(), e);
            }
        }
    }

    /**
     * Asynchronously refreshes the framework wiring (when {@code autoRefresh} is set) so that
     * stale references left by updates/un-installations are cleaned up. Errors reported by the
     * refresh operation are logged from the framework-event callback.
     */
    public void refresh() {
        if (autoRefresh) {
            // Bundle 0 is the system bundle; it exposes the FrameworkWiring API.
            Bundle system = context.getBundle(0l);
            FrameworkWiring wiring = system.adapt(FrameworkWiring.class);
            LOGGER.debug("Refreshing bundles to cleanup stale references");
            wiring.refreshBundles(null, new FrameworkListener() {
                @Override
                public void frameworkEvent(FrameworkEvent event) {
                    if (event.getThrowable() != null) { //NOSONAR
                        LOGGER.error("An error was detected while refreshing the bundles",
                                event.getThrowable());
                    }
                }
            });
        }
    }
}
package cz.polankam.jjcron;

import cz.polankam.jjcron.common.TaskMetadata;
import cz.polankam.jjcron.common.TaskStats;
import cz.polankam.jjcron.remote.TaskDetail;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Class which is responsible for creation of tasks and their scheduling. The pool
 * of tasks is maintained and worked with here; all tasks can be stopped through
 * the api or a new set of tasks can be reloaded. Parsing of tasks from crontab has
 * to be done elsewhere, only task meta info is processed here.
 * <p>
 * After construction thread-safe structure.</p>
 *
 * @author Neloop
 */
public final class TaskScheduler {

    /**
     * Sleep interval (in seconds) which is used for checking on task pool termination.
     */
    private static final int SLEEP_INTERVAL = 1;

    /**
     * Standard Java logger.
     */
    private static final Logger logger
            = Logger.getLogger(TaskScheduler.class.getName());

    /**
     * If set to true, then {@link #start(List)} function was called and tasks
     * are running.
     */
    private final AtomicBoolean running;
    /**
     * If set to true, then termination of task pool was requested.
     */
    private final AtomicBoolean exit;

    /**
     * Responsible for task scheduling. It is initialized with processor count
     * equal to real CPU processors (including HTT).
     * Replaced wholesale (shutdownNow + new pool) on delete/stop/reload since
     * individual scheduled tasks cannot be cancelled.
     */
    private ScheduledExecutorService scheduler;
    /**
     * Task pool which is managed by this class instance, keyed by generated UUID.
     */
    private final Map<String, TaskHolder> tasks;
    /**
     * Helps with construction of proper Task children objects.
     */
    private final TaskFactory taskFactory;

    /**
     * Empty task factory which is used in case of unnecessary task factory.
     * Implemented to fail with an explicit exception instead of a null
     * dereference when no real factory was supplied.
     */
    private class NullTaskFactory implements TaskFactory {

        /**
         * Not implemented version of method which should not be used.
         *
         * @param taskMeta none
         * @return none
         * @throws TaskException always thrown due to not implemented
         * functionality
         */
        @Override
        public Task createTask(TaskMetadata taskMeta) throws TaskException {
            throw new TaskException("TaskFactory cannot be used");
        }
    }

    /**
     * Construct task scheduler with specified task factory, tasks are not
     * executed yet. All internal structures are initialized.
     *
     * @param taskFactory factory which helps constructing tasks, can be null
     */
    public TaskScheduler(TaskFactory taskFactory) {
        logger.log(Level.INFO, "TaskManager was created");

        this.exit = new AtomicBoolean(false);
        this.running = new AtomicBoolean(false);
        this.tasks = new HashMap<>();
        this.scheduler = Executors.newScheduledThreadPool(
                Runtime.getRuntime().availableProcessors());

        if (taskFactory == null) {
            taskFactory = new NullTaskFactory();
        }
        this.taskFactory = taskFactory;
    }

    /**
     * Wait until tasks execution is terminated.
     * <p>
     * Thread-safe function.</p>
     */
    public final void justWait() {
        try {
            // polling loop: woken only by the exit flag set in exit()
            while (!exit.get()) {
                TimeUnit.SECONDS.sleep(SLEEP_INTERVAL);
            }
        } catch (InterruptedException e) {
            logger.log(Level.WARNING, "Waiting thread was interrupted.");
        }
    }

    /**
     * Atomically sets termination of tasks execution and shutdown all current
     * executing tasks.
     * <p>
     * Thread-safe function.</p>
     */
    public final void exit() {
        logger.log(Level.INFO, "Exit function was called."
                + " All scheduled tasks should be terminated");
        scheduler.shutdownNow();
        running.set(false);
        exit.set(true);
    }

    /**
     * Helper class which allows to run task and reschedule it after execution.
     */
    private class RunTask implements Runnable {

        /**
         * {@link Task} associated with this object.
         */
        private final TaskHolder taskHolder;

        /**
         * Given task is stored and time structure is extracted from it.
         *
         * @param task {@link TaskHolder} which will be executed in this object.
         */
        public RunTask(TaskHolder task) {
            this.taskHolder = task;
        }

        /**
         * Run method which is executed by scheduler. Task is executed inside
         * and after execution is rescheduled to next timepoint.
         */
        @Override
        public void run() {
            // run task itself
            try {
                long start = System.nanoTime();
                taskHolder.task.run();
                long end = System.nanoTime();
                // record wall-clock completion time and nanosecond duration
                taskHolder.stats.record(LocalDateTime.now(), end - start);
            } catch (Exception e) {
                // catch-all so one failing execution never kills the recurring schedule
                logger.log(Level.WARNING, "Task: {0} throws exception"
                        + " while execution: {1}",
                        new Object[]{taskHolder.task.name(), e.getMessage()});
            }

            // ... and reschedule task to another time point
            scheduleTask(taskHolder);
        }
    }

    /**
     * Schedule given task to its first execution point.
     * <p>
     * Thread-safe function.</p>
     *
     * @param holder task which will be scheduled
     */
    private synchronized void scheduleTask(TaskHolder holder) {
        // NOTE(review): scheduling marks the pool as running as a side effect
        running.set(true);

        long delay = holder.task.delay(LocalDateTime.now());
        logger.log(Level.INFO, "Task {0} was scheduled to {1}",
                new Object[]{holder.task.name(),
                    LocalDateTime.now().plusSeconds(
                            holder.task.timeUnit().toSeconds(delay))});
        scheduler.schedule(new RunTask(holder), delay, holder.task.timeUnit());
    }

    /**
     * Create task from given <code>taskMeta</code> information and schedule it
     * to its first time point.
     * <p>
     * Thread-safe function.</p>
     *
     * @param taskMeta information about task
     * @return identification of newly added task
     * @throws TaskException if task creation failed
     */
    private synchronized String loadTask(TaskMetadata taskMeta)
            throws TaskException {
        String id = UUID.randomUUID().toString();
        Task task = taskFactory.createTask(taskMeta);
        TaskHolder holder = new TaskHolder(id, task, new TaskStats());
        tasks.put(id, holder);

        // schedule first execution
        scheduleTask(holder);
        return id;
    }

    /**
     * From {@link TaskMetadata} list create appropriate task and schedule them.
     * Tasks are scheduled for next execution timepoint, after execution they
     * are rescheduled.
     * <p>
     * Thread-safe function.</p>
     *
     * @param tasksMeta list of task meta information
     * @throws TaskException if task creation failed
     */
    private synchronized void loadTasks(List<TaskMetadata> tasksMeta)
            throws TaskException {
        if (tasksMeta == null) {
            throw new TaskException("List of tasks cannot be null");
        }

        for (TaskMetadata taskMeta : tasksMeta) {
            loadTask(taskMeta);
        }
    }

    /**
     * Create task and schedule it, can be used to start croning or during
     * execution.
     * <p>
     * Thread-safe function.</p>
     *
     * @param taskMeta information about task
     * @return identification of newly added task
     * @throws TaskException if task creation failed
     */
    public final synchronized String addTask(TaskMetadata taskMeta)
            throws TaskException {
        return loadTask(taskMeta);
    }

    /**
     * Add given task to currently executing ones, can be used to start croning
     * or during execution.
     * <p>
     * Thread-safe function.</p>
     *
     * @param task task which will be added to internal ones
     * @return identification of newly added task
     * @throws TaskException if task was null
     */
    public final synchronized String addTask(Task task) throws TaskException {
        String id = UUID.randomUUID().toString();
        TaskHolder holder = new TaskHolder(id, task, new TaskStats());
        tasks.put(id, holder);
        scheduleTask(holder);
        return id;
    }

    /**
     * Deletes tasks according to given unique identification. There is no way
     * how to stop a scheduled task, so this function will stop the whole
     * execution and after deletion starts it again.
     * <p>
     * Thread-safe function.</p>
     *
     * @param id unique identification of task
     */
    public final synchronized void deleteTask(String id) {
        // NOTE(review): this reschedules all remaining tasks unconditionally,
        // even if stop() had been called before — deleting while stopped
        // resumes execution. Confirm this is the intended behavior.
        scheduler.shutdownNow();
        scheduler = Executors.newScheduledThreadPool(
                Runtime.getRuntime().availableProcessors());

        // actual deletion
        tasks.remove(id);

        for (Entry<String, TaskHolder> entry : tasks.entrySet()) {
            scheduleTask(entry.getValue());
        }
    }

    /**
     * From given {@link TaskMetadata} list constructs all tasks and schedule
     * them to their first execution timepoint. Non-blocking function, tasks are
     * only created and scheduled. If called second time, nothing will happen.
     * <p>
     * Thread-safe function.</p>
     *
     * @param tasksMeta list of task meta information
     * @throws TaskException if task creation failed
     */
    public final synchronized void start(List<TaskMetadata> tasksMeta)
            throws TaskException {
        if (running.get() == false) {
            loadTasks(tasksMeta);
            running.set(true);
        }
    }

    /**
     * Reschedules all currently loaded tasks. Should be used only as
     * counterpart of {@link #stop()} function. Cannot be used as restart;
     * running state is checked before any actions.
     * <p>
     * Thread-safe function.</p>
     */
    public final synchronized void start() {
        if (running.get() == true) {
            return;
        }

        running.set(true);
        // the old pool was shut down by stop(); a fresh one is required
        scheduler = Executors.newScheduledThreadPool(
                Runtime.getRuntime().availableProcessors());
        for (Entry<String, TaskHolder> entry : tasks.entrySet()) {
            scheduleTask(entry.getValue());
        }

        logger.log(Level.INFO, "Start was requested,"
                + "all task will be rescheduled");
    }

    /**
     * Stop all currently executing tasks. It is counterpart of {@link #start()}
     * function. Multiple calls have no effect.
     * <p>
     * Thread-safe function.</p>
     */
    public final synchronized void stop() {
        if (running.get() == false) {
            return;
        }

        running.set(false);
        scheduler.shutdownNow();

        logger.log(Level.INFO, "Stop was requested, all task are stopped now");
    }

    /**
     * Function stops all currently running tasks and loads new ones. Should be
     * used only during execution, not to start execution of cron.
     * <p>
     * Thread-safe function.</p>
     *
     * @param tasksMeta list of task meta information
     * @throws TaskException if task creation failed
     */
    public final synchronized void reloadTasks(List<TaskMetadata> tasksMeta)
            throws TaskException {
        logger.log(Level.INFO, "Task reload requested...");

        scheduler.shutdownNow();
        scheduler = Executors.newScheduledThreadPool(
                Runtime.getRuntime().availableProcessors());

        tasks.clear();
        loadTasks(tasksMeta);

        logger.log(Level.INFO, "Task reload done");
    }

    /**
     * Determines if client already exited.
     * <p>
     * Thread-safe function.</p>
     *
     * @return true if client exits, false otherwise
     */
    public final boolean isExited() {
        return exit.get();
    }

    /**
     * Determines if client is running or not.
     * <p>
     * Thread-safe function.</p>
     *
     * @return true if client is running, false otherwise
     */
    public final boolean isRunning() {
        return running.get();
    }

    /**
     * Gets list of actual {@link Task} structures which are currently running
     * or scheduled.
     * <p>
     * Thread-safe function.</p>
     *
     * @return list of scheduled tasks
     */
    public final synchronized List<Task> getTasks() {
        List<Task> result = new ArrayList<>();
        for (Entry<String, TaskHolder> entry : tasks.entrySet()) {
            result.add(entry.getValue().task);
        }
        return result;
    }

    /**
     * Gets list of {@link TaskMetadata} structures from currently
     * active/scheduled cron tasks.
     * <p>
     * Thread-safe function.</p>
     *
     * @return list of {@link TaskMetadata} structures
     */
    public final synchronized List<TaskMetadata> getTaskMetadatas() {
        List<TaskMetadata> result = new ArrayList<>();
        for (Entry<String, TaskHolder> entry : tasks.entrySet()) {
            result.add(entry.getValue().task.metadata());
        }
        return result;
    }

    /**
     * Gets list of {@link TaskDetail} structures which represents currently
     * scheduled cron tasks.
     * <p>
     * Thread-safe function.</p>
     *
     * @return list of {@link TaskDetail} structures
     */
    public final synchronized List<TaskDetail> getTaskDetails() {
        List<TaskDetail> result = new ArrayList<>();
        for (Entry<String, TaskHolder> entry : tasks.entrySet()) {
            TaskHolder holder = entry.getValue();
            Task task = holder.task;
            // compute the next fire time from the task's own delay/time unit
            LocalDateTime next = LocalDateTime.now();
            next = next.plusSeconds(task.timeUnit().toSeconds(task.delay(next)));
            result.add(new TaskDetail(holder.id, task.name(), next,
                    holder.stats, task.metadata()));
        }
        return result;
    }
}
package uk.gov.ons.ctp.response.action.scheduled.distribution;

import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.cloud.sleuth.Span;
import org.springframework.cloud.sleuth.Tracer;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import ma.glasnost.orika.MapperFacade;
import uk.gov.ons.ctp.common.FixtureHelper;
import uk.gov.ons.ctp.common.distributed.DistributedListManager;
import uk.gov.ons.ctp.common.state.StateTransitionManager;
import uk.gov.ons.ctp.response.action.config.ActionDistribution;
import uk.gov.ons.ctp.response.action.config.AppConfig;
import uk.gov.ons.ctp.response.action.config.CaseSvc;
import uk.gov.ons.ctp.response.action.domain.model.Action;
import uk.gov.ons.ctp.response.action.domain.model.ActionPlan;
import uk.gov.ons.ctp.response.action.domain.model.ActionType;
import uk.gov.ons.ctp.response.action.domain.repository.ActionPlanRepository;
import uk.gov.ons.ctp.response.action.domain.repository.ActionRepository;
import uk.gov.ons.ctp.response.action.domain.repository.ActionTypeRepository;
import uk.gov.ons.ctp.response.action.message.InstructionPublisher;
import uk.gov.ons.ctp.response.action.message.instruction.ActionCancel;
import uk.gov.ons.ctp.response.action.message.instruction.ActionRequest;
import uk.gov.ons.ctp.response.action.representation.ActionDTO;
import uk.gov.ons.ctp.response.action.representation.ActionDTO.ActionState;
import uk.gov.ons.ctp.response.action.service.CaseSvcClientService;
import uk.gov.ons.ctp.response.casesvc.representation.CaseDTO;
import uk.gov.ons.ctp.response.casesvc.representation.CaseEventDTO;
import uk.gov.ons.ctp.response.casesvc.representation.CaseGroupDTO;
import uk.gov.ons.ctp.response.casesvc.representation.CategoryDTO;

/**
 * Test the action distributor. All collaborators (repositories, the case
 * service client, the instruction publisher) are Mockito mocks; each test
 * wires up stubbed responses, runs {@code distribute()} once and verifies
 * exactly which downstream calls were made.
 */
@RunWith(MockitoJUnitRunner.class)
public class ActionDistributorTest {

  private static final int I_HATE_CHECKSTYLE_TEN = 10;

  // Spied (not mocked) so real setters below populate real config objects.
  @Spy
  private AppConfig appConfig = new AppConfig();

  @Mock
  private InstructionPublisher instructionPublisher;

  @Mock
  Tracer tracer;

  @Mock
  Span span;

  @Mock
  private DistributedListManager<BigInteger> actionDistributionListManager;

  @Mock
  private StateTransitionManager<ActionState, uk.gov.ons.ctp.response.action.representation.ActionDTO.ActionEvent> actionSvcStateTransitionManager;

  @Mock
  private MapperFacade mapperFacade;

  @Mock
  private CaseSvcClientService caseSvcClientService;

  @Mock
  private ActionRepository actionRepo;

  @Mock
  private ActionPlanRepository actionPlanRepo;

  @Mock
  private ActionTypeRepository actionTypeRepo;

  @Mock
  private TransactionTemplate transactionTemplate;

  @Mock
  private PlatformTransactionManager platformTransactionManager;

  // Class under test; the mocks above are injected into it.
  @InjectMocks
  private ActionDistributor actionDistributor;

  /**
   * Builds the distribution config the distributor reads, then injects the
   * mocks into the class under test.
   */
  @Before
  public void setup() {
    CaseSvc caseSvcConfig = new CaseSvc();
    ActionDistribution actionDistributionConfig = new ActionDistribution();
    actionDistributionConfig.setDelayMilliSeconds(I_HATE_CHECKSTYLE_TEN);
    actionDistributionConfig.setRetrievalMax(I_HATE_CHECKSTYLE_TEN);
    actionDistributionConfig.setDistributionMax(I_HATE_CHECKSTYLE_TEN);
    actionDistributionConfig.setRetrySleepSeconds(I_HATE_CHECKSTYLE_TEN);
    appConfig.setCaseSvc(caseSvcConfig);
    appConfig.setActionDistribution(actionDistributionConfig);
    MockitoAnnotations.initMocks(this);
  }

  /**
   * Test that when we fail at first hurdle to load ActionTypes we do not go on
   * to call anything else. In reality the wakeup method would then be called
   * again after a sleep interval by spring but we cannot test that here.
   *
   * @throws Exception oops
   */
  @Test
  public void testFailGetActionType() throws Exception {
    Mockito.when(actionTypeRepo.findAll()).thenThrow(new RuntimeException("Database access failed"));

    // let it roll
    actionDistributor.distribute();

    // assert the right calls were made: only findAll(); everything downstream
    // must have been skipped after the exception.
    verify(actionTypeRepo).findAll();
    verify(actionRepo, times(0)).findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdInitialContact"),
        anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class));
    verify(actionRepo, times(0)).findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdUploadIAC"),
        anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class));
    verify(caseSvcClientService, times(0)).getCase(eq(3));
    verify(caseSvcClientService, times(0)).getCase(eq(4));
    // TODO BRES fix this and all others in this test!
    // verify(caseSvcClientService, times(0)).getAddress(eq(FAKE_UPRN));
    verify(caseSvcClientService, times(0)).getCaseEvents(eq(3));
    verify(caseSvcClientService, times(0)).getCaseEvents(eq(4));
    verify(caseSvcClientService, times(0)).createNewCaseEvent(any(Action.class),
        eq(CategoryDTO.CategoryType.ACTION_CREATED));
    verify(instructionPublisher, times(0)).sendInstructions(eq("Printer"), anyListOf(ActionRequest.class),
        anyListOf(ActionCancel.class));
    verify(instructionPublisher, times(0)).sendInstructions(eq("HHSurvey"), anyListOf(ActionRequest.class),
        anyListOf(ActionCancel.class));
  }

  /**
   * Test that when we momentarily fail to call casesvc to GET two cases we
   * carry on trying and successfully deal with the actions/cases we can retrieve.
   *
   * @throws Exception oops
   */
  @Test
  public void testFailCaseGet() throws Exception {
    // load JSON fixtures for the repository/service stubs below
    List<ActionType> actionTypes = FixtureHelper.loadClassFixtures(ActionType[].class);
    List<ActionPlan> actionPlans = FixtureHelper.loadClassFixtures(ActionPlan[].class);
    List<Action> actionsHHIC = FixtureHelper.loadClassFixtures(Action[].class, "HouseholdInitialContact");
    List<Action> actionsHHIACLOAD = FixtureHelper.loadClassFixtures(Action[].class, "HouseholdUploadIAC");
    List<CaseDTO> caseDTOs = FixtureHelper.loadClassFixtures(CaseDTO[].class);
    // TODO BRES
    // List<AddressDTO> addressDTOsUprn1234 = FixtureHelper.loadClassFixtures(AddressDTO[].class, "uprn1234");
    List<CaseEventDTO> caseEventDTOs = FixtureHelper.loadClassFixtures(CaseEventDTO[].class);
    // TODO BRES
    // List<CaseTypeDTO> caseTypeDTOs = FixtureHelper.loadClassFixtures(CaseTypeDTO[].class);
    List<CaseGroupDTO> caseGroupDTOs = FixtureHelper.loadClassFixtures(CaseGroupDTO[].class);
    List<CaseEventDTO> caseEventDTOsPost = FixtureHelper.loadClassFixtures(CaseEventDTO[].class, "post");

    // wire up mock responses; note cases 1 and 2 are deliberately left
    // unstubbed so only cases 3 and 4 can be retrieved.
    Mockito.when(actionPlanRepo.findOne(any(Integer.class))).thenReturn(actionPlans.get(0));
    Mockito.when(
        actionSvcStateTransitionManager.transition(ActionState.SUBMITTED,
            ActionDTO.ActionEvent.REQUEST_DISTRIBUTED))
        .thenReturn(ActionState.PENDING);
    Mockito.when(actionTypeRepo.findAll()).thenReturn(actionTypes);
    Mockito
        .when(actionRepo.findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdInitialContact"),
            anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class)))
        .thenReturn(actionsHHIC);
    Mockito.when(
        actionRepo.findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdUploadIAC"),
            anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class)))
        .thenReturn(actionsHHIACLOAD);
    Mockito.when(caseSvcClientService.getCase(eq(3))).thenReturn(caseDTOs.get(2));
    Mockito.when(caseSvcClientService.getCase(eq(4))).thenReturn(caseDTOs.get(3));
    // TODO BRES
    // Mockito.when(caseSvcClientService.getAddress(eq(FAKE_UPRN)))
    //     .thenReturn(addressDTOsUprn1234.get(0));
    Mockito.when(caseSvcClientService.getCaseEvents(eq(3)))
        .thenReturn(Arrays.asList(new CaseEventDTO[] {caseEventDTOs.get(2)}));
    Mockito.when(caseSvcClientService.getCaseEvents(eq(4)))
        .thenReturn(Arrays.asList(new CaseEventDTO[] {caseEventDTOs.get(3)}));
    Mockito.when(
        caseSvcClientService.createNewCaseEvent(any(Action.class), eq(CategoryDTO.CategoryType.ACTION_CREATED)))
        .thenReturn(caseEventDTOsPost.get(2));
    // TODO BRES
    // Mockito.when(caseSvcClientService.getCaseType(eq(1))).thenReturn(caseTypeDTOs.get(0));
    Mockito.when(caseSvcClientService.getCaseGroup(eq(1))).thenReturn(caseGroupDTOs.get(0));

    // let it roll
    actionDistributor.distribute();

    // assert the right calls were made
    verify(actionTypeRepo).findAll();
    verify(actionRepo).findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdInitialContact"),
        anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class));
    verify(actionRepo).findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdUploadIAC"),
        anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class));
    verify(caseSvcClientService).getCase(eq(3));
    verify(caseSvcClientService).getCase(eq(4));
    // TODO BRES
    // verify(caseSvcClientService, times(2)).getAddress(eq(FAKE_UPRN));
    verify(caseSvcClientService).getCaseEvents(eq(3));
    verify(caseSvcClientService).getCaseEvents(eq(4));
    verify(caseSvcClientService, times(2)).createNewCaseEvent(any(Action.class),
        eq(CategoryDTO.CategoryType.ACTION_CREATED));
    verify(instructionPublisher, times(0)).sendInstructions(eq("Printer"), anyListOf(ActionRequest.class),
        anyListOf(ActionCancel.class));
    verify(instructionPublisher, times(1)).sendInstructions(eq("HHSurvey"), anyListOf(ActionRequest.class),
        anyListOf(ActionCancel.class));
  }

  /**
   * Test BlueSky scenario - two action types, four cases etc resulting in two
   * calls to publish.
   *
   * @throws Exception oops
   */
  @Test
  public void testBlueSky() throws Exception {
    // load JSON fixtures for the repository/service stubs below
    List<ActionType> actionTypes = FixtureHelper.loadClassFixtures(ActionType[].class);
    List<Action> actionsHHIC = FixtureHelper.loadClassFixtures(Action[].class, "HouseholdInitialContact");
    List<Action> actionsHHIACLOAD = FixtureHelper.loadClassFixtures(Action[].class, "HouseholdUploadIAC");
    List<CaseDTO> caseDTOs = FixtureHelper.loadClassFixtures(CaseDTO[].class);
    // TODO BRES
    // List<CaseTypeDTO> caseTypeDTOs = FixtureHelper.loadClassFixtures(CaseTypeDTO[].class);
    List<CaseGroupDTO> caseGroupDTOs = FixtureHelper.loadClassFixtures(CaseGroupDTO[].class);
    // TODO BRES
    // List<AddressDTO> addressDTOsUprn1234 = FixtureHelper.loadClassFixtures(AddressDTO[].class, "uprn1234");
    List<CaseEventDTO> caseEventDTOs = FixtureHelper.loadClassFixtures(CaseEventDTO[].class);
    List<ActionPlan> actionPlans = FixtureHelper.loadClassFixtures(ActionPlan[].class);
    List<CaseEventDTO> caseEventDTOsPost = FixtureHelper.loadClassFixtures(CaseEventDTO[].class, "post");

    // wire up mock responses: all four cases resolvable this time
    Mockito.when(
        actionSvcStateTransitionManager.transition(ActionState.SUBMITTED,
            ActionDTO.ActionEvent.REQUEST_DISTRIBUTED))
        .thenReturn(ActionState.PENDING);
    Mockito.when(actionTypeRepo.findAll()).thenReturn(actionTypes);
    Mockito.when(actionPlanRepo.findOne(any(Integer.class))).thenReturn(actionPlans.get(0));
    Mockito
        .when(actionRepo.findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdInitialContact"),
            anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class)))
        .thenReturn(actionsHHIC);
    Mockito.when(
        actionRepo.findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdUploadIAC"),
            anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class)))
        .thenReturn(actionsHHIACLOAD);
    // TODO BRES
    // Mockito.when(caseSvcClientService.getCaseType(eq(1))).thenReturn(caseTypeDTOs.get(0));
    Mockito.when(caseSvcClientService.getCaseGroup(eq(1))).thenReturn(caseGroupDTOs.get(0));
    Mockito.when(caseSvcClientService.getCase(eq(1))).thenReturn(caseDTOs.get(0));
    Mockito.when(caseSvcClientService.getCase(eq(2))).thenReturn(caseDTOs.get(1));
    Mockito.when(caseSvcClientService.getCase(eq(3))).thenReturn(caseDTOs.get(2));
    Mockito.when(caseSvcClientService.getCase(eq(4))).thenReturn(caseDTOs.get(3));
    // TODO BRES
    // Mockito.when(caseSvcClientService.getAddress(eq(FAKE_UPRN)))
    //     .thenReturn(addressDTOsUprn1234.get(0));
    Mockito.when(caseSvcClientService.getCaseEvents(eq(1)))
        .thenReturn(Arrays.asList(new CaseEventDTO[] {caseEventDTOs.get(0)}));
    Mockito.when(caseSvcClientService.getCaseEvents(eq(2)))
        .thenReturn(Arrays.asList(new CaseEventDTO[] {caseEventDTOs.get(1)}));
    Mockito.when(caseSvcClientService.getCaseEvents(eq(3)))
        .thenReturn(Arrays.asList(new CaseEventDTO[] {caseEventDTOs.get(2)}));
    Mockito.when(caseSvcClientService.getCaseEvents(eq(4)))
        .thenReturn(Arrays.asList(new CaseEventDTO[] {caseEventDTOs.get(3)}));
    Mockito.when(
        caseSvcClientService.createNewCaseEvent(any(Action.class), eq(CategoryDTO.CategoryType.ACTION_CREATED)))
        .thenReturn(caseEventDTOsPost.get(2));

    // let it roll
    actionDistributor.distribute();

    // assert the right calls were made
    verify(actionTypeRepo).findAll();
    verify(actionRepo).findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdInitialContact"),
        anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class));
    verify(actionRepo).findByActionTypeNameAndStateInAndActionIdNotIn(eq("HouseholdUploadIAC"),
        anyListOf(ActionState.class), anyListOf(BigInteger.class), any(Pageable.class));
    verify(caseSvcClientService).getCase(eq(1));
    verify(caseSvcClientService).getCase(eq(2));
    verify(caseSvcClientService).getCase(eq(3));
    verify(caseSvcClientService).getCase(eq(4));
    // TODO BRES
    // verify(caseSvcClientService, times(4)).getAddress(eq(FAKE_UPRN));
    verify(caseSvcClientService).getCaseEvents(eq(1));
    verify(caseSvcClientService).getCaseEvents(eq(2));
    verify(caseSvcClientService).getCaseEvents(eq(3));
    verify(caseSvcClientService).getCaseEvents(eq(4));
    verify(caseSvcClientService, times(4)).createNewCaseEvent(any(Action.class),
        eq(CategoryDTO.CategoryType.ACTION_CREATED));
    verify(instructionPublisher, times(1)).sendInstructions(eq("Printer"), anyListOf(ActionRequest.class),
        anyListOf(ActionCancel.class));
    verify(instructionPublisher, times(1)).sendInstructions(eq("HHSurvey"), anyListOf(ActionRequest.class),
        anyListOf(ActionCancel.class));
  }
}
/*
 * Copyright (C) 2015 8tory, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.github.retrofit2;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import org.json.JSONObject;
import org.json.JSONException;

import static org.mockito.Mockito.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import com.github.*;

import rx.Observable;
import rx.functions.*;

import java.util.Arrays;
import java.util.List;

import retrofit.client.Response;
import retrofit.RetrofitError;

import java.io.*;
import java.util.concurrent.CountDownLatch;

/**
 * Integration tests for the generated GitHub retrofit client. Most tests hit
 * the live GitHub API and assert on well-known contributors of the
 * yongjhih/retrofit repository; several are placeholders or kept
 * commented-out because they mutate remote state.
 */
public class MainTest {

    /**
     * Drains the HTTP body of a retrofit {@link Response} into a String.
     * <p>
     * IO errors are deliberately swallowed (best-effort read) and whatever was
     * read so far is returned, matching the inline code this helper replaces;
     * callers assert on the returned content, so a truncated read still fails
     * the test visibly.
     *
     * @param response the HTTP response whose body should be read
     * @return the body concatenated line-by-line (line separators dropped)
     */
    private static String readBody(Response response) {
        StringBuilder sb = new StringBuilder();
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(response.getBody().in()));
            String read = reader.readLine();
            while (read != null) {
                sb.append(read);
                read = reader.readLine();
            }
        } catch (IOException e) {
            // best-effort: return what was read before the failure
        }
        return sb.toString();
    }

    /** GET with a baseUrl configured on the service: contributor logins stream back. */
    @Test
    public void testGetWithBaseUrl() {
        GitHub github = GitHub.create();
        List<String> contributorsWithoutAuth = github.contributorsWithoutAuth("yongjhih", "retrofit").map(new Func1<Contributor, String>() {
            @Override public String call(Contributor contributor) {
                System.out.println(contributor.login);
                return contributor.login;
            }
        }).toList().toBlocking().single();
        assertTrue(contributorsWithoutAuth.contains("JakeWharton"));
        assertTrue(contributorsWithoutAuth.size() > 1);
    }

    /** GET where the endpoint carries no baseUrl annotation. */
    @Test
    public void testGetWithoutBaseUrl() {
        GitHub github = GitHub.create();
        List<String> contributorsWithoutAuth = github.contributorsWithoutBaseUrl("yongjhih", "retrofit").map(new Func1<Contributor, String>() {
            @Override public String call(Contributor contributor) {
                System.out.println(contributor.login);
                return contributor.login;
            }
        }).toList().toBlocking().single();
        assertTrue(contributorsWithoutAuth.contains("JakeWharton"));
        assertTrue(contributorsWithoutAuth.size() > 1);
    }

    /** GET against a fully dynamic URL passed at call time. */
    @Test
    public void testGetWithUrl() {
        GitHub github = GitHub.create();
        List<String> contributors = github.contributorsDynamic("https://api.github.com/repos/yongjhih/retrofit/contributors").map(new Func1<Contributor, String>() {
            @Override public String call(Contributor contributor) {
                System.out.println(contributor.login);
                return contributor.login;
            }
        }).toList().toBlocking().single();
        assertTrue(contributors.contains("JakeWharton"));
        assertTrue(contributors.size() > 1);
    }

    /* //tested
    @Test
    public void testPostBody() {
        GitHub github = GitHub.create();
        Repository localRepo = new Repository();
        localRepo.name = "tmp";
        Repository repository = github.createRepository(localRepo).toBlocking().first();
        assertTrue(repository.name.equals("tmp"));
    }
    */

    /* //tested
    @Test
    public void testDelete() {
        GitHub github = GitHub.create();
        Repository localRepo = new Repository();
        localRepo.name = "tmp";
        Response response = github.deleteRepository("yongjhih", "tmp");
        assertTrue(response.getStatus() == 204);
    }
    */

    // Placeholder tests below are intentionally empty; they mark HTTP verbs /
    // converter combinations that still need coverage.
    @Test
    public void testPut() {
    }

    @Test
    public void testPostField() {
    }

    @Test
    public void testGetFile() {
    }

    @Test
    public void testPostFile() {
    }

    @Test
    public void testPostPart() {
    }

    @Test
    public void testGetWithHeader() {
    }

    @Test
    public void testPutTypedFile() {
    }

    @Test
    public void testPutTypedFileTypedString() {
    }

    @Test
    public void testGson() {
    }

    @Test
    public void testJackson() {
    }

    @Test
    public void testMoshi() {
    }

    @Test
    public void testLoganSquare() {
    }

    @Test
    public void testAutoJson() {
    }

    /* //tested
    @Test
    public void testStar() {
        GitHub github = GitHub.create();
        List<String> contributorsWithoutAuth = github.star("YOUR_TOKEN_HERE", "yongjhih", "retrofit").map(new Func1<Contributor, String>() {
            @Override public String call(Contributor contributor) {
                System.out.println(contributor.login);
                return contributor.login;
            }
        }).toList().toBlocking().single();
    }

    //tested
    @Test
    public void testUnstar() {
        GitHub github = GitHub.create();
        List<String> contributorsWithoutAuth = github.unstar("YOUR_TOKEN_HERE", "yongjhih", "retrofit").map(new Func1<Contributor, String>() {
            @Override public String call(Contributor contributor) {
                System.out.println(contributor.login);
                return contributor.login;
            }
        }).toList().toBlocking().single();
    }
    */

    /** Raw {@link Response} delivered through an Observable; body read via {@link #readBody}. */
    @Test
    public void testObservableResponse() {
        GitHub github = GitHub.create();
        String string = github.contributorResponse("yongjhih", "retrofit").map(new Func1<Response, String>() {
            @Override public String call(Response response) {
                return readBody(response);
            }
        }).toBlocking().single();
        System.out.println(string);
        assertTrue(string.contains("JakeWharton"));
    }

    /** Raw {@link Response} delivered through a retrofit Callback; latch keeps the test alive. */
    @Test
    public void testCallbackResponse() {
        final CountDownLatch signal = new CountDownLatch(1);
        GitHub github = GitHub.create();
        github.contributorResponse("yongjhih", "retrofit", new retrofit.Callback<Response>() {
            @Override public void success(Response response, Response response2) {
                String string = readBody(response);
                System.out.println(string);
                assertTrue(string.contains("JakeWharton"));
                signal.countDown();
            }
            @Override public void failure(RetrofitError retrofitError) {
                assertTrue(false);
                signal.countDown();
            }
        });
        try {
            signal.await();
        } catch (InterruptedException e) {
            assertTrue(false);
        }
    }

    /** Typed list delivered through a retrofit Callback. */
    @Test
    public void testCallbackList() {
        final CountDownLatch signal = new CountDownLatch(1);
        GitHub github = GitHub.create();
        github.contributorList("yongjhih", "retrofit", new retrofit.Callback<List<Contributor>>() {
            @Override public void success(List<Contributor> list, Response response) {
                boolean contains = false;
                for (Contributor c : list) {
                    System.out.println(c.login);
                    if (!c.login.equals("yongjhih")) continue;
                    contains = true;
                }
                assertTrue(contains);
                signal.countDown();
            }
            @Override public void failure(RetrofitError retrofitError) {
                assertTrue(false);
                signal.countDown();
            }
        });
        try {
            signal.await();
        } catch (InterruptedException e) {
            assertTrue(false);
        }
    }

    /** Synchronous (blocking) typed-list endpoint. */
    @Test
    public void testBlockingList() {
        GitHub github = GitHub.create();
        List<Contributor> list = github.contributorListBlocking("yongjhih", "retrofit");
        boolean contains = false;
        for (Contributor c : list) {
            System.out.println(c.login);
            if (!c.login.equals("yongjhih")) continue;
            contains = true;
        }
        assertTrue(contains);
    }

    /** Synchronous raw-{@link Response} endpoint; body read via {@link #readBody}. */
    @Test
    public void testBlockingResponse() {
        GitHub github = GitHub.create();
        Response response = github.contributorResponseBlocking("yongjhih", "retrofit");
        String string = readBody(response);
        System.out.println(string);
        assertTrue(string.contains("JakeWharton"));
    }

    /** Endpoint configured with a method-level Gson converter. */
    @Test
    public void testMethodGson() {
        GitHub github = GitHub.create();
        List<String> contributorsWithoutAuth = github.contributorsWithGson("yongjhih", "retrofit").map(new Func1<Contributor, String>() {
            @Override public String call(Contributor contributor) {
                System.out.println(contributor.login);
                return contributor.login;
            }
        }).toList().toBlocking().single();
        assertTrue(contributorsWithoutAuth.contains("JakeWharton"));
        assertTrue(contributorsWithoutAuth.size() > 1);
    }

    /** Endpoint configured with a method-level Gson converter using a date format. */
    @Test
    public void testMethodDateGson() {
        GitHub github = GitHub.create();
        List<String> contributorsWithoutAuth = github.contributorsWithDateGson("yongjhih", "retrofit").map(new Func1<Contributor, String>() {
            @Override public String call(Contributor contributor) {
                System.out.println(contributor.login);
                return contributor.login;
            }
        }).toList().toBlocking().single();
        assertTrue(contributorsWithoutAuth.contains("JakeWharton"));
        assertTrue(contributorsWithoutAuth.size() > 1);
    }

    @Test
    public void testAndroidAuthenticationRequestInterceptor() {
    }

    @Test
    public void testGlobalHeaders() {
    }

    @Test
    public void testRetryHeaders() {
    }

    @Test
    public void testRequestInterceptor() {
    }

    @Test
    public void testRequestInterceptorOnMethod() {
    }

    @Test
    public void testErrorHandler() {
    }

    @Test
    public void testErrorHandlerOnMethod() {
    }

    @Test
    public void testLogLevel() {
    }

    @Test
    public void testConverterOnMethod() {
    }
}
package se.bjurr.gitchangelog.api.model;

import static se.bjurr.gitchangelog.internal.settings.SettingsIssueType.CUSTOM;
import static se.bjurr.gitchangelog.internal.settings.SettingsIssueType.GITHUB;
import static se.bjurr.gitchangelog.internal.settings.SettingsIssueType.GITLAB;
import static se.bjurr.gitchangelog.internal.settings.SettingsIssueType.JIRA;
import static se.bjurr.gitchangelog.internal.settings.SettingsIssueType.NOISSUE;
import static se.bjurr.gitchangelog.internal.settings.SettingsIssueType.REDMINE;
import static se.bjurr.gitchangelog.internal.util.Preconditions.checkNotNull;
import static se.bjurr.gitchangelog.internal.util.Preconditions.checkState;
import static se.bjurr.gitchangelog.internal.util.Preconditions.isNullOrEmpty;
import static se.bjurr.gitchangelog.internal.util.Preconditions.nullToEmpty;

import java.io.Serializable;
import java.util.List;

import se.bjurr.gitchangelog.api.model.interfaces.IAuthors;
import se.bjurr.gitchangelog.api.model.interfaces.ICommits;
import se.bjurr.gitchangelog.internal.settings.SettingsIssueType;

/**
 * An issue referenced from one or more commits, together with the commits and
 * authors that mention it. String properties are normalized so that a missing
 * value is stored as the empty string, with a matching {@code has*} flag.
 */
public class Issue implements ICommits, IAuthors, Serializable {
  private static final long serialVersionUID = -7571341639024417199L;

  private final List<Commit> commits;
  private final List<Author> authors;

  /** Like JIRA, or GitHub. */
  private final String name;

  /** Like the title of a Jira. */
  private final String title;

  private final boolean hasTitle;

  /** Like the actual Jira, JIR-ABC. */
  private final String issue;

  private final boolean hasIssue;

  /** A link to the issue, http://..... */
  private final String link;

  private final boolean hasLink;

  /** Type of issue, perhaps Story, Bug and etc */
  private final String type;

  private final boolean hasType;
  private final boolean hasDescription;
  private final String description;

  /** Labels on the issue, for GitHub it may be bug, enhancement, ... */
  private final List<String> labels;

  private final boolean hasLabels;
  private final boolean hasLinkedIssues;
  private final SettingsIssueType issueType;
  private final List<String> linkedIssues;

  /**
   * Creates an issue.
   *
   * @param commits commits mentioning the issue; must not be empty
   * @param authors authors of those commits; must not be null
   * @param name tracker name; must not be null
   * @param title issue title, may be null (stored as empty)
   * @param issue issue key, may be null (stored as empty)
   * @param issueType kind of tracker the issue belongs to; must not be null
   * @param description issue description, may be null (stored as empty)
   * @param link URL of the issue, may be null (stored as empty)
   * @param type issue type label, may be null (stored as empty)
   * @param linkedIssues related issue keys, may be null
   * @param labels issue labels, may be null
   */
  public Issue(
      final List<Commit> commits,
      final List<Author> authors,
      final String name,
      final String title,
      final String issue,
      final SettingsIssueType issueType,
      final String description,
      final String link,
      final String type,
      final List<String> linkedIssues,
      final List<String> labels) {
    checkState(!commits.isEmpty(), "commits");
    this.commits = commits;
    this.authors = checkNotNull(authors, "authors");
    this.name = checkNotNull(name, "name");
    // Each optional string is stored empty-when-missing with a paired flag.
    this.title = nullToEmpty(title);
    this.hasTitle = !isNullOrEmpty(title);
    this.description = nullToEmpty(description);
    this.hasDescription = !isNullOrEmpty(description);
    this.issue = nullToEmpty(issue);
    this.issueType = checkNotNull(issueType, "issueType");
    this.hasIssue = !isNullOrEmpty(issue);
    this.link = nullToEmpty(link);
    this.hasLink = !isNullOrEmpty(link);
    this.hasType = !isNullOrEmpty(type);
    this.type = nullToEmpty(type);
    this.hasLabels = labels != null && !labels.isEmpty();
    this.hasLinkedIssues = linkedIssues != null && !linkedIssues.isEmpty();
    this.linkedIssues = linkedIssues;
    this.labels = labels;
  }

  /** Null-safe equality, matching Objects.equals semantics. */
  private static boolean eq(final Object a, final Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /** Null-safe hash, matching Objects.hashCode semantics. */
  private static int hashOf(final Object o) {
    return o == null ? 0 : o.hashCode();
  }

  /** Boolean hash using the standard 1231/1237 constants (same as Boolean.hashCode). */
  private static int hashOf(final boolean b) {
    return b ? 1231 : 1237;
  }

  public SettingsIssueType getIssueType() {
    return issueType;
  }

  public boolean isJira() {
    return issueType == JIRA;
  }

  public boolean isRedmine() {
    return issueType == REDMINE;
  }

  public boolean isGitHub() {
    return issueType == GITHUB;
  }

  public boolean isGitLab() {
    return issueType == GITLAB;
  }

  public boolean isCustom() {
    return issueType == CUSTOM;
  }

  public boolean isNoIssue() {
    return issueType == NOISSUE;
  }

  public String getTitle() {
    return title;
  }

  public boolean getHasTitle() {
    return hasTitle;
  }

  public boolean getHasIssue() {
    return hasIssue;
  }

  public boolean getHasLabels() {
    return hasLabels;
  }

  public boolean getHasType() {
    return hasType;
  }

  public boolean getHasLink() {
    return hasLink;
  }

  public String getIssue() {
    return issue;
  }

  public String getLink() {
    return link;
  }

  public String getName() {
    return name;
  }

  public String getType() {
    return type;
  }

  public boolean getHasDescription() {
    return hasDescription;
  }

  public String getDescription() {
    return description;
  }

  @Override
  public List<Author> getAuthors() {
    return authors;
  }

  @Override
  public List<Commit> getCommits() {
    return commits;
  }

  public List<String> getLabels() {
    return labels;
  }

  public List<String> getLinkedIssues() {
    return linkedIssues;
  }

  public boolean getHasLinkedIssues() {
    return hasLinkedIssues;
  }

  @Override
  public String toString() {
    return "Issue: " + this.issue + " Title: " + this.title;
  }

  @Override
  public int hashCode() {
    // Same prime-31 accumulation (and 1231/1237 boolean constants) as the
    // classic generated form, so hash values are unchanged.
    final int prime = 31;
    int result = 1;
    result = prime * result + hashOf(authors);
    result = prime * result + hashOf(commits);
    result = prime * result + hashOf(description);
    result = prime * result + hashOf(hasDescription);
    result = prime * result + hashOf(hasIssue);
    result = prime * result + hashOf(hasLabels);
    result = prime * result + hashOf(hasLink);
    result = prime * result + hashOf(hasLinkedIssues);
    result = prime * result + hashOf(hasTitle);
    result = prime * result + hashOf(hasType);
    result = prime * result + hashOf(issue);
    result = prime * result + hashOf(issueType);
    result = prime * result + hashOf(labels);
    result = prime * result + hashOf(link);
    result = prime * result + hashOf(linkedIssues);
    result = prime * result + hashOf(name);
    result = prime * result + hashOf(title);
    result = prime * result + hashOf(type);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || this.getClass() != obj.getClass()) {
      return false;
    }
    final Issue other = (Issue) obj;
    // Field comparisons in the same order as the generated original.
    return eq(this.authors, other.authors)
        && eq(this.commits, other.commits)
        && eq(this.description, other.description)
        && this.hasDescription == other.hasDescription
        && this.hasIssue == other.hasIssue
        && this.hasLabels == other.hasLabels
        && this.hasLink == other.hasLink
        && this.hasLinkedIssues == other.hasLinkedIssues
        && this.hasTitle == other.hasTitle
        && this.hasType == other.hasType
        && eq(this.issue, other.issue)
        && this.issueType == other.issueType
        && eq(this.labels, other.labels)
        && eq(this.link, other.link)
        && eq(this.linkedIssues, other.linkedIssues)
        && eq(this.name, other.name)
        && eq(this.title, other.title)
        && eq(this.type, other.type);
  }
}
package com.oxande.xmlswing.example.explorer;

import java.awt.BorderLayout;
import java.awt.Cursor;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.lang.Runnable;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JToolBar;
import javax.swing.JTree;
import javax.swing.ScrollPaneConstants;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.border.Border;

/**
 * Class created automatically -- DO NOT UPDATE MANUALLY.
 * This class has been created based on a XML file and must
 * be extended by your own code. The following code only
 * provide an easy way to obtain a basic GUI.
 */
public class AbstractExplorerFrame extends JFrame {

    // --- Widgets generated from the XML UI definition ---
    private JPanel jpanel1 = new JPanel();          // root container used as the content pane
    private JMenuBar jmenubar1 = new JMenuBar();
    private JMenu jmenu1 = new JMenu();             // "File" menu
    private JMenu jmenu2 = new JMenu();             // "Look & Feel" submenu of "File"
    public ButtonGroup group1 = new ButtonGroup();  // groups the look-and-feel radio items
    private JMenuItem jmenuitem1 = new JMenuItem(); // "Exit" item
    private JMenu jmenu3 = new JMenu();             // "Help" menu
    private JMenuItem jmenuitem2 = new JMenuItem(); // "About" item
    protected JLabel statusBar = new JLabel();      // status message shown at the bottom
    protected JToolBar toolbar = new JToolBar();
    private JSplitPane jsplitpane1 = new JSplitPane();
    protected JTree pathTree = new JTree();         // tree shown in the top part of the split pane
    protected JList fileList = new JList();         // list shown in the bottom part of the split pane

    /**
     * Runnable that applies a status-bar text change; scheduled via
     * SwingUtilities.invokeLater so the Swing component is only touched
     * on the Event Dispatch Thread.
     */
    private class SetStatusMessageClass implements Runnable {
        private String input;

        public SetStatusMessageClass(String input) {
            this.input = input;
        }

        public void run() {
            // String.valueOf turns a null input into the text "null" rather than throwing.
            statusBar.setText(String.valueOf(input));
        }
    }

    /**
     * Minimal mutable Map.Entry implementation over two strings.
     * toString() returns the value, which makes instances display
     * naturally inside Swing list/tree models.
     */
    public class SimpleMapEntry implements java.util.Map.Entry<String,String> {
        private String key;
        private String value;

        public String getValue() {
            return value;
        }

        public String getKey() {
            return key;
        }

        public String toString() {
            return this.value;
        }

        public String setValue(String value) {
            // Returns the previous value, per the Map.Entry contract.
            String old = this.value;
            this.value = value.toString();
            return old;
        }

        public SimpleMapEntry(java.lang.String key, java.lang.String value) {
            this.key = key;
            this.value = value;
        }
    }

    /**
     * Returns the text currently shown in the status bar.
     */
    public String getStatusMessage() {
        return statusBar.getText();
    }

    /**
     * Sets the status-bar text. Safe to call from any thread: the actual
     * update is deferred to the Event Dispatch Thread.
     */
    public void setStatusMessage(String in) {
        SwingUtilities.invokeLater(new SetStatusMessageClass(in));
    }

    /**
     * Builds the whole user interface: menus, tool bar, status bar and the
     * central split pane. Must be called once before showing the frame
     * (see main()). Generated code -- the statement order matters.
     */
    public void initComponents() {
        jpanel1.setLayout(new BorderLayout());

        // "File" menu with mnemonic F.
        jmenu1.setText("File");
        jmenu1.setMnemonic(java.awt.event.KeyEvent.VK_F);
        jmenu1.setDisplayedMnemonicIndex(0);

        // "Look & Feel" submenu: one radio item per installed look and feel.
        jmenu2.setText("Look & Feel");
        jmenu2.setMnemonic(java.awt.event.KeyEvent.VK_L);
        jmenu2.setDisplayedMnemonicIndex(0);
        UIManager.LookAndFeelInfo[] landf1 = UIManager.getInstalledLookAndFeels();
        for(int i = 0; i < landf1.length; i++) {
            JRadioButtonMenuItem item = new JRadioButtonMenuItem(landf1[i].getName());
            // The action command carries the L&F class name consumed by the listener below.
            item.setActionCommand(landf1[i].getClassName());
            item.addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent event) {
                    try {
                        // Show a wait cursor while the whole component tree is re-skinned.
                        Cursor old = getCursor();
                        setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
                        UIManager.setLookAndFeel(event.getActionCommand());
                        SwingUtilities.updateComponentTreeUI(getRootPane());
                        setCursor(old);
                    }
                    catch(Exception e) {
                        // Does nothing: an unavailable look & feel simply leaves the current one active.
                    }
                }
            });
            jmenu2.add(item);
            group1.add(item);
            // Pre-select the radio item matching the currently active look and feel.
            if( UIManager.getLookAndFeel().getName().equals(landf1[i].getName()) ){
                item.setSelected(true);
            }
        }
        jmenu1.add(jmenu2);

        // "Exit" item; its Action delegates to exit().
        jmenuitem1.setText("Exit");
        jmenuitem1.setMnemonic(java.awt.event.KeyEvent.VK_X);
        jmenuitem1.setDisplayedMnemonicIndex(1);
        jmenuitem1.setAction(new AbstractAction() {
            {
                // Instance initializer: configure the Action's display properties.
                putValue(Action.NAME, "Exit");
                putValue(Action.MNEMONIC_KEY, java.awt.event.KeyEvent.VK_X);
                putValue(Action.DISPLAYED_MNEMONIC_INDEX_KEY, new Integer(1));
            }
            public void actionPerformed(ActionEvent e) {
                exit();
            }
        } );
        jmenu1.add(jmenuitem1);
        jmenubar1.add(jmenu1);

        // "Help" menu with the "About" item; its Action delegates to showAboutDlg().
        jmenu3.setText("Help");
        jmenu3.setMnemonic(java.awt.event.KeyEvent.VK_H);
        jmenu3.setDisplayedMnemonicIndex(0);
        jmenuitem2.setText("About");
        jmenuitem2.setMnemonic(java.awt.event.KeyEvent.VK_A);
        jmenuitem2.setDisplayedMnemonicIndex(0);
        jmenuitem2.setAction(new AbstractAction() {
            {
                putValue(Action.NAME, "About");
                putValue(Action.MNEMONIC_KEY, java.awt.event.KeyEvent.VK_A);
                putValue(Action.DISPLAYED_MNEMONIC_INDEX_KEY, new Integer(0));
            }
            public void actionPerformed(ActionEvent e) {
                showAboutDlg();
            }
        } );
        jmenu3.add(jmenuitem2);
        jmenubar1.add(jmenu3);
        this.setJMenuBar(jmenubar1);

        // Status bar at the bottom, with a lowered bevel border and plain font.
        Border border1 = BorderFactory.createLoweredBevelBorder();
        statusBar.setBorder(border1);
        statusBar.setText("Ready.");
        statusBar.setFont(statusBar.getFont().deriveFont( Font.PLAIN ));
        jpanel1.add(statusBar, BorderLayout.SOUTH);

        // Tool bar at the top.
        toolbar.setOrientation(JToolBar.HORIZONTAL);
        jpanel1.add(toolbar, "North");

        // Central split pane: tree (top) and file list (bottom), each in a scroll pane.
        JScrollPane scrollPane1 = new JScrollPane(pathTree,ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED,ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED);
        jsplitpane1.setTopComponent(scrollPane1);
        JScrollPane scrollPane2 = new JScrollPane(fileList,ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED,ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED);
        jsplitpane1.setBottomComponent(scrollPane2);
        jpanel1.add(jsplitpane1, BorderLayout.CENTER);

        // Frame-level settings.
        this.setContentPane(jpanel1);
        this.setPreferredSize(new java.awt.Dimension(300,150));
        this.setName("com.oxande.xmlswing.example.explorer.AbstractExplorerFrame");
        this.setLocationByPlatform(true);
        this.setTitle("File Explorer");
        this.pack();
    }

    /**
     * Called by the menu item <i>Help/About</i>.
     * Placeholder implementation -- subclasses are expected to override.
     */
    protected void showAboutDlg() {
        JOptionPane.showMessageDialog(jmenuitem2, "Not implemented.",jmenuitem2.getText(), JOptionPane.INFORMATION_MESSAGE);
    }

    /**
     * Called by the menu item <i>File/Exit</i>.
     * Placeholder implementation -- subclasses are expected to override.
     */
    protected void exit() {
        JOptionPane.showMessageDialog(jmenuitem1, "Not implemented.",jmenuitem1.getText(), JOptionPane.INFORMATION_MESSAGE);
    }

    /**
     * Stand-alone entry point: builds the frame and shows it.
     */
    public static void main(String[] args) {
        AbstractExplorerFrame appl = new AbstractExplorerFrame();
        appl.initComponents();
        appl.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
        appl.setVisible(true);
    }
}
/*
 * Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
 * license agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership. Crate licenses
 * this file to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * However, if you have executed another commercial license agreement
 * with Crate these terms will supersede the license and you may use the
 * software solely pursuant to the terms of the relevant commercial agreement.
 */

package io.crate.analyze.relations;

import io.crate.exceptions.AmbiguousColumnException;
import io.crate.exceptions.ColumnUnknownException;
import io.crate.exceptions.RelationUnknown;
import io.crate.expression.symbol.Symbol;
import io.crate.metadata.RelationName;
import io.crate.metadata.Schemas;
import io.crate.metadata.table.Operation;
import io.crate.sql.tree.QualifiedName;
import org.elasticsearch.test.ESTestCase;
import io.crate.testing.DummyRelation;
import org.junit.Test;

import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;

import static io.crate.testing.SymbolMatchers.isField;

/**
 * Tests for resolving (possibly qualified) column names against a set of
 * source relations, covering both the fully-qualified provider and the
 * single-relation name provider.
 */
public class FieldProviderTest extends ESTestCase {

    private AnalyzedRelation dummyRelation = new DummyRelation("name");

    private Map<QualifiedName, AnalyzedRelation> dummySources =
        Map.of(new QualifiedName("dummy"), dummyRelation);

    /** Splits {@code dottedName} on dots and wraps the parts in a QualifiedName. */
    private static QualifiedName newQN(String dottedName) {
        String[] parts = dottedName.split("\\.");
        return new QualifiedName(Arrays.asList(parts));
    }

    /** Builds a fully-qualified-name provider over the given sources, defaulting to the "doc" schema. */
    private static FullQualifiedNameFieldProvider newFQFieldProvider(Map<QualifiedName, AnalyzedRelation> sources) {
        Map<RelationName, AnalyzedRelation> byName = sources.entrySet().stream()
            .collect(Collectors.toMap(
                e -> RelationName.of(e.getKey(), "doc"),
                Map.Entry::getValue
            ));
        return new FullQualifiedNameFieldProvider(
            byName,
            ParentRelations.NO_PARENTS,
            Schemas.DOC_SCHEMA_NAME
        );
    }

    @Test
    public void testInvalidSources() throws Exception {
        // A source key with three parts (catalog.schema.table) must be rejected.
        expectedException.expectMessage("Table with more than 2 QualifiedName parts is not supported. Only <schema>.<tableName> works.");
        AnalyzedRelation rel = new DummyRelation("name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("too.many.parts"), rel));
        provider.resolveField(newQN("name"), null, Operation.READ);
    }

    @Test
    public void testUnknownSchema() throws Exception {
        expectedException.expect(RelationUnknown.class);
        expectedException.expectMessage("Relation 'invalid.table' unknown");
        FieldProvider<Symbol> provider = newFQFieldProvider(dummySources);
        provider.resolveField(newQN("invalid.table.name"), null, Operation.READ);
    }

    @Test
    public void testUnknownTable() throws Exception {
        expectedException.expect(RelationUnknown.class);
        expectedException.expectMessage("Relation 'dummy.invalid' unknown");
        FieldProvider<Symbol> provider = newFQFieldProvider(dummySources);
        provider.resolveField(newQN("dummy.invalid.name"), null, Operation.READ);
    }

    @Test
    public void testSysColumnWithoutSourceRelation() throws Exception {
        // sys.* tables are not implicitly available; they must appear in the sources.
        expectedException.expect(RelationUnknown.class);
        expectedException.expectMessage("Relation 'sys.nodes' unknown");
        FieldProvider<Symbol> provider = newFQFieldProvider(dummySources);
        provider.resolveField(newQN("sys.nodes.name"), null, Operation.READ);
    }

    @Test
    public void testRegularColumnUnknown() throws Exception {
        expectedException.expect(ColumnUnknownException.class);
        FieldProvider<Symbol> provider = newFQFieldProvider(dummySources);
        provider.resolveField(newQN("age"), null, Operation.READ);
    }

    @Test
    public void testResolveDynamicReference() throws Exception {
        expectedException.expect(ColumnUnknownException.class);
        expectedException.expectMessage("Column age unknown");
        AnalyzedRelation tInBar = new DummyRelation("name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("bar.t"), tInBar));
        provider.resolveField(newQN("t.age"), null, Operation.READ);
    }

    @Test
    public void testMultipleSourcesWithDynamicReferenceAndReference() throws Exception {
        AnalyzedRelation tInBar = new DummyRelation(new RelationName("bar", "t"), "name");
        AnalyzedRelation tInFoo = new DummyRelation(new RelationName("foo", "t"), "name");
        AnalyzedRelation aInFoo = new DummyRelation(new RelationName("foo", "a"), "name");
        AnalyzedRelation tInCustom = new DummyRelation(new RelationName("custom", "t"), "tags");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(
            newQN("bar.t"), tInBar,
            newQN("foo.t"), tInFoo,
            newQN("foo.a"), aInFoo,
            newQN("custom.t"), tInCustom));

        // Fully qualified: unambiguous even though three relations are named "t".
        Symbol resolved = provider.resolveField(newQN("foo.t.name"), null, Operation.READ);
        assertThat(resolved, isField("name", tInFoo.relationName()));

        // Unqualified: only custom.t has a "tags" column, so it resolves uniquely.
        Symbol tags = provider.resolveField(newQN("tags"), null, Operation.READ);
        assertThat(tags, isField("tags", tInCustom.relationName()));

        // Table-qualified: "a" matches exactly one relation.
        resolved = provider.resolveField(newQN("a.name"), null, Operation.READ);
        assertThat(resolved, isField("name", aInFoo.relationName()));
    }

    @Test
    public void testRelationOutputFromAlias() throws Exception {
        // t.name from doc.foo t
        AnalyzedRelation aliased = new DummyRelation(new RelationName("doc", "t"), "name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(
            newQN("t"), aliased));
        Symbol resolved = provider.resolveField(newQN("t.name"), null, Operation.READ);
        assertThat(resolved, isField("name", aliased.relationName()));
    }

    @Test
    public void testRelationOutputFromSingleColumnName() throws Exception {
        // select name from t
        AnalyzedRelation rel = new DummyRelation("name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("doc.t"), rel));
        Symbol resolved = provider.resolveField(newQN("name"), null, Operation.READ);
        assertThat(resolved, isField("name", rel.relationName()));
    }

    @Test
    public void testRelationOutputFromSchemaTableColumnName() throws Exception {
        // doc.t.name from t.name
        AnalyzedRelation rel = new DummyRelation(new RelationName("doc", "t"), "name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("doc.t"), rel));
        Symbol resolved = provider.resolveField(newQN("doc.t.name"), null, Operation.INSERT);
        assertThat(resolved, isField("name", rel.relationName()));
    }

    @Test
    public void testTooManyParts() throws Exception {
        expectedException.expect(IllegalArgumentException.class);
        FieldProvider<Symbol> provider = newFQFieldProvider(dummySources);
        provider.resolveField(newQN("a.b.c.d"), null, Operation.READ);
    }

    @Test
    public void testTooManyPartsNameFieldResolver() throws Exception {
        // The single-relation provider accepts plain column names only.
        expectedException.expect(IllegalArgumentException.class);
        expectedException.expectMessage("Column reference \"a.b\" has too many parts. A column must not have a schema or a table here.");
        FieldProvider<Symbol> provider = new NameFieldProvider(dummyRelation);
        provider.resolveField(newQN("a.b"), null, Operation.READ);
    }

    @Test
    public void testRelationFromTwoTablesWithSameNameDifferentSchemaIsAmbiguous() throws Exception {
        // select t.name from custom.t.name, doc.t.name
        expectedException.expect(AmbiguousColumnException.class);
        expectedException.expectMessage("Column \"name\" is ambiguous");
        FieldProvider<Symbol> provider = newFQFieldProvider(
            Map.of(
                newQN("custom.t"), new DummyRelation("name"),
                newQN("doc.t"), new DummyRelation("name"))
        );
        provider.resolveField(newQN("t.name"), null, Operation.READ);
    }

    @Test
    public void testRelationFromTwoTables() throws Exception {
        // select name from doc.t, custom.t -- only doc.t has "name", so no ambiguity.
        FieldProvider<Symbol> provider = newFQFieldProvider(
            Map.of(
                newQN("custom.t"), new DummyRelation("address"),
                newQN("doc.t"), new DummyRelation("name"))
        );
        provider.resolveField(newQN("t.name"), null, Operation.READ);
    }

    @Test
    public void testSimpleFieldResolver() throws Exception {
        // select name from doc.t
        AnalyzedRelation rel = new DummyRelation("name");
        FieldProvider<Symbol> provider = new NameFieldProvider(rel);
        Symbol resolved = provider.resolveField(newQN("name"), null, Operation.READ);
        assertThat(resolved, isField("name", rel.relationName()));
    }

    @Test
    public void testSimpleResolverUnknownColumn() throws Exception {
        expectedException.expect(ColumnUnknownException.class);
        expectedException.expectMessage("Column unknown unknown");
        AnalyzedRelation rel = new DummyRelation("name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("doc.t"), rel));
        provider.resolveField(newQN("unknown"), null, Operation.READ);
    }

    @Test
    public void testColumnSchemaResolver() throws Exception {
        // Quoted identifiers preserve case for schema, table and column.
        AnalyzedRelation quoted = new DummyRelation(new RelationName("Foo", "Bar"), "\"Name\"");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("\"Foo\".\"Bar\""), quoted));
        Symbol resolved = provider.resolveField(newQN("\"Foo\".\"Bar\".\"Name\""), null, Operation.READ);
        assertThat(resolved, isField("\"Name\"", quoted.relationName()));
    }

    @Test
    public void testColumnSchemaResolverFail() throws Exception {
        // Unquoted lookup must not match a case-sensitive quoted column.
        expectedException.expect(ColumnUnknownException.class);
        expectedException.expectMessage("Column name unknown");
        AnalyzedRelation quoted = new DummyRelation("\"Name\"");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("bar"), quoted));
        provider.resolveField(newQN("bar.name"), null, Operation.READ);
    }

    @Test
    public void testAliasRelationNameResolver() throws Exception {
        AnalyzedRelation quoted = new DummyRelation(new RelationName("doc", "Bar"), "name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("\"Bar\""), quoted));
        Symbol resolved = provider.resolveField(newQN("\"Bar\".name"), null, Operation.READ);
        assertThat(resolved, isField("name", quoted.relationName()));
    }

    @Test
    public void testAliasRelationNameResolverFail() throws Exception {
        // Quoted "Bar" does not match the lower-case source name "bar".
        expectedException.expect(RelationUnknown.class);
        expectedException.expectMessage("Relation 'doc.\"Bar\"' unknown");
        AnalyzedRelation rel = new DummyRelation("name");
        FieldProvider<Symbol> provider = newFQFieldProvider(Map.of(newQN("bar"), rel));
        provider.resolveField(newQN("\"Bar\".name"), null, Operation.READ);
    }
}
/* Evernote API sample code, structured as a simple command line application that demonstrates several API calls. To compile (Unix): javac -classpath ../../target/evernote-api-*.jar EDAMDemo.java To run: java -classpath ../../target/evernote-api-*.jar EDAMDemo Full documentation of the Evernote API can be found at http://dev.evernote.com/documentation/cloud/ */ import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.security.MessageDigest; import java.util.Iterator; import java.util.List; import com.evernote.auth.EvernoteAuth; import com.evernote.auth.EvernoteService; import com.evernote.clients.ClientFactory; import com.evernote.clients.NoteStoreClient; import com.evernote.clients.UserStoreClient; import com.evernote.edam.error.EDAMErrorCode; import com.evernote.edam.error.EDAMSystemException; import com.evernote.edam.error.EDAMUserException; import com.evernote.edam.notestore.NoteFilter; import com.evernote.edam.notestore.NoteList; import com.evernote.edam.type.Data; import com.evernote.edam.type.Note; import com.evernote.edam.type.NoteSortOrder; import com.evernote.edam.type.Notebook; import com.evernote.edam.type.Resource; import com.evernote.edam.type.ResourceAttributes; import com.evernote.edam.type.Tag; import com.evernote.thrift.transport.TTransportException; public class EDAMDemo { /*************************************************************************** * You must change the following values before running this sample code * ***************************************************************************/ // Real applications authenticate with Evernote using OAuth, but for the // purpose of exploring the API, you can get a developer token that allows // you to access your own Evernote account. 
To get a developer token, visit // https://sandbox.evernote.com/api/DeveloperToken.action private static final String AUTH_TOKEN = "your developer token"; /*************************************************************************** * You shouldn't need to change anything below here to run sample code * ***************************************************************************/ private UserStoreClient userStore; private NoteStoreClient noteStore; private String newNoteGuid; /** * Console entry point. */ public static void main(String args[]) throws Exception { String token = System.getenv("AUTH_TOKEN"); if (token == null) { token = AUTH_TOKEN; } if ("your developer token".equals(token)) { System.err.println("Please fill in your developer token"); System.err .println("To get a developer token, go to https://sandbox.evernote.com/api/DeveloperToken.action"); return; } EDAMDemo demo = new EDAMDemo(token); try { demo.listNotes(); demo.createNote(); demo.searchNotes(); demo.updateNoteTag(); } catch (EDAMUserException e) { // These are the most common error types that you'll need to // handle // EDAMUserException is thrown when an API call fails because a // parameter was invalid. if (e.getErrorCode() == EDAMErrorCode.AUTH_EXPIRED) { System.err.println("Your authentication token is expired!"); } else if (e.getErrorCode() == EDAMErrorCode.INVALID_AUTH) { System.err.println("Your authentication token is invalid!"); } else if (e.getErrorCode() == EDAMErrorCode.QUOTA_REACHED) { System.err.println("Your authentication token is invalid!"); } else { System.err.println("Error: " + e.getErrorCode().toString() + " parameter: " + e.getParameter()); } } catch (EDAMSystemException e) { System.err.println("System error: " + e.getErrorCode().toString()); } catch (TTransportException t) { System.err.println("Networking error: " + t.getMessage()); } } /** * Intialize UserStore and NoteStore clients. During this step, we * authenticate with the Evernote web service. 
All of this code is boilerplate * - you can copy it straight into your application. */ public EDAMDemo(String token) throws Exception { // Set up the UserStore client and check that we can speak to the server EvernoteAuth evernoteAuth = new EvernoteAuth(EvernoteService.SANDBOX, token); ClientFactory factory = new ClientFactory(evernoteAuth); userStore = factory.createUserStoreClient(); boolean versionOk = userStore.checkVersion("Evernote EDAMDemo (Java)", com.evernote.edam.userstore.Constants.EDAM_VERSION_MAJOR, com.evernote.edam.userstore.Constants.EDAM_VERSION_MINOR); if (!versionOk) { System.err.println("Incompatible Evernote client protocol version"); System.exit(1); } // Set up the NoteStore client noteStore = factory.createNoteStoreClient(); } /** * Retrieve and display a list of the user's notes. */ private void listNotes() throws Exception { // List the notes in the user's account System.out.println("Listing notes:"); // First, get a list of all notebooks List<Notebook> notebooks = noteStore.listNotebooks(); for (Notebook notebook : notebooks) { System.out.println("Notebook: " + notebook.getName()); // Next, search for the first 100 notes in this notebook, ordering // by creation date NoteFilter filter = new NoteFilter(); filter.setNotebookGuid(notebook.getGuid()); filter.setOrder(NoteSortOrder.CREATED.getValue()); filter.setAscending(true); NoteList noteList = noteStore.findNotes(filter, 0, 100); List<Note> notes = noteList.getNotes(); for (Note note : notes) { System.out.println(" * " + note.getTitle()); } } System.out.println(); } /** * Create a new note containing a little text and the Evernote icon. */ private void createNote() throws Exception { // To create a new note, simply create a new Note object and fill in // attributes such as the note's title. 
Note note = new Note(); note.setTitle("Test note from EDAMDemo.java"); String fileName = "enlogo.png"; String mimeType = "image/png"; // To include an attachment such as an image in a note, first create a // Resource // for the attachment. At a minimum, the Resource contains the binary // attachment // data, an MD5 hash of the binary data, and the attachment MIME type. // It can also // include attributes such as filename and location. Resource resource = new Resource(); resource.setData(readFileAsData(fileName)); resource.setMime(mimeType); ResourceAttributes attributes = new ResourceAttributes(); attributes.setFileName(fileName); resource.setAttributes(attributes); // Now, add the new Resource to the note's list of resources note.addToResources(resource); // To display the Resource as part of the note's content, include an // <en-media> // tag in the note's ENML content. The en-media tag identifies the // corresponding // Resource using the MD5 hash. String hashHex = bytesToHex(resource.getData().getBodyHash()); // The content of an Evernote note is represented using Evernote Markup // Language // (ENML). The full ENML specification can be found in the Evernote API // Overview // at http://dev.evernote.com/documentation/cloud/chapters/ENML.php String content = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<!DOCTYPE en-note SYSTEM \"http://xml.evernote.com/pub/enml2.dtd\">" + "<en-note>" + "<span style=\"color:green;\">Here's the Evernote logo:</span><br/>" + "<en-media type=\"image/png\" hash=\"" + hashHex + "\"/>" + "</en-note>"; note.setContent(content); // Finally, send the new note to Evernote using the createNote method // The new Note object that is returned will contain server-generated // attributes such as the new note's unique GUID. 
Note createdNote = noteStore.createNote(note); newNoteGuid = createdNote.getGuid(); System.out.println("Successfully created a new note with GUID: " + newNoteGuid); System.out.println(); } /** * Search a user's notes and display the results. */ private void searchNotes() throws Exception { // Searches are formatted according to the Evernote search grammar. // Learn more at // http://dev.evernote.com/documentation/cloud/chapters/Searching_notes.php // In this example, we search for notes that have the term "EDAMDemo" in // the title. // This should return the sample note that we created in this demo app. String query = "intitle:EDAMDemo"; // To search for notes with a specific tag, we could do something like // this: // String query = "tag:tagname"; // To search for all notes with the word "elephant" anywhere in them: // String query = "elephant"; NoteFilter filter = new NoteFilter(); filter.setWords(query); filter.setOrder(NoteSortOrder.UPDATED.getValue()); filter.setAscending(false); // Find the first 50 notes matching the search System.out.println("Searching for notes matching query: " + query); NoteList notes = noteStore.findNotes(filter, 0, 50); System.out.println("Found " + notes.getTotalNotes() + " matching notes"); Iterator<Note> iter = notes.getNotesIterator(); while (iter.hasNext()) { Note note = iter.next(); System.out.println("Note: " + note.getTitle()); // Note objects returned by findNotes() only contain note attributes // such as title, GUID, creation date, update date, etc. The note // content // and binary resource data are omitted, although resource metadata // is included. // To get the note content and/or binary resources, call getNote() // using the note's GUID. Note fullNote = noteStore.getNote(note.getGuid(), true, true, false, false); System.out.println("Note contains " + fullNote.getResourcesSize() + " resources"); System.out.println(); } } /** * Update the tags assigned to a note. 
This method demonstrates how only * modified fields need to be sent in calls to updateNote. */ private void updateNoteTag() throws Exception { // When updating a note, it is only necessary to send Evernote the // fields that have changed. For example, if the Note that you // send via updateNote does not have the resources field set, the // Evernote server will not change the note's existing resources. // If you wanted to remove all resources from a note, you would // set the resources field to a new List<Resource> that is empty. // If you are only changing attributes such as the note's title or tags, // you can save time and bandwidth by omitting the note content and // resources. // In this sample code, we fetch the note that we created earlier, // including // the full note content and all resources. A real application might // do something with the note, then update a note attribute such as a // tag. Note note = noteStore.getNote(newNoteGuid, true, true, false, false); // Do something with the note contents or resources... // Now, update the note. Because we're not changing them, we unset // the content and resources. All we want to change is the tags. note.unsetContent(); note.unsetResources(); // We want to apply the tag "TestTag" note.addToTagNames("TestTag"); // Now update the note. Because we haven't set the content or resources, // they won't be changed. noteStore.updateNote(note); System.out.println("Successfully added tag to existing note"); // To prove that we didn't destroy the note, let's fetch it again and // verify that it still has 1 resource. 
note = noteStore.getNote(newNoteGuid, false, false, false, false); System.out.println("After update, note has " + note.getResourcesSize() + " resource(s)"); System.out.println("After update, note tags are: "); for (String tagGuid : note.getTagGuids()) { Tag tag = noteStore.getTag(tagGuid); System.out.println("* " + tag.getName()); } System.out.println(); } /** * Helper method to read the contents of a file on disk and create a new Data * object. */ private static Data readFileAsData(String fileName) throws Exception { String filePath = new File(EDAMDemo.class.getResource( EDAMDemo.class.getCanonicalName() + ".class").getPath()).getParent() + File.separator + fileName; // Read the full binary contents of the file FileInputStream in = new FileInputStream(filePath); ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); byte[] block = new byte[10240]; int len; while ((len = in.read(block)) >= 0) { byteOut.write(block, 0, len); } in.close(); byte[] body = byteOut.toByteArray(); // Create a new Data object to contain the file contents Data data = new Data(); data.setSize(body.length); data.setBodyHash(MessageDigest.getInstance("MD5").digest(body)); data.setBody(body); return data; } /** * Helper method to convert a byte array to a hexadecimal string. */ public static String bytesToHex(byte[] bytes) { StringBuilder sb = new StringBuilder(); for (byte hashByte : bytes) { int intVal = 0xff & hashByte; if (intVal < 0x10) { sb.append('0'); } sb.append(Integer.toHexString(intVal)); } return sb.toString(); } }
package org.zaproxy.zap.eventBus; import java.security.InvalidParameterException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.log4j.Logger; /** * A very simple event bus * @author simon * */ public class SimpleEventBus implements EventBus { private Map<String, RegisteredPublisher> nameToPublisher = new HashMap<String, RegisteredPublisher>(); private List<RegisteredConsumer> danglingConsumers = new ArrayList<RegisteredConsumer>(); private static Logger log = Logger.getLogger(SimpleEventBus.class); @Override public void registerPublisher(EventPublisher publisher, String[] eventTypes) { if (publisher == null) { throw new InvalidParameterException("Publisher must not be null"); } if (eventTypes == null || eventTypes.length == 0) { throw new InvalidParameterException("At least one event type must be specified"); } if (this.nameToPublisher.get(publisher.getPublisherName()) != null) { throw new InvalidParameterException("Publisher with name " + publisher.getPublisherName() + " already registered by " + this.nameToPublisher.get(publisher.getPublisherName()) .getPublisher().getClass().getCanonicalName()); } log.debug("registerPublisher " + publisher.getPublisherName()); RegisteredPublisher regProd = new RegisteredPublisher(publisher, eventTypes); this.nameToPublisher.put(publisher.getPublisherName(), regProd); // Check to see if there are any cached consumers for (RegisteredConsumer regCon : this.danglingConsumers) { if (regCon.getPublisherName().equals(publisher.getPublisherName())) { regProd.addComsumer(regCon); this.danglingConsumers.remove(regCon); break; } } } @Override public void unregisterPublisher(EventPublisher publisher) { if (publisher == null) { throw new InvalidParameterException("Publisher must not be null"); } log.debug("unregisterPublisher " + publisher.getPublisherName()); RegisteredPublisher regPub = nameToPublisher.remove(publisher.getPublisherName()); if (regPub == null) { throw 
new InvalidParameterException("Publisher with name " + publisher.getPublisherName() + " not registered"); } } @Override public void registerConsumer(EventConsumer consumer, String publisherName) { this.registerConsumer(consumer, publisherName, null); } @Override public void registerConsumer(EventConsumer consumer, String publisherName, String[] eventTypes) { if (consumer == null) { throw new InvalidParameterException("Consumer must not be null"); } log.debug("registerConsumer " + consumer.getClass().getCanonicalName() + " for " + publisherName); RegisteredPublisher publisher = this.nameToPublisher.get(publisherName); if (publisher == null) { // Cache until the publisher registers this.danglingConsumers.add(new RegisteredConsumer(consumer, eventTypes, publisherName)); } else { publisher.addComsumer(consumer, eventTypes); } } @Override public void unregisterConsumer(EventConsumer consumer, String publisherName) { if (consumer == null) { throw new InvalidParameterException("Consumer must not be null"); } log.debug("unregisterConsumer " + consumer.getClass().getCanonicalName() + " for " + publisherName); RegisteredPublisher publisher = this.nameToPublisher.get(publisherName); if (publisher != null) { publisher.removeComsumer(consumer); } else { // Check to see if its cached waiting for the publisher for (RegisteredConsumer regCon : this.danglingConsumers) { if (regCon.getConsumer().equals(consumer)) { this.danglingConsumers.remove(regCon); break; } } } } @Override public void publishSyncEvent(EventPublisher publisher, Event event) { if (publisher == null) { throw new InvalidParameterException("Publisher must not be null"); } RegisteredPublisher regPublisher = this.nameToPublisher.get(publisher.getPublisherName()); if (regPublisher == null) { throw new InvalidParameterException("Publisher not registered: " + publisher.getPublisherName()); } log.debug("publishSyncEvent " + event.getEventType() + " from " + publisher.getPublisherName()); boolean foundType = false; for 
(String type : regPublisher.getEventTypes()) { if (event.getEventType().equals(type)) { foundType = true; break; } } if (! foundType) { throw new InvalidParameterException("Event type: " + event.getEventType() + " not registered for publisher: " + publisher.getPublisherName()); } for (RegisteredConsumer regCon : regPublisher.getConsumers()) { String[] eventTypes = regCon.getEventTypes(); boolean isListeningforEvent = false; if (eventTypes == null) { // They are listening for all events from this publisher isListeningforEvent = true; } else { for (String type : eventTypes) { if (event.getEventType().equals(type)) { isListeningforEvent = true; break; } } } if (isListeningforEvent) { try { regCon.getConsumer().eventReceived(event); } catch (Exception e) { log.error(e.getMessage(), e); } } } } private class RegisteredConsumer { private EventConsumer consumer; private String[] eventTypes; private String publisherName; public RegisteredConsumer(EventConsumer consumer, String[] eventTypes) { this.consumer = consumer; this.eventTypes = eventTypes; } public RegisteredConsumer(EventConsumer consumer, String[] eventTypes, String publisherName) { this.consumer = consumer; this.eventTypes = eventTypes; this.publisherName = publisherName; } public EventConsumer getConsumer() { return consumer; } public String[] getEventTypes() { return eventTypes; } public String getPublisherName() { return publisherName; } } private class RegisteredPublisher { private EventPublisher publisher; private String[] eventTypes; private List<RegisteredConsumer> consumers = new ArrayList<RegisteredConsumer>(); public RegisteredPublisher(EventPublisher publisher, String[] eventTypes) { super(); this.publisher = publisher; this.eventTypes = eventTypes; } public EventPublisher getPublisher() { return publisher; } public String[] getEventTypes() { return eventTypes; } public List<RegisteredConsumer> getConsumers() { return consumers; } public void addComsumer(RegisteredConsumer consumer) { 
this.consumers.add(consumer); } public void addComsumer(EventConsumer consumer, String [] eventTypes) { this.consumers.add(new RegisteredConsumer(consumer, eventTypes)); } public void removeComsumer(EventConsumer consumer) { for (RegisteredConsumer cons : consumers) { if (cons.equals(consumer)) { this.consumers.remove(cons); return; } } } } }
/*
 * Copyright 2010 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp.jsonml;

import org.json.JSONArray;
import org.json.JSONObject;

import java.util.Iterator;

/**
 * JsonMLUtil contains utilities for the JsonML object.
 *
 * @author dhans@google.com (Daniel Hans)
 */
public class JsonMLUtil {

  /**
   * Checks if the specified JsonML element represents an expression.
   */
  public static boolean isExpression(JsonML element) {
    switch (element.getType()) {
      case ArrayExpr:
      case AssignExpr:
      case BinaryExpr:
      case CallExpr:
      case ConditionalExpr:
      case CountExpr:
      case DeleteExpr:
      case EvalExpr:
      case FunctionExpr:
      case IdExpr:
      case InvokeExpr:
      case LiteralExpr:
      case LogicalAndExpr:
      case LogicalOrExpr:
      case MemberExpr:
      case NewExpr:
      case ObjectExpr:
      case RegExpExpr:
      case ThisExpr:
      case TypeofExpr:
      case UnaryExpr:
        return true;
      default:
        return false;
    }
  }

  /**
   * Parses JSON string which contains serialized JsonML content.
   *
   * @param jsonml string representation of JsonML
   * @return root element of a JsonML tree
   */
  public static JsonML parseString(String jsonml) throws Exception {
    return parseElement(new JSONArray(jsonml));
  }

  /**
   * Converts one JSONArray element (and, recursively, its children) into a
   * JsonML node. Element layout: [0] type name, [1] attribute object,
   * [2..] child elements.
   */
  private static JsonML parseElement(JSONArray element) throws Exception {
    JsonML jsonMLElement = new JsonML(TagType.valueOf(element.getString(0)));

    // set attributes for the JsonML element
    JSONObject attrs = element.getJSONObject(1);
    Iterator<?> it = attrs.keys();
    while (it.hasNext()) {
      String key = (String) it.next();
      Object value = attrs.get(key);
      TagAttr tag = TagAttr.get(key);
      // an unsupported attribute
      if (tag == null) {
        continue;
      }

      // we want all numbers to be stored as double values
      if (value instanceof Number) {
        value = ((Number) value).doubleValue();
      }

      switch (tag) {
        case NAME:
        case BODY:
        case FLAGS:
        case OP:
        case TYPE:
        case IS_PREFIX:
        case LABEL:
          jsonMLElement.setAttribute(tag, value);
          break;
        case VALUE:
          // we do not want to deal with JSONObject.NULL: its equals(null)
          // returns true by design, which is what this sentinel check relies on
          if (value != null && value.equals(null)) {
            value = null;
          }
          if (value instanceof Number) {
            jsonMLElement.setAttribute(tag, ((Number) value).doubleValue());
          } else {
            jsonMLElement.setAttribute(tag, value);
          }
          break;
        default:
      }
    }

    // recursively set children for the JsonML element
    for (int i = 2; i < element.length(); ++i) {
      jsonMLElement.appendChild(parseElement(element.getJSONArray(i)));
    }
    return jsonMLElement;
  }

  /**
   * Compares two specified JsonML trees.
   *
   * Two JsonML nodes are considered to be equal when the following conditions
   * are met:
   *
   * - have the same type
   * - have the same attributes from the list of attributes to compare
   * - have the same number of children
   * - nodes in each pair of corresponding children are equal
   *
   * Two JsonML trees are equal, if their roots are equal.
   *
   * When two nodes are compared, only the following attributes are taken
   * into account:
   * TagAttr.BODY, TagAttr.FLAGS, TagAttr.IS_PREFIX, TagAttr.LABEL,
   * TagAttr.NAME, TagAttr.OP, TagAttr.TYPE, TagAttr.VALUE
   * Generally, the comparator does not care about debugging attributes.
   *
   * @return a string describing the first inequality found (string
   *     representations of both trees and of the mismatching subtrees),
   *     or null if the trees are equal
   */
  public static String compare(JsonML tree1, JsonML tree2) {
    return (new JsonMLComparator(tree1, tree2)).compare();
  }

  /**
   * Returns true if the trees are equal, false otherwise.
   */
  static boolean compareSilent(JsonML tree1, JsonML tree2) {
    return (new JsonMLComparator(tree1, tree2)).compareSilent();
  }

  /**
   * Helper class which actually compares two given JsonML trees.
   */
  private static class JsonMLComparator {
    private static final TagAttr[] ATTRS_TO_COMPARE = {
        TagAttr.BODY, TagAttr.FLAGS, TagAttr.IS_PREFIX, TagAttr.LABEL,
        TagAttr.NAME, TagAttr.OP, TagAttr.TYPE, TagAttr.VALUE
    };
    private JsonML treeA;
    private JsonML treeB;
    // First mismatching pair of subtrees; both null when the trees are equal.
    private JsonML mismatchA;
    private JsonML mismatchB;

    JsonMLComparator(JsonML treeA, JsonML treeB) {
      this.treeA = treeA;
      this.treeB = treeB;
      if (compareElements(treeA, treeB)) {
        mismatchA = null;
        mismatchB = null;
      }
    }

    /** Records the mismatching pair; always returns false for caller chaining. */
    private boolean setMismatch(JsonML a, JsonML b) {
      mismatchA = a;
      mismatchB = b;
      return false;
    }

    /**
     * Check if two elements are equal (including comparing their children).
     */
    private boolean compareElements(JsonML a, JsonML b) {
      // the elements are considered to be equal if they are both null
      if (a == null || b == null) {
        if (a == null && b == null) {
          return true;
        } else {
          return setMismatch(a, b);
        }
      }

      // the elements themselves have to be equivalent
      if (!areEquivalent(a, b)) {
        return setMismatch(a, b);
      }

      // they both have to have the same number of children
      if (a.childrenSize() != b.childrenSize()) {
        return setMismatch(a, b);
      }

      // all the children has to be the same
      Iterator<JsonML> itA = a.getChildren().listIterator();
      Iterator<JsonML> itB = b.getChildren().listIterator();
      while (itA.hasNext()) {
        if (!compareElements(itA.next(), itB.next())) {
          return false;
        }
      }
      return true;
    }

    /**
     * Checks if two elements are semantically the same.
     */
    private boolean areEquivalent(JsonML a, JsonML b) {
      // both elements must have the same type
      if (a.getType() != b.getType()) {
        return false;
      }
      for (TagAttr attr : ATTRS_TO_COMPARE) {
        if (!compareAttribute(attr, a, b)) {
          return false;
        }
      }
      return true;
    }

    /**
     * Compares a single attribute of two elements. Values that are not equal
     * as objects get a second chance as numbers, since the same number may be
     * represented by different classes (or by a numeric string).
     */
    private boolean compareAttribute(TagAttr attr, JsonML a, JsonML b) {
      Object valueA = a.getAttributes().get(attr);
      Object valueB = b.getAttributes().get(attr);

      // none of the elements have the attribute
      if (valueA == null && valueB == null) {
        return true;
      }
      // only one of the elements has the attribute
      if (valueA == null || valueB == null) {
        return false;
      }

      // check if corresponding values are equal
      if (!(valueA.equals(valueB))) {
        Double doubleA = null, doubleB = null;
        try {
          if (valueA instanceof Number) {
            doubleA = ((Number) valueA).doubleValue();
          } else if (valueA instanceof String) {
            doubleA = Double.valueOf((String) valueA);
          } else {
            return false;
          }
          if (valueB instanceof Number) {
            doubleB = ((Number) valueB).doubleValue();
          } else if (valueB instanceof String) {
            doubleB = Double.valueOf((String) valueB);
          } else {
            return false;
          }
        } catch (NumberFormatException e) {
          // BUG FIX: at least one of the strings is not numeric, so the
          // values are simply not equal. Previously this exception escaped
          // the comparison and crashed the caller.
          return false;
        }
        if (!doubleA.equals(doubleB)) {
          return false;
        }
      }
      return true;
    }

    private boolean compareSilent() {
      return mismatchA == null && mismatchB == null;
    }

    private String compare() {
      if (compareSilent()) {
        return null;
      }
      return "The trees are not equal: " +
          "\n\nTree1:\n " + treeA.toStringTree() +
          "\n\nTree2:\n " + treeB.toStringTree() +
          "\n\nSubtree1:\n " + mismatchA.toStringTree() +
          "\n\nSubtree2:\n " + mismatchB.toStringTree();
    }
  }
}
/*
 * Copyright (C) 2008 ZXing authors
 * Copyright 2011 Robert Theis
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mercuriete.mrz.reader;

import com.mercuriete.mrz.reader.camera.CameraManager;

import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.Gravity;
import android.widget.Toast;

/**
 * This class handles all the messaging which comprises the state machine for capture.
 *
 * Two modes are visible in this code: a "continuous" mode that keeps requesting OCR
 * decodes of preview frames, and a shutter-driven mode where a decode is requested
 * only when the user presses a (hardware or on-screen) shutter button.
 *
 * The code for this class was adapted from the ZXing project: http://code.google.com/p/zxing/
 */
final class CaptureActivityHandler extends Handler {

  private static final String TAG = CaptureActivityHandler.class.getSimpleName();

  private final CaptureActivity activity;
  // Background thread that performs the actual OCR decoding.
  private final DecodeThread decodeThread;
  // NOTE(review): 'state' is static, so it is shared by all handler instances
  // rather than being per-activity — looks suspicious for a per-capture state
  // machine; confirm this is intentional before changing it.
  private static State state;
  private final CameraManager cameraManager;

  // States of the capture state machine. PREVIEW/PREVIEW_PAUSED drive the
  // shutter-button mode; CONTINUOUS/CONTINUOUS_PAUSED drive continuous mode.
  private enum State {
    PREVIEW,
    PREVIEW_PAUSED,
    CONTINUOUS,
    CONTINUOUS_PAUSED,
    SUCCESS,
    DONE
  }

  CaptureActivityHandler(CaptureActivity activity, CameraManager cameraManager,
      boolean isContinuousModeActive) {
    this.activity = activity;
    this.cameraManager = cameraManager;

    // Start ourselves capturing previews (and decoding if using continuous recognition mode).
    cameraManager.startPreview();

    decodeThread = new DecodeThread(activity);
    decodeThread.start();

    if (isContinuousModeActive) {
      state = State.CONTINUOUS;

      // Show the shutter and torch buttons
      activity.setButtonVisibility(true);

      // Display a "be patient" message while first recognition request is running
      activity.setStatusViewForContinuous();

      restartOcrPreviewAndDecode();
    } else {
      state = State.SUCCESS;

      // Show the shutter and torch buttons
      activity.setButtonVisibility(true);

      restartOcrPreview();
    }
  }

  /**
   * Dispatches messages from the UI and decode threads. Continuous-mode
   * results loop back into another decode request while the state remains
   * CONTINUOUS; one-shot results move the machine to SUCCESS or PREVIEW.
   */
  @Override
  public void handleMessage(Message message) {
    switch (message.what) {
      case R.id.restart_preview:
        restartOcrPreview();
        break;
      case R.id.ocr_continuous_decode_failed:
        DecodeHandler.resetDecodeState();
        try {
          activity.handleOcrContinuousDecode((OcrResultFailure) message.obj);
        } catch (NullPointerException e) {
          Log.w(TAG, "got bad OcrResultFailure", e);
        }
        if (state == State.CONTINUOUS) {
          restartOcrPreviewAndDecode();
        }
        break;
      case R.id.ocr_continuous_decode_succeeded:
        DecodeHandler.resetDecodeState();
        try {
          activity.handleOcrContinuousDecode((OcrResult) message.obj);
        } catch (NullPointerException e) {
          // Continue
        }
        if (state == State.CONTINUOUS) {
          restartOcrPreviewAndDecode();
        }
        break;
      case R.id.ocr_decode_succeeded:
        state = State.SUCCESS;
        activity.setShutterButtonClickable(true);
        activity.handleOcrDecode((OcrResult) message.obj);
        break;
      case R.id.ocr_decode_failed:
        state = State.PREVIEW;
        activity.setShutterButtonClickable(true);
        Toast toast = Toast.makeText(activity.getBaseContext(),
            "OCR failed. Please try again.", Toast.LENGTH_SHORT);
        toast.setGravity(Gravity.TOP, 0, 0);
        toast.show();
        break;
    }
  }

  /**
   * Pauses continuous decoding: moves the state machine to CONTINUOUS_PAUSED
   * and drops any pending decode messages.
   */
  void stop() {
    // TODO See if this should be done by sending a quit message to decodeHandler as is done
    // below in quitSynchronously().

    Log.d(TAG, "Setting state to CONTINUOUS_PAUSED.");
    state = State.CONTINUOUS_PAUSED;
    removeMessages(R.id.ocr_continuous_decode);
    removeMessages(R.id.ocr_decode);
    removeMessages(R.id.ocr_continuous_decode_failed);
    removeMessages(R.id.ocr_continuous_decode_succeeded);
    // TODO are these removeMessages() calls doing anything?

    // Freeze the view displayed to the user.
    // CameraManager.get().stopPreview();
  }

  /**
   * Resumes continuous decoding after a stop(); has no effect in any state
   * other than CONTINUOUS_PAUSED.
   */
  void resetState() {
    //Log.d(TAG, "in restart()");
    if (state == State.CONTINUOUS_PAUSED) {
      Log.d(TAG, "Setting state to CONTINUOUS");
      state = State.CONTINUOUS;
      restartOcrPreviewAndDecode();
    }
  }

  /**
   * Shuts the state machine down: stops the camera preview, waits briefly for
   * the decode thread to finish, and purges any queued decode messages.
   */
  void quitSynchronously() {
    state = State.DONE;
    if (cameraManager != null) {
      cameraManager.stopPreview();
    }
    //Message quit = Message.obtain(decodeThread.getHandler(), R.id.quit);
    try {
      //quit.sendToTarget(); // This always gives "sending message to a Handler on a dead thread"

      // Wait at most half a second; should be enough time, and onPause() will timeout quickly
      decodeThread.join(500L);
    } catch (InterruptedException e) {
      Log.w(TAG, "Caught InterruptedException in quitSyncronously()", e);
      // continue
      // Restore interrupted state...
      Thread.currentThread().interrupt();
    } catch (RuntimeException e) {
      Log.w(TAG, "Caught RuntimeException in quitSyncronously()", e);
      // continue
    } catch (Exception e) {
      Log.w(TAG, "Caught unknown Exception in quitSynchronously()", e);
    }

    // Be absolutely sure we don't send any queued up messages
    removeMessages(R.id.ocr_continuous_decode);
    removeMessages(R.id.ocr_decode);
  }

  /**
   * Start the preview, but don't try to OCR anything until the user presses the shutter button.
   */
  private void restartOcrPreview() {
    // Display the shutter and torch buttons
    activity.setButtonVisibility(true);

    if (state == State.SUCCESS) {
      state = State.PREVIEW;

      // Draw the viewfinder.
      activity.drawViewfinder();
    }
  }

  /**
   * Send a decode request for realtime OCR mode
   */
  private void restartOcrPreviewAndDecode() {
    // Continue capturing camera frames
    cameraManager.startPreview();

    // Continue requesting decode of images
    cameraManager.requestOcrDecode(decodeThread.getHandler(), R.id.ocr_continuous_decode);
    activity.drawViewfinder();
  }

  /**
   * Request OCR on the current preview frame.
   */
  private void ocrDecode() {
    state = State.PREVIEW_PAUSED;
    cameraManager.requestOcrDecode(decodeThread.getHandler(), R.id.ocr_decode);
  }

  /**
   * Request OCR when the hardware shutter button is clicked.
   */
  void hardwareShutterButtonClick() {
    // Ensure that we're not in continuous recognition mode
    if (state == State.PREVIEW) {
      ocrDecode();
    }
  }

  /**
   * Request OCR when the on-screen shutter button is clicked.
   */
  void shutterButtonClick() {
    // Disable further clicks on this button until OCR request is finished
    activity.setShutterButtonClickable(false);
    ocrDecode();
  }
}
// Copyright 2016 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.android.xml; import com.android.resources.ResourceType; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.android.AndroidDataWritingVisitor; import com.google.devtools.build.android.AndroidDataWritingVisitor.StartTag; import com.google.devtools.build.android.AndroidResourceClassWriter; import com.google.devtools.build.android.FullyQualifiedName; import com.google.devtools.build.android.XmlResourceValue; import com.google.devtools.build.android.XmlResourceValues; import com.google.devtools.build.android.proto.SerializeFormat; import com.google.devtools.build.android.proto.SerializeFormat.DataValueXml.Builder; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Path; import java.util.Arrays; import java.util.Objects; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import javax.xml.namespace.QName; /** * Represents a simple Android resource xml value. 
* * <p> * There is a class of resources that are simple name/value pairs: string * (http://developer.android.com/guide/topics/resources/string-resource.html), bool * (http://developer.android.com/guide/topics/resources/more-resources.html#Bool), color * (http://developer.android.com/guide/topics/resources/more-resources.html#Color), and dimen * (http://developer.android.com/guide/topics/resources/more-resources.html#Dimension). These are * defined in xml as &lt;<em>resource type</em> name="<em>name</em>" value="<em>value</em>"&gt;. In * the interest of keeping the parsing svelte, these are represented by a single class. */ @Immutable public class SimpleXmlResourceValue implements XmlResourceValue { static final QName TAG_BOOL = QName.valueOf("bool"); static final QName TAG_COLOR = QName.valueOf("color"); static final QName TAG_DIMEN = QName.valueOf("dimen"); static final QName TAG_DRAWABLE = QName.valueOf("drawable"); static final QName TAG_FRACTION = QName.valueOf("fraction"); static final QName TAG_INTEGER = QName.valueOf("integer"); static final QName TAG_ITEM = QName.valueOf("item"); static final QName TAG_PUBLIC = QName.valueOf("public"); static final QName TAG_STRING = QName.valueOf("string"); /** Provides an enumeration resource type and simple value validation. */ public enum Type { BOOL(TAG_BOOL) { @Override public boolean validate(String value) { final String cleanValue = value.toLowerCase().trim(); return "true".equals(cleanValue) || "false".equals(cleanValue); } }, COLOR(TAG_COLOR) { @Override public boolean validate(String value) { // TODO(corysmith): Validate the hex color. return true; } }, DIMEN(TAG_DIMEN) { @Override public boolean validate(String value) { // TODO(corysmith): Validate the dimension type. return true; } }, DRAWABLE(TAG_DRAWABLE) { @Override public boolean validate(String value) { // TODO(corysmith): Validate the drawable type. 
return true; } }, FRACTION(TAG_FRACTION) { @Override public boolean validate(String value) { // TODO(corysmith): Validate the fraction type. return true; } }, INTEGER(TAG_INTEGER) { @Override public boolean validate(String value) { // TODO(corysmith): Validate the integer type. return true; } }, ITEM(TAG_ITEM) { @Override public boolean validate(String value) { // TODO(corysmith): Validate the item type. return true; } }, PUBLIC(TAG_PUBLIC) { @Override public boolean validate(String value) { // TODO(corysmith): Validate the public type. return true; } }, STRING(TAG_STRING) { @Override public boolean validate(String value) { return true; } }; private QName tagName; Type(QName tagName) { this.tagName = tagName; } abstract boolean validate(String value); public static Type from(ResourceType resourceType) { for (Type valueType : values()) { if (valueType.tagName.getLocalPart().equals(resourceType.getName())) { return valueType; } else if (resourceType.getName().equalsIgnoreCase(valueType.name())) { return valueType; } } throw new IllegalArgumentException( String.format( "%s resource type not found in available types: %s", resourceType, Arrays.toString(values()))); } } private final ImmutableMap<String, String> attributes; @Nullable private final String value; private final Type valueType; public static XmlResourceValue createWithValue(Type valueType, String value) { return of(valueType, ImmutableMap.<String, String>of(), value); } public static XmlResourceValue withAttributes( Type valueType, ImmutableMap<String, String> attributes) { return of(valueType, attributes, null); } public static XmlResourceValue itemWithFormattedValue( ResourceType resourceType, String format, String value) { return of(Type.ITEM, ImmutableMap.of("type", resourceType.getName(), "format", format), value); } public static XmlResourceValue itemWithValue( ResourceType resourceType, String value) { return of(Type.ITEM, ImmutableMap.of("type", resourceType.getName()), value); } public static 
XmlResourceValue itemPlaceHolderFor(ResourceType resourceType) { return withAttributes(Type.ITEM, ImmutableMap.of("type", resourceType.getName())); } public static XmlResourceValue of( Type valueType, ImmutableMap<String, String> attributes, @Nullable String value) { return new SimpleXmlResourceValue(valueType, attributes, value); } private SimpleXmlResourceValue( Type valueType, ImmutableMap<String, String> attributes, String value) { this.valueType = valueType; this.value = value; this.attributes = attributes; } @Override public void write( FullyQualifiedName key, Path source, AndroidDataWritingVisitor mergedDataWriter) { StartTag startTag = mergedDataWriter .define(key) .derivedFrom(source) .startTag(valueType.tagName) .named(key) .addAttributesFrom(attributes.entrySet()); if (value != null) { startTag.closeTag().addCharactersOf(value).endTag().save(); } else { startTag.closeUnaryTag().save(); } } @SuppressWarnings("deprecation") public static XmlResourceValue from(SerializeFormat.DataValueXml proto) { return of( Type.valueOf(proto.getValueType()), ImmutableMap.copyOf(proto.getAttribute()), proto.hasValue() ? proto.getValue() : null); } @Override public void writeResourceToClass(FullyQualifiedName key, AndroidResourceClassWriter resourceClassWriter) { resourceClassWriter.writeSimpleResource(key.type(), key.name()); } @Override public int serializeTo(int sourceId, Namespaces namespaces, OutputStream output) throws IOException { SerializeFormat.DataValue.Builder builder = XmlResourceValues.newSerializableDataValueBuilder(sourceId); Builder xmlValueBuilder = builder .getXmlValueBuilder() .putAllNamespace(namespaces.asMap()) .setType(SerializeFormat.DataValueXml.XmlType.SIMPLE) // TODO(corysmith): Find a way to avoid writing strings to the serialized format // it's inefficient use of space and costs more when deserializing. 
.putAllAttribute(attributes); if (value != null) { xmlValueBuilder.setValue(value); } builder.setXmlValue(xmlValueBuilder.setValueType(valueType.name())); return XmlResourceValues.serializeProtoDataValue(output, builder); } @Override public int hashCode() { return Objects.hash(valueType, attributes, value); } @Override public boolean equals(Object obj) { if (!(obj instanceof SimpleXmlResourceValue)) { return false; } SimpleXmlResourceValue other = (SimpleXmlResourceValue) obj; return Objects.equals(valueType, other.valueType) && Objects.equals(attributes, attributes) && Objects.equals(value, other.value); } @Override public String toString() { return MoreObjects.toStringHelper(getClass()) .add("valueType", valueType) .add("attributes", attributes) .add("value", value) .toString(); } @Override public XmlResourceValue combineWith(XmlResourceValue value) { throw new IllegalArgumentException(this + " is not a combinable resource."); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.mesos.runtime.clusterframework;

import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.mesos.Utils;
import org.apache.flink.mesos.scheduler.LaunchableTask;
import org.apache.flink.mesos.util.MesosArtifactResolver;
import org.apache.flink.mesos.util.MesosArtifactServer;
import org.apache.flink.mesos.util.MesosConfiguration;
import org.apache.flink.runtime.clusterframework.ContainerSpecification;
import org.apache.flink.runtime.clusterframework.ContaineredTaskManagerParameters;
import org.apache.flink.util.Preconditions;

import com.netflix.fenzo.ConstraintEvaluator;
import com.netflix.fenzo.TaskAssignmentResult;
import com.netflix.fenzo.TaskRequest;
import com.netflix.fenzo.VMTaskFitnessCalculator;
import org.apache.mesos.Protos;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;

import scala.Option;

import static org.apache.flink.mesos.Utils.range;
import static org.apache.flink.mesos.Utils.ranges;
import static org.apache.flink.mesos.Utils.scalar;
import static org.apache.flink.mesos.Utils.variable;

/**
 * Implements the launch of a Mesos worker.
 *
 * <p>Translates the abstract {@link ContainerSpecification} into a concrete
 * Mesos-specific {@link Protos.TaskInfo}.
 */
public class LaunchableMesosWorker implements LaunchableTask {

	protected static final Logger LOG = LoggerFactory.getLogger(LaunchableMesosWorker.class);

	/**
	 * The set of configuration keys to be dynamically configured with a port allocated from Mesos.
	 * One Mesos port is requested per entry (see {@link Request#getPorts()}).
	 */
	private static final String[] TM_PORT_KEYS = {
		"taskmanager.rpc.port",
		"taskmanager.data.port"};

	private final MesosArtifactResolver resolver;
	private final ContainerSpecification containerSpec;
	private final MesosTaskManagerParameters params;
	private final Protos.TaskID taskID;
	private final Request taskRequest;
	private final MesosConfiguration mesosConfiguration;

	/**
	 * Construct a launchable Mesos worker.
	 * @param resolver The resolver for retrieving artifacts (e.g. jars, configuration)
	 * @param params the TM parameters such as memory, cpu to acquire.
	 * @param containerSpec an abstract container specification for launch time.
	 * @param taskID the taskID for this worker.
	 * @param mesosConfiguration the Mesos framework configuration (framework info, etc.).
	 */
	public LaunchableMesosWorker(
			MesosArtifactResolver resolver,
			MesosTaskManagerParameters params,
			ContainerSpecification containerSpec,
			Protos.TaskID taskID,
			MesosConfiguration mesosConfiguration) {
		this.resolver = Preconditions.checkNotNull(resolver);
		this.containerSpec = Preconditions.checkNotNull(containerSpec);
		this.params = Preconditions.checkNotNull(params);
		this.taskID = Preconditions.checkNotNull(taskID);
		this.mesosConfiguration = Preconditions.checkNotNull(mesosConfiguration);
		this.taskRequest = new Request();
	}

	public Protos.TaskID taskID() {
		return taskID;
	}

	@Override
	public TaskRequest taskRequest() {
		return taskRequest;
	}

	/**
	 * The Fenzo task request describing the resources this worker needs; Fenzo matches it
	 * against Mesos offers and records the assignment via {@link #setAssignedResources}.
	 */
	class Request implements TaskRequest {
		private final AtomicReference<TaskRequest.AssignedResources> assignedResources = new AtomicReference<>();

		@Override
		public String getId() {
			return taskID.getValue();
		}

		@Override
		public String taskGroupName() {
			return "";
		}

		@Override
		public double getCPUs() {
			return params.cpus();
		}

		@Override
		public double getMemory() {
			return params.containeredParameters().taskManagerTotalMemoryMB();
		}

		@Override
		public double getNetworkMbps() {
			return 0.0;
		}

		@Override
		public double getDisk() {
			return 0.0;
		}

		@Override
		public int getPorts() {
			// one port per dynamically-configured TM port key
			return TM_PORT_KEYS.length;
		}

		@Override
		public Map<String, NamedResourceSetRequest> getCustomNamedResources() {
			return Collections.emptyMap();
		}

		@Override
		public List<? extends ConstraintEvaluator> getHardConstraints() {
			return params.constraints();
		}

		@Override
		public List<? extends VMTaskFitnessCalculator> getSoftConstraints() {
			return null;
		}

		@Override
		public void setAssignedResources(AssignedResources assignedResources) {
			this.assignedResources.set(assignedResources);
		}

		@Override
		public AssignedResources getAssignedResources() {
			return assignedResources.get();
		}

		@Override
		public String toString() {
			// FIX: the original omitted the separator between the two fields,
			// producing e.g. "Request{cpus=1.0memory=1024.0}".
			return "Request{" +
				"cpus=" + getCPUs() +
				", memory=" + getMemory() +
				'}';
		}
	}

	/**
	 * Construct the TaskInfo needed to launch the worker.
	 * @param slaveId the assigned slave.
	 * @param assignment the assignment details.
	 * @return a fully-baked TaskInfo.
	 */
	@Override
	public Protos.TaskInfo launch(Protos.SlaveID slaveId, TaskAssignmentResult assignment) {
		ContaineredTaskManagerParameters tmParams = params.containeredParameters();

		final Configuration dynamicProperties = new Configuration();

		// incorporate the dynamic properties set by the template
		dynamicProperties.addAll(containerSpec.getDynamicConfiguration());

		// build a TaskInfo with assigned resources, environment variables, etc
		final Protos.TaskInfo.Builder taskInfo = Protos.TaskInfo.newBuilder()
			.setSlaveId(slaveId)
			.setTaskId(taskID)
			.setName(taskID.getValue())
			.addResources(scalar("cpus", assignment.getRequest().getCPUs()))
			.addResources(scalar("mem", assignment.getRequest().getMemory()));

		final Protos.CommandInfo.Builder cmd = taskInfo.getCommandBuilder();
		final Protos.Environment.Builder env = cmd.getEnvironmentBuilder();
		final StringBuilder jvmArgs = new StringBuilder();

		// configure task manager hostname property if hostname override property is supplied
		Option<String> taskManagerHostnameOption = params.getTaskManagerHostname();
		if (taskManagerHostnameOption.isDefined()) {
			// replace the TASK_ID pattern by the actual task id value of the Mesos task
			final String taskManagerHostname = MesosTaskManagerParameters.TASK_ID_PATTERN
				.matcher(taskManagerHostnameOption.get())
				.replaceAll(Matcher.quoteReplacement(taskID.getValue()));
			dynamicProperties.setString(ConfigConstants.TASK_MANAGER_HOSTNAME_KEY, taskManagerHostname);
		}

		// use the assigned ports for the TM
		if (assignment.getAssignedPorts().size() < TM_PORT_KEYS.length) {
			// FIX: corrected message typo ("unsufficient" -> "insufficient")
			throw new IllegalArgumentException("insufficient # of ports assigned");
		}
		for (int i = 0; i < TM_PORT_KEYS.length; i++) {
			int port = assignment.getAssignedPorts().get(i);
			String key = TM_PORT_KEYS[i];
			taskInfo.addResources(ranges("ports", range(port, port)));
			dynamicProperties.setInteger(key, port);
		}

		// ship additional files
		for (ContainerSpecification.Artifact artifact : containerSpec.getArtifacts()) {
			cmd.addUris(Utils.uri(resolver, artifact));
		}

		// propagate environment variables
		for (Map.Entry<String, String> entry : params.containeredParameters().taskManagerEnv().entrySet()) {
			env.addVariables(variable(entry.getKey(), entry.getValue()));
		}
		for (Map.Entry<String, String> entry : containerSpec.getEnvironmentVariables().entrySet()) {
			env.addVariables(variable(entry.getKey(), entry.getValue()));
		}

		// propagate the Mesos task ID to the TM
		env.addVariables(variable(MesosConfigKeys.ENV_FLINK_CONTAINER_ID, taskInfo.getTaskId().getValue()));

		// finalize the memory parameters
		jvmArgs.append(" -Xms").append(tmParams.taskManagerHeapSizeMB()).append("m");
		jvmArgs.append(" -Xmx").append(tmParams.taskManagerHeapSizeMB()).append("m");
		if (tmParams.taskManagerDirectMemoryLimitMB() >= 0) {
			jvmArgs.append(" -XX:MaxDirectMemorySize=").append(tmParams.taskManagerDirectMemoryLimitMB()).append("m");
		}

		// pass dynamic system properties
		jvmArgs.append(' ').append(
			ContainerSpecification.formatSystemProperties(containerSpec.getSystemProperties()));

		// finalize JVM args
		env.addVariables(variable(MesosConfigKeys.ENV_JVM_ARGS, jvmArgs.toString()));

		// populate TASK_NAME and FRAMEWORK_NAME environment variables to the TM container
		env.addVariables(variable(MesosConfigKeys.ENV_TASK_NAME, taskInfo.getTaskId().getValue()));
		env.addVariables(variable(MesosConfigKeys.ENV_FRAMEWORK_NAME, mesosConfiguration.frameworkInfo().getName()));

		// build the launch command w/ dynamic application properties
		StringBuilder launchCommand = new StringBuilder();
		if (params.bootstrapCommand().isDefined()) {
			launchCommand.append(params.bootstrapCommand().get()).append(" && ");
		}
		launchCommand
			.append(params.command())
			.append(" ")
			.append(ContainerSpecification.formatSystemProperties(dynamicProperties));
		cmd.setValue(launchCommand.toString());

		// build the container info
		Protos.ContainerInfo.Builder containerInfo = Protos.ContainerInfo.newBuilder();
		// in event that no docker image or mesos image name is specified, we must still
		// set type to MESOS
		containerInfo.setType(Protos.ContainerInfo.Type.MESOS);
		switch (params.containerType()) {
			case MESOS:
				if (params.containerImageName().isDefined()) {
					containerInfo
						.setMesos(Protos.ContainerInfo.MesosInfo.newBuilder()
							.setImage(Protos.Image.newBuilder()
								.setType(Protos.Image.Type.DOCKER)
								.setDocker(Protos.Image.Docker.newBuilder()
									.setName(params.containerImageName().get()))));
				}
				break;

			case DOCKER:
				assert(params.containerImageName().isDefined());
				containerInfo
					.setType(Protos.ContainerInfo.Type.DOCKER)
					.setDocker(Protos.ContainerInfo.DockerInfo.newBuilder()
						.setNetwork(Protos.ContainerInfo.DockerInfo.Network.HOST)
						.setImage(params.containerImageName().get()));
				break;

			default:
				throw new IllegalStateException("unsupported container type");
		}

		// add any volumes to the containerInfo
		containerInfo.addAllVolumes(params.containerVolumes());
		taskInfo.setContainer(containerInfo);

		return taskInfo.build();
	}

	@Override
	public String toString() {
		// FIX: the original omitted the separator between the two fields,
		// producing e.g. "LaunchableMesosWorker{taskID=...taskRequest=...}".
		return "LaunchableMesosWorker{" +
			"taskID=" + taskID +
			", taskRequest=" + taskRequest +
			'}';
	}

	/**
	 * Configures an artifact server to serve the artifacts associated with a container specification.
	 * @param server the server to configure.
	 * @param container the container with artifacts to serve.
	 * @throws IOException if the artifacts cannot be accessed.
	 */
	static void configureArtifactServer(MesosArtifactServer server, ContainerSpecification container) throws IOException {
		// serve the artifacts associated with the container environment
		for (ContainerSpecification.Artifact artifact : container.getArtifacts()) {
			server.addPath(artifact.source, artifact.dest);
		}
	}
}
/*
 * Copyright 2008-2009 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kfs.module.purap.document;

import java.sql.Date;
import java.util.ArrayList;
import java.util.List;

import org.kuali.kfs.module.purap.businessobject.Carrier;
import org.kuali.kfs.module.purap.businessobject.CorrectionReceivingItem;
import org.kuali.kfs.module.purap.businessobject.DeliveryRequiredDateReason;
import org.kuali.kfs.module.purap.businessobject.LineItemReceivingItem;
import org.kuali.kfs.module.purap.businessobject.ReceivingItem;
import org.kuali.kfs.module.purap.document.service.ReceivingService;
import org.kuali.kfs.sys.KFSPropertyConstants;
import org.kuali.kfs.sys.context.SpringContext;
import org.kuali.kfs.vnd.businessobject.CampusParameter;
import org.kuali.kfs.vnd.businessobject.VendorDetail;
import org.kuali.rice.kew.framework.postprocessor.DocumentRouteStatusChange;
import org.kuali.rice.krad.util.ObjectUtils;
import org.kuali.rice.location.framework.country.CountryEbo;

/**
 * A receiving correction document. It references the line-item receiving document it
 * corrects and delegates most address/vendor/shipment accessors to that document.
 *
 * @author Kuali Nervous System Team (kualidev@oncourse.iu.edu)
 */
public class CorrectionReceivingDocument extends ReceivingDocumentBase {

    // Document number of the LineItemReceivingDocument being corrected.
    protected String lineItemReceivingDocumentNumber;

    //Collections
    protected List<CorrectionReceivingItem> items;

    // Lazily refreshed reference to the corrected document; see refreshLineReceivingDocument().
    protected LineItemReceivingDocument lineItemReceivingDocument;

    /**
     * Default constructor.
     */
    public CorrectionReceivingDocument() {
        super();
        items = new ArrayList<CorrectionReceivingItem>();
    }

    /**
     * Populates this correction document from the given line-item receiving document:
     * copies header/link identifiers and wraps each receiving line item in a
     * {@link CorrectionReceivingItem}.
     *
     * @param rlDoc the line-item receiving document being corrected.
     */
    public void populateCorrectionReceivingFromReceivingLine(LineItemReceivingDocument rlDoc){
        //populate receiving line document from purchase order
        this.setPurchaseOrderIdentifier( rlDoc.getPurchaseOrderIdentifier() );
        this.getDocumentHeader().setDocumentDescription( rlDoc.getDocumentHeader().getDocumentDescription());
        this.getDocumentHeader().setOrganizationDocumentNumber( rlDoc.getDocumentHeader().getOrganizationDocumentNumber() );
        this.setAccountsPayablePurchasingDocumentLinkIdentifier( rlDoc.getAccountsPayablePurchasingDocumentLinkIdentifier() );
        this.setLineItemReceivingDocumentNumber(rlDoc.getDocumentNumber());

        //copy receiving line items
        for (LineItemReceivingItem rli : (List<LineItemReceivingItem>) rlDoc.getItems()) {
            this.getItems().add(new CorrectionReceivingItem(rli, this));
        }
    }

    /**
     * On workflow status change: when the document reaches PROCESSED, completes the
     * correction via {@link ReceivingService#completeCorrectionReceivingDocument}.
     */
    @Override
    public void doRouteStatusChange(DocumentRouteStatusChange statusChangeEvent) {
        if(this.getFinancialSystemDocumentHeader().getWorkflowDocument().isProcessed()) {
            SpringContext.getBean(ReceivingService.class).completeCorrectionReceivingDocument(this);
        }
        super.doRouteStatusChange(statusChangeEvent);
    }

    /**
     * Gets the lineItemReceivingDocumentNumber attribute.
     *
     * @return Returns the lineItemReceivingDocumentNumber
     */
    public String getLineItemReceivingDocumentNumber() {
        return lineItemReceivingDocumentNumber;
    }

    /**
     * Sets the lineItemReceivingDocumentNumber attribute.
     *
     * @param lineItemReceivingDocumentNumber The lineItemReceivingDocumentNumber to set.
     */
    public void setLineItemReceivingDocumentNumber(String lineItemReceivingDocumentNumber) {
        this.lineItemReceivingDocumentNumber = lineItemReceivingDocumentNumber;
    }

    /**
     * Gets the lineItemReceivingDocument attribute, refreshing the reference first so
     * callers always see a loaded document.
     *
     * @return Returns the lineItemReceivingDocument.
     */
    public LineItemReceivingDocument getLineItemReceivingDocument() {
        refreshLineReceivingDocument();
        return lineItemReceivingDocument;
    }

    @Override
    public void processAfterRetrieve() {
        super.processAfterRetrieve();
        refreshLineReceivingDocument();
    }

    /**
     * Ensures the referenced line-item receiving document (and its document header) is loaded.
     */
    protected void refreshLineReceivingDocument(){
        // Re-fetch the reference when it is missing or only partially loaded.
        if(ObjectUtils.isNull(lineItemReceivingDocument) || lineItemReceivingDocument.getDocumentNumber() == null){
            this.refreshReferenceObject("lineItemReceivingDocument");
        }
        // Refresh the document header if it has not been loaded. (The original code
        // duplicated this identical check/refresh in both branches; hoisted out here.)
        if (ObjectUtils.isNull(lineItemReceivingDocument.getDocumentHeader().getDocumentNumber())) {
            lineItemReceivingDocument.refreshReferenceObject(KFSPropertyConstants.DOCUMENT_HEADER);
        }
    }

    /**
     * Falls back to the corrected document's purchase order identifier when this
     * document does not carry one itself.
     */
    @Override
    public Integer getPurchaseOrderIdentifier() {
        if (ObjectUtils.isNull(super.getPurchaseOrderIdentifier())){
            refreshLineReceivingDocument();
            if (ObjectUtils.isNotNull(lineItemReceivingDocument)){
                setPurchaseOrderIdentifier(lineItemReceivingDocument.getPurchaseOrderIdentifier());
            }
        }
        return super.getPurchaseOrderIdentifier();
    }

    /**
     * Sets the lineItemReceivingDocument attribute value.
     * @param lineItemReceivingDocument The lineItemReceivingDocument to set.
     * @deprecated
     */
    public void setLineItemReceivingDocument(LineItemReceivingDocument lineItemReceivingDocument) {
        this.lineItemReceivingDocument = lineItemReceivingDocument;
    }

    @Override
    public Class getItemClass() {
        return CorrectionReceivingItem.class;
    }

    @Override
    public List getItems() {
        return items;
    }

    @Override
    public void setItems(List items) {
        this.items = items;
    }

    @Override
    public ReceivingItem getItem(int pos) {
        return (ReceivingItem) items.get(pos);
    }

    public void addItem(ReceivingItem item) {
        getItems().add(item);
    }

    public void deleteItem(int lineNum) {
        // NOTE(review): List.remove(int) returns the removed element and throws
        // IndexOutOfBoundsException for a bad index, so this null check can only fire
        // if a null element was stored. Kept as-is for behavior compatibility.
        if (getItems().remove(lineNum) == null) {
            // throw error here
        }
    }

    // ---------------------------------------------------------------------------
    // Delegate accessors: all of the following forward to the corrected
    // LineItemReceivingDocument (refreshed via getLineItemReceivingDocument()).
    // ---------------------------------------------------------------------------

    @Override
    public Integer getAlternateVendorDetailAssignedIdentifier() {
        return getLineItemReceivingDocument().getAlternateVendorDetailAssignedIdentifier();
    }

    @Override
    public Integer getAlternateVendorHeaderGeneratedIdentifier() {
        return getLineItemReceivingDocument().getAlternateVendorHeaderGeneratedIdentifier();
    }

    @Override
    public String getAlternateVendorName() {
        return getLineItemReceivingDocument().getAlternateVendorName();
    }

    @Override
    public String getAlternateVendorNumber() {
        return getLineItemReceivingDocument().getAlternateVendorNumber();
    }

    @Override
    public Carrier getCarrier() {
        return getLineItemReceivingDocument().getCarrier();
    }

    @Override
    public String getCarrierCode() {
        return getLineItemReceivingDocument().getCarrierCode();
    }

    @Override
    public String getDeliveryBuildingCode() {
        return getLineItemReceivingDocument().getDeliveryBuildingCode();
    }

    @Override
    public String getDeliveryBuildingLine1Address() {
        return getLineItemReceivingDocument().getDeliveryBuildingLine1Address();
    }

    @Override
    public String getDeliveryBuildingLine2Address() {
        return getLineItemReceivingDocument().getDeliveryBuildingLine2Address();
    }

    @Override
    public String getDeliveryBuildingName() {
        return getLineItemReceivingDocument().getDeliveryBuildingName();
    }

    @Override
    public String getDeliveryBuildingRoomNumber() {
        return getLineItemReceivingDocument().getDeliveryBuildingRoomNumber();
    }

    @Override
    public CampusParameter getDeliveryCampus() {
        return getLineItemReceivingDocument().getDeliveryCampus();
    }

    @Override
    public String getDeliveryCampusCode() {
        return getLineItemReceivingDocument().getDeliveryCampusCode();
    }

    @Override
    public String getDeliveryCityName() {
        return getLineItemReceivingDocument().getDeliveryCityName();
    }

    @Override
    public String getDeliveryCountryCode() {
        return getLineItemReceivingDocument().getDeliveryCountryCode();
    }

    @Override
    public String getDeliveryInstructionText() {
        return getLineItemReceivingDocument().getDeliveryInstructionText();
    }

    @Override
    public String getDeliveryPostalCode() {
        return getLineItemReceivingDocument().getDeliveryPostalCode();
    }

    @Override
    public Date getDeliveryRequiredDate() {
        return getLineItemReceivingDocument().getDeliveryRequiredDate();
    }

    @Override
    public DeliveryRequiredDateReason getDeliveryRequiredDateReason() {
        return getLineItemReceivingDocument().getDeliveryRequiredDateReason();
    }

    @Override
    public String getDeliveryRequiredDateReasonCode() {
        return getLineItemReceivingDocument().getDeliveryRequiredDateReasonCode();
    }

    @Override
    public String getDeliveryStateCode() {
        return getLineItemReceivingDocument().getDeliveryStateCode();
    }

    @Override
    public String getDeliveryToEmailAddress() {
        return getLineItemReceivingDocument().getDeliveryToEmailAddress();
    }

    @Override
    public String getDeliveryToName() {
        return getLineItemReceivingDocument().getDeliveryToName();
    }

    @Override
    public String getDeliveryToPhoneNumber() {
        return getLineItemReceivingDocument().getDeliveryToPhoneNumber();
    }

    @Override
    public String getShipmentBillOfLadingNumber() {
        return getLineItemReceivingDocument().getShipmentBillOfLadingNumber();
    }

    @Override
    public String getShipmentPackingSlipNumber() {
        return getLineItemReceivingDocument().getShipmentPackingSlipNumber();
    }

    @Override
    public Date getShipmentReceivedDate() {
        return getLineItemReceivingDocument().getShipmentReceivedDate();
    }

    @Override
    public String getShipmentReferenceNumber() {
        return getLineItemReceivingDocument().getShipmentReferenceNumber();
    }

    @Override
    public Integer getVendorAddressGeneratedIdentifier() {
        return getLineItemReceivingDocument().getVendorAddressGeneratedIdentifier();
    }

    @Override
    public String getVendorCityName() {
        return getLineItemReceivingDocument().getVendorCityName();
    }

    @Override
    public CountryEbo getVendorCountry() {
        return getLineItemReceivingDocument().getVendorCountry();
    }

    @Override
    public String getVendorCountryCode() {
        return getLineItemReceivingDocument().getVendorCountryCode();
    }

    @Override
    public VendorDetail getVendorDetail() {
        return getLineItemReceivingDocument().getVendorDetail();
    }

    @Override
    public Integer getVendorDetailAssignedIdentifier() {
        return getLineItemReceivingDocument().getVendorDetailAssignedIdentifier();
    }

    @Override
    public Integer getVendorHeaderGeneratedIdentifier() {
        return getLineItemReceivingDocument().getVendorHeaderGeneratedIdentifier();
    }

    @Override
    public String getVendorLine1Address() {
        return getLineItemReceivingDocument().getVendorLine1Address();
    }

    @Override
    public String getVendorLine2Address() {
        return getLineItemReceivingDocument().getVendorLine2Address();
    }

    @Override
    public String getVendorName() {
        return getLineItemReceivingDocument().getVendorName();
    }

    @Override
    public String getVendorNumber() {
        return getLineItemReceivingDocument().getVendorNumber();
    }

    @Override
    public String getVendorPostalCode() {
        return getLineItemReceivingDocument().getVendorPostalCode();
    }

    @Override
    public String getVendorStateCode() {
        return getLineItemReceivingDocument().getVendorStateCode();
    }

    /**
     * Adds this document's items to the deletion-aware lists so removed items are
     * deleted from persistence.
     */
    @Override
    public List buildListOfDeletionAwareLists() {
        List managedLists = super.buildListOfDeletionAwareLists();
        managedLists.add(this.getItems());
        return managedLists;
    }
}