gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.phylogeny.simulatednights.command;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.annotation.Nullable;

import net.minecraft.command.CommandBase;
import net.minecraft.command.CommandException;
import net.minecraft.command.ICommandSender;
import net.minecraft.command.NumberInvalidException;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TextComponentTranslation;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.world.WorldServer;

import org.apache.commons.lang3.tuple.MutablePair;

import com.phylogeny.simulatednights.SimulationHandler;
import com.phylogeny.simulatednights.SimulationHandler.TickCountCommand;
import com.phylogeny.simulatednights.reference.Config;
import com.phylogeny.simulatednights.reference.LangKey;

/**
 * The {@code /simulate} server command.
 *
 * <p>Syntax (see {@link MessageLang#USAGE}):
 * {@code /simulate <ticks|time> ... <dimension id|all|this> [tileentities|allentities|blocks]
 * [ticks-per-server-tick] [singletick]}.
 *
 * <p>Depending on the arguments, the command either simulates a raw number of ticks
 * ({@code ticks} mode) or sets/adds world time ({@code time} mode) in one or all
 * dimensions, ticking entities, tile entities and/or random block ticks. The work is
 * either run entirely within a single server tick ({@code singletick}) or spread over
 * multiple server ticks via {@link SimulationHandler#SERVER_SIMULATED_TICK_MAP}.
 */
public class CommandSimulate extends CommandBase
{
	public static final String NAME = "simulate";

	@Override
	public String getName()
	{
		return NAME;
	}

	@Override
	public int getRequiredPermissionLevel()
	{
		// Permission level is configurable rather than the usual hard-coded OP level.
		return Config.commandPermissionLevel;
	}

	@Override
	public String getUsage(ICommandSender sender)
	{
		return MessageLang.USAGE.getMessageString();
	}

	@Override
	public void execute(MinecraftServer server, ICommandSender sender, String[] args) throws CommandException
	{
		int argCount = args.length;
		boolean timeMode = argCount > 0 && args[0].equalsIgnoreCase("time");
		// First argument must be 'ticks' or 'time'.
		if (argCount == 0 || !(timeMode || args[0].equalsIgnoreCase("ticks")))
		{
			MessageLang.ARGUMENTS_FIRST.sendMessage(sender);
			return;
		}
		// -1 doubles as a 'not parsed / invalid' sentinel below.
		int simulatedTicks = -1;
		boolean setMode = false;
		// Index of the dimension argument; depends on how many arguments the mode consumes.
		int startIndex;
		if (timeMode)
		{
			setMode = argCount > 1 && args[1].equalsIgnoreCase("set");
			// 'time' must be followed by 'set' or 'add'.
			if (argCount == 1 || !(setMode || args[1].equalsIgnoreCase("add")))
			{
				MessageLang.ARGUMENTS_TIME.sendMessage(sender);
				return;
			}
			try
			{
				simulatedTicks = parseInt(args[2], 0);
			}
			// Missing argument: fall through with the -1 sentinel so the error message below fires.
			catch (ArrayIndexOutOfBoundsException e) {}
			catch (NumberInvalidException e)
			{
				// 'set' additionally accepts the keywords 'day'/'night' (values presumably
				// matching vanilla's /time day and night times — TODO confirm).
				if (setMode)
				{
					if (args[2].equalsIgnoreCase("day"))
						simulatedTicks = 1000;
					if (args[2].equalsIgnoreCase("night"))
						simulatedTicks = 13000;
				}
				// Only swallow the plain 'not a number' error; rethrow range errors etc.
				if (!e.getMessage().equals("commands.generic.num.invalid"))
					throw e;
			}
			if (simulatedTicks < 0)
			{
				MessageLang.ARGUMENTS_TIME_AMOUNT.sendMessage(sender);
				return;
			}
			// /simulate time <set|add> <amount> → dimension is argument index 3.
			startIndex = 3;
		}
		else
		{
			try
			{
				simulatedTicks = parseInt(args[1], 0);
			}
			catch (ArrayIndexOutOfBoundsException e) {}
			catch (NumberInvalidException e)
			{
				if (!e.getMessage().equals("commands.generic.num.invalid"))
					throw e;
			}
			if (simulatedTicks < 0)
			{
				MessageLang.ARGUMENTS_TICKS.sendMessage(sender);
				return;
			}
			// /simulate ticks <count> → dimension is argument index 2.
			startIndex = 2;
		}
		boolean dimensionSpecified = false;
		// true = a single dimension was targeted (numeric id or 'this'); false = 'all'.
		boolean dimensionSet = false;
		int dimensionId = 0;
		try
		{
			dimensionId = Integer.parseInt(args[startIndex]);
			dimensionSpecified = true;
			dimensionSet = true;
		}
		catch (ArrayIndexOutOfBoundsException e) {}
		catch (NumberFormatException e)
		{
			// Not numeric: accept 'this' (sender's dimension) or 'all'.
			boolean useCurrentDimension = args[startIndex].equalsIgnoreCase("this");
			if (sender.getEntityWorld() != null && (useCurrentDimension || args[startIndex].equalsIgnoreCase("all")))
			{
				dimensionId = sender.getEntityWorld().provider.getDimension();
				dimensionSpecified = true;
				dimensionSet = useCurrentDimension;
			}
		}
		if (!dimensionSpecified)
		{
			MessageLang.ARGUMENTS_DIMENSION.sendMessage(sender);
			return;
		}
		// Remaining (optional) arguments start after the dimension.
		startIndex++;
		// For 'all', dimensionInfo accumulates a "(id, id, ...)" list in the world loop below.
		String dimensionInfo = dimensionSet ? " " + dimensionId : " (";
		boolean tickAllEntities = true;
		boolean tickTileEntities = true;
		boolean tickBlocks = true;
		boolean runInSingleServerTick = false;
		// 0 = not specified; replaced with the config default before use.
		int simulatedTicksPerServerTick = 0;
		if (argCount > startIndex)
		{
			List<String> argsRemaining = Arrays.asList(Arrays.copyOfRange(args, startIndex, args.length));
			runInSingleServerTick = argsRemaining.contains("singletick");
			// If any tick-target keyword is present, tick only the named targets;
			// otherwise everything stays enabled (the defaults above).
			if (argsRemaining.contains("allentities") || argsRemaining.contains("tileentities") || argsRemaining.contains("blocks"))
			{
				tickAllEntities = argsRemaining.contains("allentities");
				tickTileEntities = argsRemaining.contains("tileentities");
				tickBlocks = argsRemaining.contains("blocks");
			}
			// Any remaining numeric argument is the ticks-per-server-tick override;
			// non-numeric arguments are skipped (last numeric one wins).
			for (String arg : argsRemaining)
			{
				try
				{
					simulatedTicksPerServerTick = parseInt(arg, 1);
				}
				catch (NumberInvalidException e) {}
			}
		}
		boolean foundDimension = false;
		for (int i = 0; i < server.worlds.length; i++)
		{
			WorldServer worldServer = server.worlds[i];
			if (worldServer == null)
				continue;
			int currentDimensionId = worldServer.provider.getDimension();
			if (dimensionSet)
			{
				// Single-dimension mode: skip every other world.
				if (currentDimensionId != dimensionId)
					continue;
			}
			else
				dimensionInfo += currentDimensionId + ", ";
			foundDimension = true;
			if (runInSingleServerTick)
				SimulationHandler.simulateTicks(worldServer, simulatedTicks, timeMode, setMode, tickAllEntities, tickTileEntities, tickBlocks, true, true);
			else
			{
				// Refuse to stack a new multi-tick job on a dimension that already has one queued.
				if (SimulationHandler.SERVER_SIMULATED_TICK_MAP.containsKey(currentDimensionId))
				{
					MessageLang.QUEUE_IN_PROGRESS.sendMessage(sender);
					return;
				}
				// In 'ticks' mode the requested count is scaled by the configured percentage;
				// 'time' mode uses the amount verbatim.
				int tickCount = timeMode ? simulatedTicks : (int) (simulatedTicks * Config.timeTickPercentage);
				if (simulatedTicksPerServerTick < 1)
					simulatedTicksPerServerTick = Config.simulatedTicksPerServerTick;
				// Run the first batch now; queue the remainder for subsequent server ticks.
				int simulatedTicksCurrent = Math.min(tickCount, simulatedTicksPerServerTick);
				int remainder = tickCount - simulatedTicksCurrent;
				SimulationHandler.simulateTicks(worldServer, simulatedTicksCurrent, timeMode, setMode, tickAllEntities, tickTileEntities, tickBlocks, true, remainder == 0);
				if (remainder > 0)
					SimulationHandler.SERVER_SIMULATED_TICK_MAP.put(currentDimensionId, new TickCountCommand(remainder, timeMode, setMode, tickAllEntities, tickTileEntities, tickBlocks, simulatedTicksPerServerTick));
			}
		}
		if (foundDimension)
		{
			// Replace the trailing ", " of the accumulated id list with a closing parenthesis.
			if (!dimensionSet)
				dimensionInfo = dimensionInfo.substring(0, dimensionInfo.lastIndexOf(',')) + ")";
			List<ITextComponent> messages = new ArrayList<ITextComponent>();
			// Pick the result message matching the combination of tick targets.
			MessageLang messageLang = tickAllEntities ? (tickBlocks ? MessageLang.RESULT_BLOCKS_ALL_ENTITIES : MessageLang.RESULT_ALL_ENTITIES)
					: (tickBlocks ? (tickTileEntities ? MessageLang.RESULT_BLOCKS_TILEENTITIES : MessageLang.RESULT_BLOCKS) : MessageLang.RESULT_TILEENTITIES);
			messages.add(messageLang.getMessage((dimensionSet ? MessageLang.DIMENSION_SINGLE : MessageLang.DIMENSION_ALL).getMessage(dimensionId), dimensionInfo + "."));
			if (timeMode)
				messages.add(new TextComponentTranslation("commands.time." + (setMode ? "set" : "added"), Integer.valueOf(simulatedTicks)));
			// If a multi-tick job was queued, defer the messages until it finishes;
			// otherwise report immediately.
			if (SimulationHandler.SERVER_SIMULATED_TICK_MAP.containsKey(dimensionId))
				SimulationHandler.commandCompletionMessages = new MutablePair<ICommandSender, List<ITextComponent>>(sender, messages);
			else
			{
				for (ITextComponent message : messages)
					sender.sendMessage(message);
			}
		}
		else
		{
			(dimensionSet ? MessageLang.DIMENSION_MISSING_SINGLE : MessageLang.DIMENSION_MISSING_ALL).sendMessage(sender, dimensionId);
		}
	}

	@Override
	public List<String> getTabCompletions(MinecraftServer server, ICommandSender sender, String[] args, @Nullable BlockPos pos)
	{
		int argCount = args.length;
		if (argCount == 1)
			return getListOfStringsMatchingLastWord(args, "ticks", "time");
		// Position of the dimension argument (mirrors the logic in execute()).
		int startIndex = 3;
		if (args[0].equalsIgnoreCase("time"))
		{
			switch (argCount)
			{
				case 2: return getListOfStringsMatchingLastWord(args, "set", "add");
				case 3: return getListOfStringsMatchingLastWord(args, "day", "night");
			}
			// 'time' mode consumes one extra argument, shifting the dimension to index 4.
			startIndex++;
		}
		List<String> options = new ArrayList<String>();
		if (argCount == startIndex)
		{
			// Completing the dimension argument: keywords plus every loaded dimension id.
			options.add("all");
			options.add("this");
			for (int i = 0; i < server.worlds.length; i++)
			{
				WorldServer worldServer = server.worlds[i];
				if (worldServer != null)
					options.add(Integer.toString(worldServer.provider.getDimension()));
			}
			options = getListOfStringsMatchingLastWord(args, options);
		}
		if (argCount > startIndex)
		{
			// Completing trailing options: offer only keywords not already typed.
			List<String> argsRemaining = Arrays.asList(Arrays.copyOfRange(args, startIndex, args.length));
			String[] optionalArgs = new String[]{"allentities", "blocks", "singletick"};
			for (String option : optionalArgs)
			{
				if (!argsRemaining.contains(option))
					options.add(option);
			}
			// 'tileentities' is redundant when 'allentities' is present, so suppress it then.
			if (!argsRemaining.contains("tileentities") && !argsRemaining.contains("allentities"))
				options.add("tileentities");
			options = getListOfStringsMatchingLastWord(args, options);
		}
		return options;
	}

	/**
	 * Messages used by the command. Each constant pairs a lang-file key (for localized
	 * output) with hard-coded English fallback text; {@link Config#commandMessageLocalization}
	 * selects which of the two is used.
	 */
	private static enum MessageLang
	{
		USAGE("usage", "/simulate"
				+ "\n - <ticks : time>"
				+ "\n - follow 'ticks' with: <number of ticks to simulate>"
				+ "\n - follow 'time' with: <set : add>"
				+ "\n - follow 'set' with: <day : night : time to set>"
				+ "\n - follow 'add' with: <time to add>"
				+ "\n - <dimension id : 'all' : 'this' for current dimension>"
				+ "\n - [tileentities : allentities : blocks (tick those specified)]"
				+ "\n - [simulated ticks per server tick (default if absent)]"
				+ "\n - ['singletick' to run simulation all in one server tick]"),
		ARGUMENTS_FIRST("arguments.first", TextFormatting.RED, "Required first argument must be '%s' or '%s'.", "ticks", "time"),
		ARGUMENTS_TIME("arguments.time", TextFormatting.RED, "/%s %s must be followed by '%s' or '%s'.", "simulate", "time", "set", "add"),
		ARGUMENTS_TIME_AMOUNT("arguments.time.amount", TextFormatting.RED, "/%s %s <%s : %s> must be followed by the time to set or add.", "simulate", "time", "set", "add"),
		ARGUMENTS_TICKS("arguments.ticks", TextFormatting.RED, "/%s %s must be followed by the number of ticks to simulate.", "simulate", "ticks"),
		ARGUMENTS_DIMENSION("arguments.dimension", TextFormatting.RED, "Dimension argument is required and must be a valid dimension id, '%s' for the current dimension, or '%s' for all dimensions.", "this", "all"),
		DIMENSION_SINGLE("dimension.single", "dimension"),
		DIMENSION_ALL("dimension.all", "all dimensions"),
		DIMENSION_MISSING_SINGLE("dimension.missing.single", TextFormatting.RED, "No dimension was found with an id of %s."),
		DIMENSION_MISSING_ALL("dimension.missing.all", TextFormatting.RED, "No dimensions were found."),
		RESULT_ALL_ENTITIES("result.allentities", "All entities were ticked in %s%s"),
		RESULT_TILEENTITIES("result.tileentities", "All tile entities were ticked in %s%s"),
		RESULT_BLOCKS("result.blocks", "Blocks were randomly ticked in all persistent chunks of %s%s"),
		RESULT_BLOCKS_ALL_ENTITIES("result.blocks.allentities", "All entities were ticked and blocks were randomly ticked in all persistent chunks of %s%s"),
		RESULT_BLOCKS_TILEENTITIES("result.blocks.tileentities", "All tile entities were ticked and blocks were randomly ticked in all persistent chunks of %s%s"),
		QUEUE_IN_PROGRESS("queue.inprogress", TextFormatting.RED, "A simulation command is already in progress. Please wait until it is finished.");

		// Full localization key (prefixed) and English fallback text.
		private String langKey, hardCodedText;
		// Optional color applied to the message; null = default color.
		private TextFormatting color;
		// Default format arguments used when sendMessage/getMessage is called without any.
		private Object[] args;

		private MessageLang(String langKey, TextFormatting color, String hardCodedText, Object... args)
		{
			this(langKey, hardCodedText, args);
			this.color = color;
		}

		private MessageLang(String langKey, String hardCodedText, Object... args)
		{
			this.langKey = LangKey.COMMAND_PREFIX + langKey;
			this.hardCodedText = hardCodedText;
			this.args = args;
		}

		// Choose between the localization key and the hard-coded fallback per config.
		private String getMessageInternal(String langKey)
		{
			return Config.commandMessageLocalization ? langKey : hardCodedText;
		}

		public String getMessageString()
		{
			return getMessageInternal(langKey);
		}

		public void sendMessage(ICommandSender sender, Object... args)
		{
			sender.sendMessage(getMessage(args));
		}

		public TextComponentTranslation getMessage(Object... args)
		{
			// Caller-supplied args override the constant's default args.
			TextComponentTranslation message = new TextComponentTranslation(getMessageInternal(langKey), args.length > 0 ? args : this.args);
			if (color != null)
				message.getStyle().setColor(color);
			return message;
		}
	}
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs.impl.http;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileSystem;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.FileContentUtilCore;
import com.intellij.util.SmartList;
import com.intellij.util.UriUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;

/**
 * A virtual file backed by a remote HTTP resource. A node with a non-null
 * {@link RemoteFileInfoImpl} represents a remote file whose content is downloaded to a
 * local file; a node with a null file info represents a directory. Content operations
 * (streams, bytes) delegate to the downloaded local file when it is available.
 */
class HttpVirtualFileImpl extends HttpVirtualFile {
  private final HttpFileSystemBase myFileSystem;
  @Nullable private final RemoteFileInfoImpl myFileInfo;
  // File type captured on first getFileType() call before download, used to detect a
  // type change after the download completes (see the listener in the constructor).
  @Nullable private FileType myInitialFileType;
  private final String myPath;
  private final String myParentPath;
  private final String myName;
  private List<VirtualFile> myChildren;

  HttpVirtualFileImpl(@NotNull HttpFileSystemBase fileSystem,
                      @Nullable HttpVirtualFileImpl parent,
                      String path,
                      @Nullable RemoteFileInfoImpl fileInfo) {
    // Register this node with its parent's lazily-created child list.
    if (parent != null) {
      if (parent.myChildren == null) {
        parent.myChildren = new SmartList<>();
      }
      parent.myChildren.add(this);
    }
    myFileSystem = fileSystem;
    myPath = path;
    myFileInfo = fileInfo;
    if (myFileInfo != null) {
      // File node: when the download finishes, reload the document and, if the actual
      // file type differs from the one assumed before download, reparse the file.
      myFileInfo.addDownloadingListener(new FileDownloadingAdapter() {
        @Override
        public void fileDownloaded(@NotNull final VirtualFile localFile) {
          ApplicationManager.getApplication().invokeLater(() -> {
            HttpVirtualFileImpl file = HttpVirtualFileImpl.this;
            FileDocumentManager.getInstance().reloadFiles(file);
            if (myInitialFileType != null && !FileTypeRegistry.getInstance().isFileOfType(localFile, myInitialFileType)) {
              FileContentUtilCore.reparseFiles(file);
            }
          });
        }
      });

      // Split the normalized path (no query parameters, no trailing slashes) into
      // parent path and name at the last slash; no slash means no parent.
      path = UriUtil.trimTrailingSlashes(UriUtil.trimParameters(path));
      int lastSlash = path.lastIndexOf('/');
      if (lastSlash == -1) {
        myParentPath = null;
        myName = path;
      }
      else {
        myParentPath = path.substring(0, lastSlash);
        myName = path.substring(lastSlash + 1);
      }
    }
    else {
      // Directory node: the path may keep its trailing slash. A path that is only
      // "scheme://host/" (last slash at the very end) is a root with no parent;
      // otherwise split at the last slash, keeping the slash in the parent for the
      // top-level case (prevSlash < 0).
      int lastSlash = path.lastIndexOf('/');
      if (lastSlash == path.length() - 1) {
        myParentPath = null;
        myName = path;
      }
      else {
        int prevSlash = path.lastIndexOf('/', lastSlash - 1);
        myParentPath = path.substring(0, prevSlash < 0 ? lastSlash + 1 : lastSlash);
        myName = path.substring(lastSlash + 1);
      }
    }
  }

  @Override
  @Nullable
  public RemoteFileInfoImpl getFileInfo() {
    return myFileInfo;
  }

  @Override
  @NotNull
  public VirtualFileSystem getFileSystem() {
    return myFileSystem;
  }

  @NotNull
  @Override
  public String getPath() {
    return myPath;
  }

  @Override
  @NotNull
  public String getName() {
    return myName;
  }

  @Override
  @NonNls
  public String toString() {
    return "HttpVirtualFile:" + myPath + ", info=" + myFileInfo;
  }

  @Override
  public VirtualFile getParent() {
    // Parent is resolved lazily through the file system rather than stored as a reference.
    return myParentPath == null ? null : myFileSystem.findFileByPath(myParentPath, true);
  }

  @Override
  public boolean isWritable() {
    return false;
  }

  @Override
  public boolean isValid() {
    return true;
  }

  @Override
  public boolean isDirectory() {
    // Nodes without remote file info are directories (see constructor).
    return myFileInfo == null;
  }

  @Override
  public VirtualFile[] getChildren() {
    return ContainerUtil.isEmpty(myChildren) ? EMPTY_ARRAY : myChildren.toArray(VirtualFile.EMPTY_ARRAY);
  }

  @Nullable
  @Override
  public VirtualFile findChild(@NotNull @NonNls String name) {
    // Linear scan; child lists are expected to be small (SmartList).
    if (!ContainerUtil.isEmpty(myChildren)) {
      for (VirtualFile child : myChildren) {
        if (StringUtil.equals(child.getNameSequence(), name)) {
          return child;
        }
      }
    }
    return null;
  }

  @Override
  @NotNull
  public FileType getFileType() {
    if (myFileInfo == null) {
      return super.getFileType();
    }
    // Prefer the downloaded local file's type once available.
    VirtualFile localFile = myFileInfo.getLocalFile();
    if (localFile != null) {
      return localFile.getFileType();
    }
    // Remember the pre-download guess so the download listener can detect a change.
    FileType fileType = super.getFileType();
    if (myInitialFileType == null) {
      myInitialFileType = fileType;
    }
    return fileType;
  }

  @Override
  public @NotNull InputStream getInputStream() throws IOException {
    // Content is only readable once downloaded to a local file.
    if (myFileInfo != null) {
      VirtualFile localFile = myFileInfo.getLocalFile();
      if (localFile != null) {
        return localFile.getInputStream();
      }
    }
    throw new UnsupportedOperationException();
  }

  @Override
  @NotNull
  public OutputStream getOutputStream(Object requestor, long newModificationStamp, long newTimeStamp) throws IOException {
    // Writing is only supported through the downloaded local file.
    if (myFileInfo != null) {
      VirtualFile localFile = myFileInfo.getLocalFile();
      if (localFile != null) {
        return localFile.getOutputStream(requestor, newModificationStamp, newTimeStamp);
      }
    }
    throw new UnsupportedOperationException();
  }

  @Override
  public byte @NotNull [] contentsToByteArray() throws IOException {
    if (myFileInfo == null) {
      throw new UnsupportedOperationException();
    }
    VirtualFile localFile = myFileInfo.getLocalFile();
    if (localFile != null) {
      return localFile.contentsToByteArray();
    }
    // Not yet downloaded: report empty content rather than failing.
    return ArrayUtilRt.EMPTY_BYTE_ARRAY;
  }

  @Override
  public long getTimeStamp() {
    return 0;
  }

  @Override
  public long getModificationStamp() {
    return 0;
  }

  @Override
  public long getLength() {
    // Length is unknown until the remote content is downloaded.
    return -1;
  }

  @Override
  public void refresh(final boolean asynchronous, final boolean recursive, final Runnable postRunnable) {
    if (myFileInfo != null) {
      myFileInfo.refresh(postRunnable);
    }
    else if (postRunnable != null) {
      // Directories have nothing to refresh; still honor the completion callback.
      postRunnable.run();
    }
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://code.google.com/p/google-apis-client-generator/ * (build: 2013-10-30 15:57:41 UTC) * on 2013-11-12 at 20:00:16 UTC * Modify at your own risk. */ package com.demoa.deviceinfoendpoint; /** * Service definition for Deviceinfoendpoint (v1). * * <p> * This is an API * </p> * * <p> * For more information about this service, see the * <a href="" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link DeviceinfoendpointRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class Deviceinfoendpoint extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. " + "You need at least version 1.15 of google-api-client to run version " + "1.16.0-rc of the deviceinfoendpoint library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. 
* * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://myapp.appspot.com/_ah/api/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = "deviceinfoendpoint/v1/"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. * </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Deviceinfoendpoint(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ Deviceinfoendpoint(Builder builder) { super(builder); } @Override protected void 
initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * Create a request for the method "getDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting any * optional parameters, call the {@link GetDeviceInfo#execute()} method to invoke the remote * operation. * * @param id * @return the request */ public GetDeviceInfo getDeviceInfo(java.lang.String id) throws java.io.IOException { GetDeviceInfo result = new GetDeviceInfo(id); initialize(result); return result; } public class GetDeviceInfo extends DeviceinfoendpointRequest<com.demoa.deviceinfoendpoint.model.DeviceInfo> { private static final String REST_PATH = "deviceinfo/{id}"; /** * Create a request for the method "getDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting * any optional parameters, call the {@link GetDeviceInfo#execute()} method to invoke the remote * operation. <p> {@link GetDeviceInfo#initialize(com.google.api.client.googleapis.services.Abstra * ctGoogleClientRequest)} must be called to initialize this instance immediately after invoking * the constructor. 
</p> * * @param id * @since 1.13 */ protected GetDeviceInfo(java.lang.String id) { super(Deviceinfoendpoint.this, "GET", REST_PATH, null, com.demoa.deviceinfoendpoint.model.DeviceInfo.class); this.id = com.google.api.client.util.Preconditions.checkNotNull(id, "Required parameter id must be specified."); } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public GetDeviceInfo setAlt(java.lang.String alt) { return (GetDeviceInfo) super.setAlt(alt); } @Override public GetDeviceInfo setFields(java.lang.String fields) { return (GetDeviceInfo) super.setFields(fields); } @Override public GetDeviceInfo setKey(java.lang.String key) { return (GetDeviceInfo) super.setKey(key); } @Override public GetDeviceInfo setOauthToken(java.lang.String oauthToken) { return (GetDeviceInfo) super.setOauthToken(oauthToken); } @Override public GetDeviceInfo setPrettyPrint(java.lang.Boolean prettyPrint) { return (GetDeviceInfo) super.setPrettyPrint(prettyPrint); } @Override public GetDeviceInfo setQuotaUser(java.lang.String quotaUser) { return (GetDeviceInfo) super.setQuotaUser(quotaUser); } @Override public GetDeviceInfo setUserIp(java.lang.String userIp) { return (GetDeviceInfo) super.setUserIp(userIp); } @com.google.api.client.util.Key private java.lang.String id; /** */ public java.lang.String getId() { return id; } public GetDeviceInfo setId(java.lang.String id) { this.id = id; return this; } @Override public GetDeviceInfo set(String parameterName, Object value) { return (GetDeviceInfo) super.set(parameterName, value); } } /** * Create a request for the method "insertDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. 
After setting any * optional parameters, call the {@link InsertDeviceInfo#execute()} method to invoke the remote * operation. * * @param content the {@link com.demoa.deviceinfoendpoint.model.DeviceInfo} * @return the request */ public InsertDeviceInfo insertDeviceInfo(com.demoa.deviceinfoendpoint.model.DeviceInfo content) throws java.io.IOException { InsertDeviceInfo result = new InsertDeviceInfo(content); initialize(result); return result; } public class InsertDeviceInfo extends DeviceinfoendpointRequest<com.demoa.deviceinfoendpoint.model.DeviceInfo> { private static final String REST_PATH = "deviceinfo"; /** * Create a request for the method "insertDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting * any optional parameters, call the {@link InsertDeviceInfo#execute()} method to invoke the * remote operation. <p> {@link InsertDeviceInfo#initialize(com.google.api.client.googleapis.servi * ces.AbstractGoogleClientRequest)} must be called to initialize this instance immediately after * invoking the constructor. 
</p> * * @param content the {@link com.demoa.deviceinfoendpoint.model.DeviceInfo} * @since 1.13 */ protected InsertDeviceInfo(com.demoa.deviceinfoendpoint.model.DeviceInfo content) { super(Deviceinfoendpoint.this, "POST", REST_PATH, content, com.demoa.deviceinfoendpoint.model.DeviceInfo.class); } @Override public InsertDeviceInfo setAlt(java.lang.String alt) { return (InsertDeviceInfo) super.setAlt(alt); } @Override public InsertDeviceInfo setFields(java.lang.String fields) { return (InsertDeviceInfo) super.setFields(fields); } @Override public InsertDeviceInfo setKey(java.lang.String key) { return (InsertDeviceInfo) super.setKey(key); } @Override public InsertDeviceInfo setOauthToken(java.lang.String oauthToken) { return (InsertDeviceInfo) super.setOauthToken(oauthToken); } @Override public InsertDeviceInfo setPrettyPrint(java.lang.Boolean prettyPrint) { return (InsertDeviceInfo) super.setPrettyPrint(prettyPrint); } @Override public InsertDeviceInfo setQuotaUser(java.lang.String quotaUser) { return (InsertDeviceInfo) super.setQuotaUser(quotaUser); } @Override public InsertDeviceInfo setUserIp(java.lang.String userIp) { return (InsertDeviceInfo) super.setUserIp(userIp); } @Override public InsertDeviceInfo set(String parameterName, Object value) { return (InsertDeviceInfo) super.set(parameterName, value); } } /** * Create a request for the method "listDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting any * optional parameters, call the {@link ListDeviceInfo#execute()} method to invoke the remote * operation. 
* * @return the request */ public ListDeviceInfo listDeviceInfo() throws java.io.IOException { ListDeviceInfo result = new ListDeviceInfo(); initialize(result); return result; } public class ListDeviceInfo extends DeviceinfoendpointRequest<com.demoa.deviceinfoendpoint.model.CollectionResponseDeviceInfo> { private static final String REST_PATH = "deviceinfo"; /** * Create a request for the method "listDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting * any optional parameters, call the {@link ListDeviceInfo#execute()} method to invoke the remote * operation. <p> {@link ListDeviceInfo#initialize(com.google.api.client.googleapis.services.Abstr * actGoogleClientRequest)} must be called to initialize this instance immediately after invoking * the constructor. </p> * * @since 1.13 */ protected ListDeviceInfo() { super(Deviceinfoendpoint.this, "GET", REST_PATH, null, com.demoa.deviceinfoendpoint.model.CollectionResponseDeviceInfo.class); } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public ListDeviceInfo setAlt(java.lang.String alt) { return (ListDeviceInfo) super.setAlt(alt); } @Override public ListDeviceInfo setFields(java.lang.String fields) { return (ListDeviceInfo) super.setFields(fields); } @Override public ListDeviceInfo setKey(java.lang.String key) { return (ListDeviceInfo) super.setKey(key); } @Override public ListDeviceInfo setOauthToken(java.lang.String oauthToken) { return (ListDeviceInfo) super.setOauthToken(oauthToken); } @Override public ListDeviceInfo setPrettyPrint(java.lang.Boolean prettyPrint) { return (ListDeviceInfo) super.setPrettyPrint(prettyPrint); } @Override public ListDeviceInfo setQuotaUser(java.lang.String quotaUser) { return 
(ListDeviceInfo) super.setQuotaUser(quotaUser); } @Override public ListDeviceInfo setUserIp(java.lang.String userIp) { return (ListDeviceInfo) super.setUserIp(userIp); } @com.google.api.client.util.Key private java.lang.String cursor; /** */ public java.lang.String getCursor() { return cursor; } public ListDeviceInfo setCursor(java.lang.String cursor) { this.cursor = cursor; return this; } @com.google.api.client.util.Key private java.lang.Integer limit; /** */ public java.lang.Integer getLimit() { return limit; } public ListDeviceInfo setLimit(java.lang.Integer limit) { this.limit = limit; return this; } @Override public ListDeviceInfo set(String parameterName, Object value) { return (ListDeviceInfo) super.set(parameterName, value); } } /** * Create a request for the method "removeDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting any * optional parameters, call the {@link RemoveDeviceInfo#execute()} method to invoke the remote * operation. * * @param id * @return the request */ public RemoveDeviceInfo removeDeviceInfo(java.lang.String id) throws java.io.IOException { RemoveDeviceInfo result = new RemoveDeviceInfo(id); initialize(result); return result; } public class RemoveDeviceInfo extends DeviceinfoendpointRequest<Void> { private static final String REST_PATH = "deviceinfo/{id}"; /** * Create a request for the method "removeDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting * any optional parameters, call the {@link RemoveDeviceInfo#execute()} method to invoke the * remote operation. <p> {@link RemoveDeviceInfo#initialize(com.google.api.client.googleapis.servi * ces.AbstractGoogleClientRequest)} must be called to initialize this instance immediately after * invoking the constructor. 
</p> * * @param id * @since 1.13 */ protected RemoveDeviceInfo(java.lang.String id) { super(Deviceinfoendpoint.this, "DELETE", REST_PATH, null, Void.class); this.id = com.google.api.client.util.Preconditions.checkNotNull(id, "Required parameter id must be specified."); } @Override public RemoveDeviceInfo setAlt(java.lang.String alt) { return (RemoveDeviceInfo) super.setAlt(alt); } @Override public RemoveDeviceInfo setFields(java.lang.String fields) { return (RemoveDeviceInfo) super.setFields(fields); } @Override public RemoveDeviceInfo setKey(java.lang.String key) { return (RemoveDeviceInfo) super.setKey(key); } @Override public RemoveDeviceInfo setOauthToken(java.lang.String oauthToken) { return (RemoveDeviceInfo) super.setOauthToken(oauthToken); } @Override public RemoveDeviceInfo setPrettyPrint(java.lang.Boolean prettyPrint) { return (RemoveDeviceInfo) super.setPrettyPrint(prettyPrint); } @Override public RemoveDeviceInfo setQuotaUser(java.lang.String quotaUser) { return (RemoveDeviceInfo) super.setQuotaUser(quotaUser); } @Override public RemoveDeviceInfo setUserIp(java.lang.String userIp) { return (RemoveDeviceInfo) super.setUserIp(userIp); } @com.google.api.client.util.Key private java.lang.String id; /** */ public java.lang.String getId() { return id; } public RemoveDeviceInfo setId(java.lang.String id) { this.id = id; return this; } @Override public RemoveDeviceInfo set(String parameterName, Object value) { return (RemoveDeviceInfo) super.set(parameterName, value); } } /** * Create a request for the method "updateDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting any * optional parameters, call the {@link UpdateDeviceInfo#execute()} method to invoke the remote * operation. 
* * @param content the {@link com.demoa.deviceinfoendpoint.model.DeviceInfo} * @return the request */ public UpdateDeviceInfo updateDeviceInfo(com.demoa.deviceinfoendpoint.model.DeviceInfo content) throws java.io.IOException { UpdateDeviceInfo result = new UpdateDeviceInfo(content); initialize(result); return result; } public class UpdateDeviceInfo extends DeviceinfoendpointRequest<com.demoa.deviceinfoendpoint.model.DeviceInfo> { private static final String REST_PATH = "deviceinfo"; /** * Create a request for the method "updateDeviceInfo". * * This request holds the parameters needed by the the deviceinfoendpoint server. After setting * any optional parameters, call the {@link UpdateDeviceInfo#execute()} method to invoke the * remote operation. <p> {@link UpdateDeviceInfo#initialize(com.google.api.client.googleapis.servi * ces.AbstractGoogleClientRequest)} must be called to initialize this instance immediately after * invoking the constructor. </p> * * @param content the {@link com.demoa.deviceinfoendpoint.model.DeviceInfo} * @since 1.13 */ protected UpdateDeviceInfo(com.demoa.deviceinfoendpoint.model.DeviceInfo content) { super(Deviceinfoendpoint.this, "PUT", REST_PATH, content, com.demoa.deviceinfoendpoint.model.DeviceInfo.class); } @Override public UpdateDeviceInfo setAlt(java.lang.String alt) { return (UpdateDeviceInfo) super.setAlt(alt); } @Override public UpdateDeviceInfo setFields(java.lang.String fields) { return (UpdateDeviceInfo) super.setFields(fields); } @Override public UpdateDeviceInfo setKey(java.lang.String key) { return (UpdateDeviceInfo) super.setKey(key); } @Override public UpdateDeviceInfo setOauthToken(java.lang.String oauthToken) { return (UpdateDeviceInfo) super.setOauthToken(oauthToken); } @Override public UpdateDeviceInfo setPrettyPrint(java.lang.Boolean prettyPrint) { return (UpdateDeviceInfo) super.setPrettyPrint(prettyPrint); } @Override public UpdateDeviceInfo setQuotaUser(java.lang.String quotaUser) { return (UpdateDeviceInfo) 
super.setQuotaUser(quotaUser); } @Override public UpdateDeviceInfo setUserIp(java.lang.String userIp) { return (UpdateDeviceInfo) super.setUserIp(userIp); } @Override public UpdateDeviceInfo set(String parameterName, Object value) { return (UpdateDeviceInfo) super.set(parameterName, value); } } /** * Builder for {@link Deviceinfoendpoint}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); } /** Builds a new instance of {@link Deviceinfoendpoint}. 
*/ @Override public Deviceinfoendpoint build() { return new Deviceinfoendpoint(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link DeviceinfoendpointRequestInitializer}. * * @since 1.12 */ public Builder setDeviceinfoendpointRequestInitializer( DeviceinfoendpointRequestInitializer deviceinfoendpointRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(deviceinfoendpointRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
package io.katharsis.jpa.repository;

import java.util.Arrays;
import java.util.List;

import org.hibernate.Hibernate;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.springframework.transaction.annotation.Transactional;

import io.katharsis.jpa.JpaEntityRepository;
import io.katharsis.jpa.internal.paging.PagedMetaInformation;
import io.katharsis.jpa.model.RelatedEntity;
import io.katharsis.jpa.model.TestEntity;
import io.katharsis.jpa.query.AbstractJpaTest;
import io.katharsis.queryspec.Direction;
import io.katharsis.queryspec.FilterOperator;
import io.katharsis.queryspec.FilterSpec;
import io.katharsis.queryspec.QuerySpec;
import io.katharsis.queryspec.SortSpec;

/**
 * Base test suite for {@link JpaEntityRepository} covering sorting, filtering,
 * paging, lazy relation loading and the readable/updateable/deleteable/creatable
 * switches.
 *
 * <p>Relies on the fixture set up by {@link AbstractJpaTest#setup()}: it provides
 * {@code module}, {@code em} and {@code numTestEntities}. The literal expectations
 * below (counts of 5, ids 0..4, longValue equal to id) imply the fixture persists
 * {@code numTestEntities} == 5 sequentially numbered {@link TestEntity} rows —
 * NOTE(review): confirm against AbstractJpaTest if the fixture changes.</p>
 */
@Transactional
public abstract class JpaEntityRepositoryTestBase extends AbstractJpaTest {

	private JpaEntityRepository<TestEntity, Long> repo;

	/** Creates the repository under test on top of the inherited fixture. */
	@Override
	@Before
	public void setup() {
		super.setup();
		repo = new JpaEntityRepository<>(module, TestEntity.class);
	}

	/** The repository must report the entity class it was constructed with. */
	@Test
	public void testGetEntityType() throws InstantiationException, IllegalAccessException {
		Assert.assertEquals(TestEntity.class, repo.getResourceClass());
	}

	/** An unconstrained QuerySpec returns every persisted entity. */
	@Test
	public void testFindAll() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(numTestEntities, list.size());
	}

	@Test
	public void testFindAllOrderByAsc() throws InstantiationException, IllegalAccessException {
		testFindAllOrder(true);
	}

	@Test
	public void testFindAllOrderByDesc() throws InstantiationException, IllegalAccessException {
		testFindAllOrder(false);
	}

	/**
	 * Shared body for the two ordering tests: sorts by {@code longValue} and
	 * verifies the sequence 0..n-1 (asc) or n-1..0 (desc).
	 *
	 * @param asc true for ascending order, false for descending
	 */
	public void testFindAllOrder(boolean asc) throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addSort(new SortSpec(Arrays.asList("longValue"), asc ? Direction.ASC : Direction.DESC));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(numTestEntities, list.size());
		for (int i = 0; i < numTestEntities; i++) {
			if (asc) {
				Assert.assertEquals(i, list.get(i).getLongValue());
			} else {
				Assert.assertEquals(numTestEntities - 1 - i, list.get(i).getLongValue());
			}
		}
	}

	/** EQ filter on a String attribute. */
	@Test
	public void testFilterString() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("stringValue"), FilterOperator.EQ, "test1"));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(1, list.size());
		TestEntity entity = list.get(0);
		Assert.assertEquals("test1", entity.getStringValue());
	}

	/** EQ filter on a long attribute; id and longValue coincide in the fixture. */
	@Test
	public void testFilterLong() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("longValue"), FilterOperator.EQ, 2L));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(1, list.size());
		TestEntity entity = list.get(0);
		Assert.assertEquals(2, entity.getId().longValue());
		Assert.assertEquals(2L, entity.getLongValue());
	}

	/** EQ filter addressing a nested (embedded) int attribute via an attribute path. */
	@Test
	public void testFilterInt() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("embValue", "embIntValue"), FilterOperator.EQ, 2));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(1, list.size());
		TestEntity entity = list.get(0);
		Assert.assertEquals(2L, entity.getId().longValue());
		Assert.assertEquals(2, entity.getEmbValue().getEmbIntValue().intValue());
	}

	/** Boolean EQ true on a doubly-nested embeddable: exactly one fixture row matches. */
	@Test
	public void testFilterBooleanTrue() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("embValue", "nestedValue", "embBoolValue"), FilterOperator.EQ, true));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(1, list.size());
		TestEntity entity = list.get(0);
		Assert.assertTrue(entity.getEmbValue().getNestedValue().getEmbBoolValue());
	}

	/** Boolean EQ false: the complement of testFilterBooleanTrue (all but one row). */
	@Test
	public void testFilterBooleanFalse() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("embValue", "nestedValue", "embBoolValue"), FilterOperator.EQ, false));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(numTestEntities - 1, list.size());
		for (TestEntity entity : list) {
			Assert.assertFalse(entity.getEmbValue().getNestedValue().getEmbBoolValue());
		}
	}

	// Operator coverage below: EQ, NEQ, LT, LE, GT, GE, LIKE. Expected counts
	// follow from longValue taking the values 0..4 in the fixture.

	@Test
	public void testFilterEquals() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("longValue"), FilterOperator.EQ, 2L));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(1, list.size());
	}

	@Test
	public void testFilterNotEquals() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("longValue"), FilterOperator.NEQ, 2L));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(4, list.size());
	}

	@Test
	public void testFilterLess() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("longValue"), FilterOperator.LT, 2));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(2, list.size());
	}

	@Test
	public void testFilterLessEqual() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("longValue"), FilterOperator.LE, 2));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(3, list.size());
	}

	@Test
	public void testFilterGreater() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("longValue"), FilterOperator.GT, 1));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(3, list.size());
	}

	@Test
	public void testFilterGreaterEqual() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("longValue"), FilterOperator.GE, 1));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(4, list.size());
	}

	/** LIKE with no wildcard behaves as an exact match here. */
	@Test
	public void testFilterLike() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("stringValue"), FilterOperator.LIKE, "test2"));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(1, list.size());
	}

	/** LIKE with a trailing '%' wildcard matches every fixture row. */
	@Test
	public void testFilterLikeWildcards() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("stringValue"), FilterOperator.LIKE, "test%"));
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(5, list.size());
	}

	/** Middle page: offset/limit slice the result while meta still reports the total count. */
	@Test
	public void testPaging() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.setOffset(2L);
		querySpec.setLimit(2L);
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(2, list.size());
		Assert.assertEquals(2, list.get(0).getId().intValue());
		Assert.assertEquals(3, list.get(1).getId().intValue());
		PagedMetaInformation metaInformation = repo.getMetaInformation(list, querySpec);
		Assert.assertEquals(5, metaInformation.getTotalResourceCount().longValue());
	}

	/** First page (offset 0). */
	@Test
	public void testPagingFirst() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.setOffset(0L);
		querySpec.setLimit(3L);
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(3, list.size());
		Assert.assertEquals(0, list.get(0).getId().intValue());
		Assert.assertEquals(1, list.get(1).getId().intValue());
		Assert.assertEquals(2, list.get(2).getId().intValue());
		PagedMetaInformation metaInformation = repo.getMetaInformation(list, querySpec);
		Assert.assertEquals(5, metaInformation.getTotalResourceCount().longValue());
	}

	/** Last page: limit overshoots the end, only the remaining row is returned. */
	@Test
	public void testPagingLast() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.setOffset(4L);
		querySpec.setLimit(4L);
		List<TestEntity> list = repo.findAll(querySpec);
		Assert.assertEquals(1, list.size());
		Assert.assertEquals(4, list.get(0).getId().intValue());
		PagedMetaInformation metaInformation = repo.getMetaInformation(list, querySpec);
		Assert.assertEquals(5, metaInformation.getTotalResourceCount().longValue());
	}

	/**
	 * Without an explicit include, to-one relations must stay lazy
	 * (uninitialized Hibernate proxies). em.clear() first so nothing is
	 * already loaded in the persistence context.
	 */
	@Test
	public void testIncludeNoRelations() throws InstantiationException, IllegalAccessException {
		em.clear();
		List<TestEntity> list = repo.findAll(new QuerySpec(TestEntity.class));
		Assert.assertEquals(numTestEntities, list.size());
		for (TestEntity entity : list) {
			RelatedEntity relatedValue = entity.getOneRelatedValue();
			if (relatedValue != null)
				Assert.assertFalse(Hibernate.isInitialized(relatedValue));
		}
	}

	/** Filtering on a non-existent attribute must fail rather than be ignored. */
	@Test(expected = Exception.class)
	public void testFilterUnknownAttr() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addFilter(new FilterSpec(Arrays.asList("test"), FilterOperator.EQ, "test"));
		repo.findAll(querySpec);
	}

	/** Sparse field sets are not supported by this repository and must fail loudly. */
	@Test(expected = Exception.class)
	public void testSparseFieldSetNotSupported() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.includeField(Arrays.asList("test"));
		repo.findAll(querySpec);
	}

	/** Sorting on a non-existent attribute must fail rather than be ignored. */
	@Test(expected = Exception.class)
	public void testSortUnknownAttr() throws InstantiationException, IllegalAccessException {
		QuerySpec querySpec = new QuerySpec(TestEntity.class);
		querySpec.addSort(new SortSpec(Arrays.asList("test"), Direction.DESC));
		repo.findAll(querySpec);
	}

	// Capability switches: each disabled operation must throw
	// UnsupportedOperationException.

	@Test(expected = UnsupportedOperationException.class)
	public void testReadableFindAll() {
		repo.setReadable(false);
		repo.findAll(new QuerySpec(TestEntity.class));
	}

	@Test(expected = UnsupportedOperationException.class)
	public void testReadableFindOne() {
		repo.setReadable(false);
		repo.findOne(1L, new QuerySpec(TestEntity.class));
	}

	@Test(expected = UnsupportedOperationException.class)
	public void testReadableFindAllByIds() {
		repo.setReadable(false);
		repo.findAll(Arrays.asList(1L), new QuerySpec(TestEntity.class));
	}

	@Test(expected = UnsupportedOperationException.class)
	public void testUpdateable() {
		// Fetch a managed entity first (reads still allowed), then disable updates.
		List<TestEntity> list = repo.findAll(new QuerySpec(TestEntity.class));
		TestEntity entity = list.get(0);
		repo.setUpdateable(false);
		repo.save(entity);
	}

	@Test(expected = UnsupportedOperationException.class)
	public void testDeletable() {
		repo.setDeleteable(false);
		repo.delete(1L);
	}

	@Test(expected = UnsupportedOperationException.class)
	public void testCreatable() {
		repo.setCreateable(false);
		repo.save(new TestEntity());
	}
}
package pk.contender.earmouse;

import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;

import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;

/**
 * Implements a detailed view and installation option of user-selected Module in {@link pk.contender.earmouse.ModuleManagerActivity}
 *
 * @author Paul Klinkenberg <pklinken.development@gmail.com>
 */
public class ManagerDetailsFragment extends Fragment {

    /* SharedPreferences constants */
    private static final String PREFERENCES_MANAGERDETAILSFRAGMENT_ID = "preferences_ManagerDetailsFragment_id";

    /** Connection timeout (ms) so a malformed custom URL does not hang the user. */
    private static final int CONNECT_TIMEOUT_MS = 5000;

    private Context mCtx;
    /** The user-selected Module, or null when nothing is selected/loaded. */
    private Module mod = null;
    /** ID of the selected Module, used for fetching and storing; -1 means "none selected".
     * @see Module#id */
    private int id = -1;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_managerdetail, container, false);
        mCtx = getActivity();
        if(mCtx == null)
            Log.d("DEBUG", "Context is null in ManagerDetailsFragment onCreate()");
        return view;
    }

    /**
     * Restore fragment state.
     */
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if(getActivity().getClass() != ManagerDetailActivity.class) {
            // When running on a tablet restore the saved ID, on a handheld the activity
            // will be reconstructed from the Intent.
            SharedPreferences settings = mCtx.getSharedPreferences(Main.PREFS_NAME, Activity.MODE_PRIVATE);
            id = settings.getInt(PREFERENCES_MANAGERDETAILSFRAGMENT_ID, -1);
            // TODO: Would refetch a loaded module on screen rotate when saving the state would be sufficient.
            update();
        }
    }

    /**
     * Save fragment state.
     */
    @Override
    public void onPause() {
        super.onPause();
        SharedPreferences settings = mCtx.getSharedPreferences(Main.PREFS_NAME, Activity.MODE_PRIVATE);
        settings.edit().putInt(PREFERENCES_MANAGERDETAILSFRAGMENT_ID, id).apply();
    }

    void setId(int id) {
        this.id = id;
    }

    /**
     * Update UI, either sets the UI to the empty UI or fetches the selected Module from the server.
     */
    void update() {
        if(id < 0) {
            setEmpty();
        } else {
            new FetchModuleJsonFromServer().execute();
        }
    }

    /**
     * Installs the selected Module. After installation displays a Toast message and tells
     * {@link pk.contender.earmouse.ModuleManagerActivity} to update itself.
     * @param view The View that received the click event
     */
    public void onButtonClick(@SuppressWarnings("UnusedParameters") View view) {
        if(mod != null){
            // User request to install this module
            if(mod.writeModuleToJson()) {
                Toast toast = Toast.makeText(mCtx, mCtx.getString(R.string.toast_module_installed), Toast.LENGTH_LONG);
                toast.show();
                // FIXME: Should check for null here, apparently static isn't so holy.
                // Renamed loop variable: it used to shadow the field "mod".
                for(Module installed : ModuleManagerActivity.shownModuleList) {
                    if(installed.getId() == id) {
                        ModuleManagerActivity.mAdapter.remove(installed);
                        ModuleManagerActivity.mAdapter.notifyDataSetChanged();
                        break;
                    }
                }
                mod = null;
                id = -1;
            } else {
                Toast toast = Toast.makeText(mCtx, mCtx.getString(R.string.toast_error_installing_module), Toast.LENGTH_LONG);
                toast.show();
            }
        }
    }

    /**
     * Set the UI to only display a 'no module selected' message
     */
    void setEmpty() {
        TextView titleView = (TextView) getActivity().findViewById(R.id.module_title);
        TextView descriptionView = (TextView) getActivity().findViewById(R.id.module_description);
        View divider = getActivity().findViewById(R.id.button_divider);
        Button button = (Button) getActivity().findViewById(R.id.manager_button);
        TextView messageView = (TextView) getActivity().findViewById(R.id.message_text);

        if(titleView != null) {
            titleView.setText("");
            titleView.setVisibility(View.GONE);
        }
        if(descriptionView != null) {
            descriptionView.setText("");
            descriptionView.setVisibility(View.GONE);
        }
        if(divider != null) {
            divider.setVisibility(View.GONE);
        }
        if(button != null) {
            button.setText("");
            button.setVisibility(View.GONE);
        }
        if(messageView != null)
            messageView.setVisibility(View.VISIBLE);
        mod = null;
    }

    /**
     * Set the UI to display the standard layout.
     */
    void setNotEmpty() {
        TextView titleView = (TextView) getActivity().findViewById(R.id.module_title);
        TextView descriptionView = (TextView) getActivity().findViewById(R.id.module_description);
        View divider = getActivity().findViewById(R.id.button_divider);
        Button button = (Button) getActivity().findViewById(R.id.manager_button);
        TextView messageView = (TextView) getActivity().findViewById(R.id.message_text);

        if(titleView != null) {
            titleView.setText("");
            titleView.setVisibility(View.VISIBLE);
        }
        if(descriptionView != null) {
            descriptionView.setText("");
            descriptionView.setVisibility(View.VISIBLE);
        }
        if(divider != null) {
            divider.setVisibility(View.VISIBLE);
        }
        if(button != null) {
            button.setText("");
            button.setVisibility(View.VISIBLE);
        }
        if(messageView != null)
            messageView.setVisibility(View.GONE);
    }

    /**
     * Contacts a remote host, fetches the Module JSON for {@link ManagerDetailsFragment#id},
     * loads its data and sets up the UI.
     */
    private class FetchModuleJsonFromServer extends AsyncTask<Void, Void, Module> {

        // Captured in onPreExecute; intentionally shadows the outer mCtx so the
        // background thread uses the Activity that started the task.
        private Context mCtx;

        @Override
        protected void onPostExecute(Module result) {
            if(result == null) {
                Toast toast = Toast.makeText(mCtx, mCtx.getResources().getText(R.string.http_received_empty), Toast.LENGTH_LONG);
                toast.show();
                return;
            }
            mod = result;
            setNotEmpty();
            Activity callingActivity = getActivity();
            if(callingActivity != null) {
                // Can't use mCtx here as Activity may be destroyed once we reach this point
                TextView titleView = (TextView) callingActivity.findViewById(R.id.module_title);
                TextView descriptionView = (TextView) callingActivity.findViewById(R.id.module_description);
                if (titleView != null)
                    titleView.setText(mod.getTitle());
                if (descriptionView != null)
                    descriptionView.setText(mod.getDescription());
                Button button = (Button) callingActivity.findViewById(R.id.manager_button);
                if (button != null) {
                    button.setText(callingActivity.getResources().getText(R.string.manager_install_button));
                } else
                    Log.d("DEBUG", "Button is null");
            } else
                Log.d("DEBUG", "callingActivity is null in onPostExecute in FetchModuleJsonFromServer");
        }

        @Override
        protected void onCancelled(Module result) {
            // Task was cancelled so we cannot display the module, end activity and display a Toast message
            Activity callingActivity = getActivity();
            if(callingActivity != null) {
                // Can't use mCtx here as Activity may be destroyed once we reach this point
                Toast toast = Toast.makeText(callingActivity, mCtx.getResources().getText(R.string.http_error), Toast.LENGTH_LONG);
                toast.show();
                id = -1;
                setEmpty();
            }
        }

        // TODO: Set up a progressbar to display while contacting a server.

        /**
         * Downloads and parses the module JSON.
         *
         * @return the parsed {@link Module}, or null when the download failed
         *         (the task is also cancelled, so onCancelled runs instead of onPostExecute).
         */
        @Override
        protected Module doInBackground(Void... params) {
            String localizedModulePath = "module" + Main.getLocaleSuffix() + "_" + id + ".json";
            URL url;
            try {
                url = new URL(Main.generateModuleUrl(mCtx) + localizedModulePath);
            } catch (MalformedURLException e) {
                e.printStackTrace();
                cancel(false);
                // FIX: cancel() does not stop execution; previously control fell
                // through and dereferenced a null URL, crashing the worker thread.
                return null;
            }
            HttpURLConnection urlConn = null;
            try {
                urlConn = (HttpURLConnection) url.openConnection();
                // Limit the time the user sits and waits for a malformed custom URL to timeout
                urlConn.setConnectTimeout(CONNECT_TIMEOUT_MS);
                // UTF-8 made explicit; module JSON is text and this matches the Android default.
                InputStreamReader reader = new InputStreamReader(urlConn.getInputStream(), StandardCharsets.UTF_8);
                try {
                    return new Module(mCtx, reader);
                } finally {
                    try {
                        reader.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            } catch (IllegalStateException | IOException e) {
                e.printStackTrace();
                cancel(false);
                // FIX: same fall-through bug as above — a failed connection used to
                // continue into getInputStream() on a null/broken connection.
                return null;
            } finally {
                // FIX: release the connection's resources; it was never disconnected before.
                if (urlConn != null)
                    urlConn.disconnect();
            }
        }

        @Override
        protected void onPreExecute() {
            mCtx = getActivity();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tamaya.core.internal;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.cert.X509Certificate;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.Version;

/**
 * Minimal test double for an OSGi {@link Bundle}. Only the members the code
 * under test touches are functional (state, id, headers, symbolic name,
 * context, version, entry/class lookup); everything else throws
 * {@link UnsupportedOperationException} so unexpected use fails loudly.
 *
 * <p>Several lookup methods deliberately throw on "org.something.else" or
 * directory-style paths: those are tripwires asserting that the consumer
 * skips such entries rather than loading them.</p>
 *
 * @author William.Lieurance 2018-02-05
 */
public class MockBundle implements Bundle {

    // Mutable so tests can simulate lifecycle states; defaults to ACTIVE.
    private int state = Bundle.ACTIVE;

    @Override
    public int getState() {
        return state;
    }

    public void setState(int state) {
        this.state = state;
    }

    @Override
    public void start(int i) throws BundleException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public void start() throws BundleException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public void stop(int i) throws BundleException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public void stop() throws BundleException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public void update(InputStream in) throws BundleException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public void update() throws BundleException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public void uninstall() throws BundleException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    // Empty, never null: callers may iterate the headers without a null check.
    @Override
    public Dictionary<String, String> getHeaders() {
        return new Hashtable<>();
    }

    // Mutable so tests can distinguish bundles; defaults to 1.
    private long bundleId = 1L;

    @Override
    public long getBundleId() {
        return bundleId;
    }

    public void setBundleId(long bundleId) {
        this.bundleId = bundleId;
    }

    @Override
    public String getLocation() {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public ServiceReference<?>[] getRegisteredServices() {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public ServiceReference<?>[] getServicesInUse() {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public boolean hasPermission(Object o) {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public URL getResource(String string) {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public Dictionary<String, String> getHeaders(String string) {
        return new Hashtable<>();
    }

    @Override
    public String getSymbolicName() {
        return "MockBundle";
    }

    /**
     * Tripwire lookup: loading "org.something.else" or a directory-style name
     * must never be requested by the code under test. Any legitimate request
     * resolves to String.class as an arbitrary stand-in.
     */
    @Override
    public Class<?> loadClass(String string) throws ClassNotFoundException {
        if (string.contains("org.something.else") || string.endsWith("/")) {
            throw new UnsupportedOperationException("Requested class that should not be requested: " + string);
        }
        return String.class;
    }

    @Override
    public Enumeration<URL> getResources(String string) throws IOException {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    /**
     * Fixed listing: one directory-style entry, one valid service entry, one
     * entry the consumer is expected to reject (see loadClass/getEntry).
     */
    @Override
    public Enumeration<String> getEntryPaths(String string) {
        Vector<String> v = new Vector<>();
        v.add("META-INF/services/" + "someslash/");
        v.add("META-INF/services/" + "org.apache.tamaya");
        v.add("META-INF/services/" + "org.something.else");
        return v.elements();
    }

    /**
     * The services directory itself resolves to a dummy URL; tripwire names
     * throw; anything else maps to the test fixture "mockbundle.service"
     * on the classpath.
     */
    @Override
    public URL getEntry(String string) {
        if (string.equals("META-INF/services/")) {
            try {
                return new URL("file:///");
            } catch (MalformedURLException ex) {
                return null;
            }
        }
        if (string.contains("org.something.else") || string.endsWith("/")) {
            throw new UnsupportedOperationException("Requested entry that should not be requested: " + string);
        }
        return getClass().getClassLoader().getResource("mockbundle.service");
    }

    @Override
    public long getLastModified() {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public Enumeration<URL> findEntries(String string, String string1, boolean bln) {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    // Defaults to a MockBundleContext; replaceable per test.
    private BundleContext bundleContext = new MockBundleContext();

    @Override
    public BundleContext getBundleContext() {
        return bundleContext;
    }

    public void setBundleContext(BundleContext bundleContext) {
        this.bundleContext = bundleContext;
    }

    @Override
    public Map<X509Certificate, List<X509Certificate>> getSignerCertificates(int i) {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public Version getVersion() {
        return new Version(0, 0, 1);
    }

    @Override
    public <A> A adapt(Class<A> type) {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    @Override
    public File getDataFile(String string) {
        throw new UnsupportedOperationException("Not supported (MockBundle)");
    }

    // Ordering by bundle id, overflow-safe via Long.compare.
    @Override
    public int compareTo(Bundle o) {
        return Long.compare(this.getBundleId(), o.getBundleId());
    }
}
/**
 * Copyright "TBD", Metron Aviation & CSSI. All rights reserved.
 *
 * This computer Software was developed with the sponsorship of the U.S. Government
 * under Contract No. DTFAWA-10-D-00033, which has a copyright license in accordance with AMS 3.5-13.(c)(1).
 */
package gov.faa.ang.swac.common.flightmodeling;

import gov.faa.ang.swac.common.datatypes.REGION;
import gov.faa.ang.swac.common.datatypes.Timestamp;
import gov.faa.ang.swac.common.entities.Carrier;
import gov.faa.ang.swac.common.flightmodeling.FlightLeg.ModeledState;
import gov.faa.ang.swac.common.flightmodeling.jni.Airframe;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * Represents an IFR (Instrument Flight Rules) itinerary, that is... an {@link Aircraft} and a list of IFR {@link FlightLeg}s.<p>
 * For an itinerary with a single VFR (Visual Flight Rules) flight plan, see {@link ItineraryVFR}.
 * @see ItineraryVFR
 * @author Jason Femino - CSSI, Inc.
 */
public class Itinerary implements Airframe, Serializable, Cloneable
{
    /** Itinerary flavor: pure IFR, or VFR with a synthesized departure and/or arrival. */
    public enum Type
    {
        IFR,
        VFR_DEP,
        VFR_ARR,
        VFR_DEP_ARR
    }

    //---------------------
    // Static class members
    //---------------------

    // toString related members
    public static final String SEP = ",";
    public static final String TEXT_RECORD_KEY = "ITINERARY: Itinerary Type, NASPAC Itinerary Number, ID, Flight Plan Count\n" + Aircraft.TEXT_RECORD_KEY + "\n" + FlightLeg.TEXT_RECORD_KEY + "\n" + "...";

    /** Difference between departure and arrival times, in minutes, for synthesized VFR legs. */
    public static final int depArrTimeDiffMins = 13; // difference between dep and arr times in minutes
    /** Mean flying time, in minutes, used to seed the synthesized trajectory point. */
    public static final int meanFlyingTimeMins = 8;

    //-----------------------
    // Instance class members
    //-----------------------
    public Aircraft aircraft = null;
    public Type type = null;
    private REGION region;

    protected String filedEtmsAircraftCategory;
    protected String evolvedEtmsAircraftCategory;

    private Integer turnAroundCategory = null;
    private Integer pushbackCategory = null;
    private Integer taxiOutCategory = null;
    private Integer taxiInCategory = null;
    private Integer rerouteClearanceCategory = null;
    private Integer rampCategory = null;
    private boolean dayOverrideFlag = false;
    private List<FlightLeg> flightList = null;
    private String gateDesignGroupMask;

    /**
     * Creates an empty itinerary of the given type, holding a defensive copy
     * of the supplied aircraft.
     */
    public Itinerary(Aircraft aircraft, Type type)
    {
        super();
        this.type = type;
        this.flightList = new ArrayList<FlightLeg>();
        this.setAircraft(new Aircraft(aircraft));
    }

    /** Creates an itinerary of the given type with a single (copied) flight leg. */
    public Itinerary(Aircraft aircraft, FlightLeg flightLeg, Type type)
    {
        this(aircraft, type);
        this.addFlightLeg(new FlightLeg(flightLeg));
    }

    /**
     * Copy constructor: deep-copies the aircraft, all category fields, and the
     * flight legs.
     * NOTE(review): the cloned legs keep whatever parent airframe the clone
     * gives them; this constructor does not re-point them at {@code this} —
     * confirm intended.
     */
    public Itinerary(Itinerary org)
    {
        this.aircraft = (org.aircraft == null ? null : org.aircraft.clone());
        this.dayOverrideFlag = org.dayOverrideFlag;
        this.evolvedEtmsAircraftCategory = org.evolvedEtmsAircraftCategory;
        this.filedEtmsAircraftCategory = org.filedEtmsAircraftCategory;
        this.gateDesignGroupMask = org.gateDesignGroupMask;
        this.pushbackCategory = (org.pushbackCategory == null ? null : org.pushbackCategory.intValue());
        this.region = org.region;
        this.rerouteClearanceCategory = (org.rerouteClearanceCategory == null ? null : org.rerouteClearanceCategory.intValue());
        this.taxiInCategory = (org.taxiInCategory == null ? null : org.taxiInCategory.intValue());
        this.taxiOutCategory = (org.taxiOutCategory == null ? null : org.taxiOutCategory.intValue());
        this.turnAroundCategory = (org.turnAroundCategory == null ? null : org.turnAroundCategory.intValue());
        this.rampCategory = (org.rampCategory == null ? null : org.rampCategory.intValue());
        this.type = org.type;
        this.flightList = new ArrayList<FlightLeg>(org.flightList.size());
        for (FlightLeg fl : org.flightList)
        {
            this.flightList.add(fl.clone());
        }
    }

    /**
     * Converts a single-leg VFR itinerary: copies the first leg, assigns a
     * generic carrier ("ml " for military, otherwise "ga "), and synthesizes
     * the missing departure or arrival terminus data plus a single crossing
     * trajectory point.
     *
     * @throws IllegalStateException if the source itinerary has no flight legs
     */
    public Itinerary(Itinerary itineraryVfr, Type printType)
    {
        this(itineraryVfr.aircraft(), itineraryVfr.type());

        if ( itineraryVfr.flightLegs() == null || itineraryVfr.flightLegs().size() == 0 )
        {
            throw new IllegalStateException();
        }

        // Copy and modify the original flight leg and itinerary
        FlightLeg leg = new FlightLeg(itineraryVfr.flightList.get(0));
        this.addFlightLeg(leg);

        String airline = null;
        if (this.aircraft().atoUserClass().toLowerCase().contains("military"))
        {
            airline = "ml ";
        }
        else
        {
            airline = "ga ";
        }
        this.aircraft().setCarrierId(airline);
        this.aircraft().setEquipmentSuffix(new EquipmentSuffix());

        TrajectoryPoint tp = new TrajectoryPoint();
        tp.setTimeToNextResource(meanFlyingTimeMins * 60 * 1000); // minutes -> milliseconds
        tp.setResourceInfo(leg.departure());

        if ( (printType == Type.VFR_DEP || printType == Type.VFR_DEP_ARR) &&
             (this.type() == Type.VFR_DEP || this.type() == Type.VFR_DEP_ARR) )
        {
            Terminus departure = this.flightLegs().get(0).departure();
            Timestamp depTime = departure.runwayDateTime();
            leg.departure().setGateDateTime(depTime); // TODO: is this right? do VFR's skip surface modeling? IFR's use gate time and VFR's use runway?
            leg.arrival().setRunwayDateTime(depTime.minuteAdd(depArrTimeDiffMins));
            leg.arrival().setGateDateTime(depTime.minuteAdd(depArrTimeDiffMins));
            leg.arrival().setAirportName("????");
        }

        if ( (printType == Type.VFR_ARR || printType == Type.VFR_DEP_ARR) &&
             (this.type() == Type.VFR_ARR || this.type() == Type.VFR_DEP_ARR) )
        {
            leg.arrival().setGateDateTime(leg.arrival().runwayDateTime());
            leg.departure().setGateDateTime(leg.arrival().runwayDateTime().minuteAdd(-1 * depArrTimeDiffMins));
            leg.departure().setRunwayDateTime(leg.arrival().runwayDateTime().minuteAdd(-1 * depArrTimeDiffMins));
            leg.departure().setAirportName("????");
        }

        tp.setTimestamp(leg.departure().gateDateTime());
        leg.setCrossings(Arrays.asList(new TrajectoryPoint[] { tp }), false);
        leg.setModeledState(ModeledState.CROSSINGS);
    }

    /** Appends a leg and points its parent airframe at this itinerary. */
    public void addFlightLeg(FlightLeg flightLeg)
    {
        this.flightList.add(flightLeg);
        flightLeg.setParentAirframe(this);
    }

    /** Removes the leg at {@code index} and clears its parent airframe. */
    public void removeFlightLeg(int index)
    {
        FlightLeg flightLeg = this.flightList.remove(index);
        flightLeg.setParentAirframe(null);
    }

    /**
     * Two itineraries are equal when their types match and their flight-leg
     * lists are element-wise equal. The aircraft and category fields are
     * deliberately not compared (pre-existing behavior).
     */
    @Override
    public boolean equals(Object o)
    {
        if (!(o instanceof Itinerary))
        {
            return false;
        }

        Itinerary itinerary = (Itinerary)o;

        if (this.type != itinerary.type)
        {
            return false;
        }

        // Compare List objects
        if ((this.flightList == null && itinerary.flightList != null) ||
            (this.flightList != null && itinerary.flightList == null) ||
            (this.flightList != null && itinerary.flightList != null && this.flightList.size() != itinerary.flightList.size()))
        {
            return false;
        }

        // Both lists are null (or non-null, and have the same number of objects)
        if (this.flightList != null)
        {
            // Compare lists, object-by-object
            for (int i = 0; i < this.flightList.size(); i++)
            {
                if (!this.flightList.get(i).equals(itinerary.flightList.get(i)))
                {
                    return false;
                }
            }
        }

        return true;
    }

    /**
     * Added to honor the {@link Object#hashCode()} contract (equals was
     * previously overridden without hashCode, which breaks hash-based
     * collections). Hashes only fields that equal objects are guaranteed to
     * share: the type and the leg count. FlightLeg elements are intentionally
     * not hashed because their hashCode consistency is not visible here.
     */
    @Override
    public int hashCode()
    {
        return Objects.hash(this.type, this.flightList == null ? -1 : this.flightList.size());
    }

    /**
     * Gets the {@link Type} of this {@link ItineraryVFR}'s {@link FlightLeg}.<p>
     * @see {@link FlightLeg#vfrType(FlightLeg)}
     */
    public Type type()
    {
        return this.type;
    }

    /**
     * Is this a VFR Or not?
     */
    public boolean isVFR()
    {
        return (this.type != Type.IFR);
    }

    /**
     * Gets the unique serial number given to each {@link Itinerary} object upon creation.<p>
     * <b>Not to be confused with NASPAC {@code itineraryNumber}.</b>
     */
    public Integer id()
    {
        return this.flightList.get(0).flightId();
    }

    /** Read-only view of the flight legs. */
    public List<FlightLeg> flightLegs()
    {
        return Collections.unmodifiableList(this.flightList);
    }

    public Aircraft aircraft()
    {
        return this.aircraft;
    }

    public void setAircraft(Aircraft aircraft)
    {
        this.aircraft = aircraft;
    }

    public REGION getRegion()
    {
        return region;
    }

    public void setRegion(REGION region)
    {
        this.region = region;
    }

    /**
     * Sets the {@link FlightLeg}s for this {@link ItineraryIFR} (replacing any
     * existing ones) and re-points each leg's parent airframe at this itinerary.
     */
    public void setFlightLegs(List<FlightLeg> flightLegs)
    {
        if (this.flightList != null)
        {
            this.flightList.clear();
        }
        else
        {
            this.flightList = new ArrayList<FlightLeg>(flightLegs.size());
        }
        this.flightList.addAll(flightLegs);
        for (FlightLeg flightLeg : flightLegs)
        {
            flightLeg.setParentAirframe(this);
        }
    }

    @Override
    public String toString()
    {
        StringBuilder output = new StringBuilder();
        output.append("ITINERARY: " + (this.type == null ? "" : this.type) + SEP +
            " " + this.id() + SEP +
            " " + (this.flightList == null ? "0" : this.flightList.size()) + "\n" +
            (this.aircraft == null ? "" : this.aircraft.toString()) + "\n");

        if ( this.flightList != null )
        {
            for (int i = 0; i < this.flightList.size(); i++)
            {
                output.append(this.flightList.get(i).toString());
                // Don't append a newline for the last line
                if ( i < this.flightList.size() - 1 )
                {
                    output.append("\n");
                }
            }
        }

        return output.toString();
    }

    /**
     * Splits {@link Itinerary} into two, returning a new object similar to the original...
     * except that the {@link FlightLeg}s before "flightLegIndex" will remain with the
     * original object, and {@link FlightLeg}s at and after "flightLegIndex" will belong to the new object.<p>
     * This method will have no effect (return <code>null</code>) if the original {@link Itinerary} has 0 {@link FlightLeg}s,
     * if the index is out of range, or if this itinerary is not IFR.
     * @param flightLegIndex
     * @return new {@link Itinerary} (or <code>null</code>)
     */
    public Itinerary split(int flightLegIndex)
    {
        // Only valid for IFR itineraries
        if (this.type != Type.IFR)
        {
            return null;
        }

        if (this.flightList.size() > 0 && flightLegIndex >= 0 && flightLegIndex < this.flightList.size())
        {
            // Move all FlightLegs from flightLegIndex to the end into the new
            // itinerary. A subList view copy replaces the old one-at-a-time
            // remove loop (accidental O(n^2)) and its off-by-one capacity
            // hint; the resulting lists are identical.
            List<FlightLeg> tail = this.flightList.subList(flightLegIndex, this.flightList.size());
            List<FlightLeg> newFlightLegs = new ArrayList<FlightLeg>(tail);
            tail.clear(); // removes the moved legs from this itinerary

            // Create a new Itinerary
            Itinerary newItinerary = new Itinerary(this.aircraft, Type.IFR);
            newItinerary.setFlightLegs(newFlightLegs);
            return newItinerary;
        }

        return null;
    }

    public String getFiledEtmsAircraftCategory()
    {
        return filedEtmsAircraftCategory;
    }

    public void setFiledEtmsAircraftCategory(String filedEtmsAircraftCategory)
    {
        this.filedEtmsAircraftCategory = filedEtmsAircraftCategory;
    }

    public String getEvolvedEtmsAircraftCategory()
    {
        return evolvedEtmsAircraftCategory;
    }

    public void setEvolvedEtmsAircraftCategory(String evolvedEtmsAircraftCategory)
    {
        this.evolvedEtmsAircraftCategory = evolvedEtmsAircraftCategory;
    }

    @Override
    public Integer airframeId()
    {
        return this.id();
    }

    /** Activation is the gate time of the first leg's departure. */
    @Override
    public Timestamp activationTimestamp()
    {
        return this.flightList.get(0).departure().gateDateTime();
    }

    @Override
    public Integer tailNumber()
    {
        return this.id();
    }

    @Override
    public String airlineIndicator()
    {
        return this.aircraft().carrierId();
    }

    public Carrier getCarrier()
    {
        return this.aircraft().getCarrier();
    }

    public void setCarrier(Carrier carrier)
    {
        this.aircraft.setCarrier(carrier);
    }

    @Override
    public String aircraftType()
    {
        String acTypeStr = this.aircraft().filedBadaAircraftType(); // XXX: CSS 7/6/2011 Minor business rule change (sim always uses BADA AC type). May have impact on regression testing
        return acTypeStr == null || acTypeStr.isEmpty() || acTypeStr.equals("HELI") ? "----" : acTypeStr;
    }

    @Override
    public EquipmentSuffix equipmentSuffix()
    {
        return this.aircraft().equipmentSuffix();
    }

    @Override
    public String equipmentSuffixString()
    {
        return this.equipmentSuffix().toString();
    }

    @Override
    public Integer turnAroundCategory()
    {
        return turnAroundCategory;
    }

    public void setTurnAroundCategory(Integer turnAroundCategory)
    {
        this.turnAroundCategory = turnAroundCategory;
    }

    @Override
    public Integer pushbackCategory()
    {
        return pushbackCategory;
    }

    public void setPushbackCategory(Integer pushbackCategory)
    {
        this.pushbackCategory = pushbackCategory;
    }

    @Override
    public Integer taxiOutCategory()
    {
        return taxiOutCategory;
    }

    public void setTaxiOutCategory(Integer taxiOutCategory)
    {
        this.taxiOutCategory = taxiOutCategory;
    }

    @Override
    public Integer taxiInCategory()
    {
        return taxiInCategory;
    }

    public void setTaxiInCategory(Integer taxiInCategory)
    {
        this.taxiInCategory = taxiInCategory;
    }

    public Integer rerouteClearanceCategory()
    {
        return rerouteClearanceCategory;
    }

    public void setRerouteClearanceCategory(Integer rerouteClearanceCategory)
    {
        this.rerouteClearanceCategory = rerouteClearanceCategory;
    }

    public Integer rampCategory()
    {
        return rampCategory;
    }

    public void setRampCategory(Integer rampCategory)
    {
        this.rampCategory = rampCategory;
    }

    @Override
    public boolean dayOverrideFlag()
    {
        return dayOverrideFlag;
    }

    public void setDayOverrideFlag(boolean flag)
    {
        dayOverrideFlag = flag;
    }

    @Override
    public List<FlightLeg> flightList()
    {
        return Collections.unmodifiableList(flightList);
    }

    public String getGateDesignGroupMask()
    {
        return gateDesignGroupMask;
    }

    public void setGateDesignGroupMask(String gateDesignGroupMask)
    {
        this.gateDesignGroupMask = gateDesignGroupMask;
    }

    /** Orders by activation timestamp, then by id for a stable tie-break. */
    @Override
    public int compareTo(Airframe o)
    {
        int retVal = this.activationTimestamp().compareTo(o.activationTimestamp());
        if (retVal == 0)
        {
            return this.id().compareTo(o.airframeId());
        }
        return retVal;
    }

    /** Deserialization is intentionally unsupported for this type. */
    @Override
    public void readItem(BufferedReader reader) throws IOException
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void writeItem(PrintWriter writer) throws IOException
    {
        writer.println(this.toString());
    }

    @Override
    public Itinerary clone()
    {
        return new Itinerary(this);
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package io.druid.segment.incremental;

import com.carrotsearch.junitbenchmarks.AbstractBenchmark;
import com.carrotsearch.junitbenchmarks.BenchmarkOptions;
import com.carrotsearch.junitbenchmarks.Clock;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.guava.Sequences;
import io.druid.java.util.common.parsers.ParseException;
import io.druid.query.Druids;
import io.druid.query.FinalizeResultsQueryRunner;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.Result;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.timeseries.TimeseriesQuery;
import io.druid.query.timeseries.TimeseriesQueryEngine;
import io.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import io.druid.query.timeseries.TimeseriesQueryRunnerFactory;
import io.druid.query.timeseries.TimeseriesResultValue;
import io.druid.segment.IncrementalIndexSegment;
import io.druid.segment.Segment;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Concurrency benchmark for {@code OnheapIncrementalIndex}: racing ingestion
 * threads against timeseries query threads over the same in-memory segment,
 * parameterized over the stock index and a ConcurrentHashMap-backed variant.
 *
 * Extending AbstractBenchmark means only runs if explicitly called
 */
@RunWith(Parameterized.class)
public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark
{
  // One count aggregator plus a long-sum and double-sum pair per dimension,
  // all reading the synthetic "Dim_<i>" columns produced by getLongRow().
  private static AggregatorFactory[] factories;
  static final int dimensionCount = 5;

  static {
    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
    ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
      ingestAggregatorFactories.add(
          new LongSumAggregatorFactory(
              String.format("sumResult%s", i),
              String.format("Dim_%s", i)
          )
      );
      ingestAggregatorFactories.add(
          new DoubleSumAggregatorFactory(
              String.format("doubleSumResult%s", i),
              String.format("Dim_%s", i)
          )
      );
    }
    factories = ingestAggregatorFactories.toArray(new AggregatorFactory[0]);
  }

  /**
   * Variant of OnheapIncrementalIndex whose per-row aggregator arrays live in
   * a ConcurrentHashMap keyed by row index instead of the parent's storage.
   */
  private static final class MapIncrementalIndex extends OnheapIncrementalIndex
  {
    // Source of fresh row indices; incremented under a CAS-style retry loop below.
    private final AtomicInteger indexIncrement = new AtomicInteger(0);
    // rowIndex -> aggregators for that row.
    ConcurrentHashMap<Integer, Aggregator[]> indexedMap = new ConcurrentHashMap<Integer, Aggregator[]>();

    public MapIncrementalIndex(
        IncrementalIndexSchema incrementalIndexSchema,
        boolean deserializeComplexMetrics,
        boolean reportParseExceptions,
        boolean concurrentEventAdd,
        boolean sortFacts,
        int maxRowCount
    )
    {
      super(
          incrementalIndexSchema,
          deserializeComplexMetrics,
          reportParseExceptions,
          concurrentEventAdd,
          sortFacts,
          maxRowCount
      );
    }

    public MapIncrementalIndex(
        long minTimestamp,
        Granularity gran,
        AggregatorFactory[] metrics,
        int maxRowCount
    )
    {
      super(
          new IncrementalIndexSchema.Builder()
              .withMinTimestamp(minTimestamp)
              .withQueryGranularity(gran)
              .withMetrics(metrics)
              .build(),
          true,
          true,
          false,
          true,
          maxRowCount
      );
    }

    @Override
    protected Aggregator[] concurrentGet(int offset)
    {
      // All get operations should be fine
      return indexedMap.get(offset);
    }

    @Override
    protected void concurrentSet(int offset, Aggregator[] value)
    {
      indexedMap.put(offset, value);
    }

    /**
     * Adds a row: reuses the aggregators of an existing (time, dims) key, or
     * allocates a fresh aggregator array and races to publish it. Losers of
     * the publish race aggregate into the winner's array and free their own.
     *
     * @throws IndexSizeExceededException when the row cap is hit for a new key
     */
    @Override
    protected Integer addToFacts(
        AggregatorFactory[] metrics,
        boolean deserializeComplexMetrics,
        boolean reportParseExceptions,
        InputRow row,
        AtomicInteger numEntries,
        TimeAndDims key,
        ThreadLocal<InputRow> rowContainer,
        Supplier<InputRow> rowSupplier
    ) throws IndexSizeExceededException
    {
      final Integer priorIdex = getFacts().getPriorIndex(key);

      Aggregator[] aggs;

      if (null != priorIdex) {
        // Key already present: aggregate into its existing array.
        aggs = indexedMap.get(priorIdex);
      } else {
        aggs = new Aggregator[metrics.length];

        for (int i = 0; i < metrics.length; i++) {
          final AggregatorFactory agg = metrics[i];
          aggs[i] = agg.factorize(
              makeColumnSelectorFactory(agg, rowSupplier, deserializeComplexMetrics)
          );
        }
        // Claim an unused row index; retry if another thread already owns it.
        Integer rowIndex;
        do {
          rowIndex = indexIncrement.incrementAndGet();
        } while (null != indexedMap.putIfAbsent(rowIndex, aggs));

        // Last ditch sanity checks
        if (numEntries.get() >= maxRowCount && getFacts().getPriorIndex(key) == TimeAndDims.EMPTY_ROW_INDEX) {
          throw new IndexSizeExceededException("Maximum number of rows reached");
        }
        final int prev = getFacts().putIfAbsent(key, rowIndex);
        if (TimeAndDims.EMPTY_ROW_INDEX == prev) {
          numEntries.incrementAndGet();
        } else {
          // We lost a race
          aggs = indexedMap.get(prev);
          // Free up the misfire
          indexedMap.remove(rowIndex);
          // This is expected to occur ~80% of the time in the worst scenarios
        }
      }

      rowContainer.set(row);

      // Per-aggregator locking: queries may be reading these same aggregators.
      for (Aggregator agg : aggs) {
        synchronized (agg) {
          try {
            agg.aggregate();
          } catch (ParseException e) {
            // "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
            if (reportParseExceptions) {
              throw e;
            }
          }
        }
      }

      rowContainer.set(null);

      return numEntries.get();
    }

    @Override
    public int getLastRowIndex()
    {
      return indexIncrement.get() - 1;
    }
  }

  /** Benchmarks both the stock on-heap index and the map-backed variant. */
  @Parameterized.Parameters
  public static Collection<Object[]> getParameters()
  {
    return ImmutableList.<Object[]>of(
        new Object[]{OnheapIncrementalIndex.class},
        new Object[]{MapIncrementalIndex.class}
    );
  }

  private final Class<? extends OnheapIncrementalIndex> incrementalIndex;

  public OnheapIncrementalIndexBenchmark(Class<? extends OnheapIncrementalIndex> incrementalIndex)
  {
    this.incrementalIndex = incrementalIndex;
  }

  /**
   * Builds a synthetic row with dimensionCount long-valued "Dim_<i>" columns,
   * each set to rowID.
   */
  private static MapBasedInputRow getLongRow(long timestamp, int rowID, int dimensionCount)
  {
    List<String> dimensionList = new ArrayList<String>(dimensionCount);
    ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
    for (int i = 0; i < dimensionCount; i++) {
      String dimName = String.format("Dim_%d", i);
      dimensionList.add(dimName);
      builder.put(dimName, new Integer(rowID).longValue());
    }
    return new MapBasedInputRow(timestamp, dimensionList, builder.build());
  }

  /**
   * Races taskCount ingestion tasks (elementsPerThread rows each, unique
   * timestamps per task iteration, constant rowID 1) against an equal number
   * of timeseries queries over the live segment, then verifies the final
   * aggregate totals. The index class under test is instantiated reflectively
   * from the @Parameterized constructor argument.
   */
  @Ignore
  @Test
  @BenchmarkOptions(callgc = true, clock = Clock.REAL_TIME, warmupRounds = 10, benchmarkRounds = 20)
  public void testConcurrentAddRead()
      throws InterruptedException, ExecutionException, NoSuchMethodException, IllegalAccessException,
             InvocationTargetException, InstantiationException
  {
    final int taskCount = 30;
    final int concurrentThreads = 3;
    final int elementsPerThread = 1 << 15;

    // NOTE: shadows the class field of the same name (the field holds the
    // Class object; this local holds the instance under test).
    final IncrementalIndex incrementalIndex = this.incrementalIndex.getConstructor(
        IncrementalIndexSchema.class,
        Boolean.TYPE,
        Boolean.TYPE,
        Boolean.TYPE,
        Boolean.TYPE,
        Integer.TYPE
    ).newInstance(
        new IncrementalIndexSchema.Builder().withMetrics(factories).build(),
        true,
        true,
        false,
        true,
        elementsPerThread * taskCount
    );
    // Query-side aggregators re-aggregate the ingested sum columns by name.
    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(dimensionCount + 1);
    queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < dimensionCount; ++i) {
      queryAggregatorFactories.add(
          new LongSumAggregatorFactory(
              String.format("sumResult%s", i),
              String.format("sumResult%s", i)
          )
      );
      queryAggregatorFactories.add(
          new DoubleSumAggregatorFactory(
              String.format("doubleSumResult%s", i),
              String.format("doubleSumResult%s", i)
          )
      );
    }

    final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(
        Executors.newFixedThreadPool(
            concurrentThreads,
            new ThreadFactoryBuilder()
                .setDaemon(false)
                .setNameFormat("index-executor-%d")
                .setPriority(Thread.MIN_PRIORITY)
                .build()
        )
    );
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(
        Executors.newFixedThreadPool(
            concurrentThreads,
            new ThreadFactoryBuilder()
                .setDaemon(false)
                .setNameFormat("query-executor-%d")
                .build()
        )
    );
    final long timestamp = System.currentTimeMillis();
    final Interval queryInterval = new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
    final List<ListenableFuture<?>> indexFutures = new LinkedList<>();
    final List<ListenableFuture<?>> queryFutures = new LinkedList<>();
    final Segment incrementalIndexSegment = new IncrementalIndexSegment(incrementalIndex, null);
    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        new TimeseriesQueryEngine(),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER
    );
    // Flags used to detect that ingestion and querying actually overlapped.
    final AtomicInteger currentlyRunning = new AtomicInteger(0);
    final AtomicBoolean concurrentlyRan = new AtomicBoolean(false);
    final AtomicBoolean someoneRan = new AtomicBoolean(false);
    for (int j = 0; j < taskCount; j++) {
      indexFutures.add(
          indexExecutor.submit(
              new Runnable()
              {
                @Override
                public void run()
                {
                  currentlyRunning.incrementAndGet();
                  try {
                    for (int i = 0; i < elementsPerThread; i++) {
                      incrementalIndex.add(getLongRow(timestamp + i, 1, dimensionCount));
                    }
                  }
                  catch (IndexSizeExceededException e) {
                    throw Throwables.propagate(e);
                  }
                  currentlyRunning.decrementAndGet();
                  someoneRan.set(true);
                }
              }
          )
      );

      queryFutures.add(
          queryExecutor.submit(
              new Runnable()
              {
                @Override
                public void run()
                {
                  QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
                      factory.createRunner(incrementalIndexSegment),
                      factory.getToolchest()
                  );
                  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                                .dataSource("xxx")
                                                .granularity(Granularities.ALL)
                                                .intervals(ImmutableList.of(queryInterval))
                                                .aggregators(queryAggregatorFactories)
                                                .build();
                  Map<String, Object> context = new HashMap<String, Object>();
                  for (Result<TimeseriesResultValue> result :
                      Sequences.toList(
                          runner.run(query, context),
                          new LinkedList<Result<TimeseriesResultValue>>()
                      )
                      ) {
                    // Only assert once at least one ingestion task finished,
                    // otherwise the index may legitimately still be empty.
                    if (someoneRan.get()) {
                      Assert.assertTrue(result.getValue().getDoubleMetric("doubleSumResult0") > 0);
                    }
                  }
                  if (currentlyRunning.get() > 0) {
                    concurrentlyRan.set(true);
                  }
                }
              }
          )
      );
    }
    List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
    allFutures.addAll(queryFutures);
    allFutures.addAll(indexFutures);
    Futures.allAsList(allFutures).get();
    //Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get());
    queryExecutor.shutdown();
    indexExecutor.shutdown();
    // Final, quiescent query: totals must match the full ingested data set.
    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
        factory.createRunner(incrementalIndexSegment),
        factory.getToolchest()
    );
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                                  .dataSource("xxx")
                                  .granularity(Granularities.ALL)
                                  .intervals(ImmutableList.of(queryInterval))
                                  .aggregators(queryAggregatorFactories)
                                  .build();
    Map<String, Object> context = new HashMap<String, Object>();
    List<Result<TimeseriesResultValue>> results = Sequences.toList(
        runner.run(query, context),
        new LinkedList<Result<TimeseriesResultValue>>()
    );
    final int expectedVal = elementsPerThread * taskCount;
    for (Result<TimeseriesResultValue> result : results) {
      // "rows" counts distinct (timestamp, dims) keys: every task writes the
      // same elementsPerThread keys, so the count is elementsPerThread.
      Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue());
      for (int i = 0; i < dimensionCount; ++i) {
        Assert.assertEquals(
            String.format("Failed long sum on dimension %d", i),
            expectedVal,
            result.getValue().getLongMetric(String.format("sumResult%s", i)).intValue()
        );
        Assert.assertEquals(
            String.format("Failed double sum on dimension %d", i),
            expectedVal,
            result.getValue().getDoubleMetric(String.format("doubleSumResult%s", i)).intValue()
        );
      }
    }
  }
}
/** * */ package br.com.swconsultoria.efd.contribuicoes.registros.blocoF; /** * @author Yuri Lemes * */ public class RegistroF205 { private final String reg = "F205"; private String vl_cus_inc_acum_ant; private String vl_cus_inc_per_esc; private String vl_cus_inc_acum; private String vl_exc_bc_cus_inc_acum; private String vl_bc_cus_inc; private String cst_pis; private String aliq_pis_percentual; private String vl_cred_pis_acum; private String vl_cred_pis_desc_ant; private String vl_cred_pis_desc; private String vl_cred_pis_desc_fut; private String cst_cofins; private String aliq_cofins_percentual; private String vl_cred_cofins_acum; private String vl_cred_cofins_desc_ant; private String vl_cred_cofins_desc; private String vl_cred_cofins_desc_fut; /** * @return the reg */ public String getReg() { return reg; } /** * @return the vl_cus_inc_acum_ant */ public String getVl_cus_inc_acum_ant() { return vl_cus_inc_acum_ant; } /** * @return the vl_cus_inc_per_esc */ public String getVl_cus_inc_per_esc() { return vl_cus_inc_per_esc; } /** * @return the vl_cus_inc_acum */ public String getVl_cus_inc_acum() { return vl_cus_inc_acum; } /** * @return the vl_exc_bc_cus_inc_acum */ public String getVl_exc_bc_cus_inc_acum() { return vl_exc_bc_cus_inc_acum; } /** * @return the vl_bc_cus_inc */ public String getVl_bc_cus_inc() { return vl_bc_cus_inc; } /** * @return the cst_pis */ public String getCst_pis() { return cst_pis; } /** * @return the aliq_pis_percentual */ public String getAliq_pis_percentual() { return aliq_pis_percentual; } /** * @return the vl_cred_pis_acum */ public String getVl_cred_pis_acum() { return vl_cred_pis_acum; } /** * @return the vl_cred_pis_desc_ant */ public String getVl_cred_pis_desc_ant() { return vl_cred_pis_desc_ant; } /** * @return the vl_cred_pis_desc */ public String getVl_cred_pis_desc() { return vl_cred_pis_desc; } /** * @return the vl_cred_pis_desc_fut */ public String getVl_cred_pis_desc_fut() { return vl_cred_pis_desc_fut; } /** * @return the 
cst_cofins */
    public String getCst_cofins() { return cst_cofins; }

    // NOTE(review): field names follow the Brazilian SPED fiscal layout
    // (PIS/COFINS credit records) — presumably M-block register fields; confirm
    // against the record definition this bean is mapped to.

    /** @return the aliq_cofins_percentual */
    public String getAliq_cofins_percentual() { return aliq_cofins_percentual; }

    /** @return the vl_cred_cofins_acum */
    public String getVl_cred_cofins_acum() { return vl_cred_cofins_acum; }

    /** @return the vl_cred_cofins_desc_ant */
    public String getVl_cred_cofins_desc_ant() { return vl_cred_cofins_desc_ant; }

    /** @return the vl_cred_cofins_desc */
    public String getVl_cred_cofins_desc() { return vl_cred_cofins_desc; }

    /** @return the vl_cred_cofins_desc_fut */
    public String getVl_cred_cofins_desc_fut() { return vl_cred_cofins_desc_fut; }

    /** @param vl_cus_inc_acum_ant the vl_cus_inc_acum_ant to set */
    public void setVl_cus_inc_acum_ant(String vl_cus_inc_acum_ant) { this.vl_cus_inc_acum_ant = vl_cus_inc_acum_ant; }

    /** @param vl_cus_inc_per_esc the vl_cus_inc_per_esc to set */
    public void setVl_cus_inc_per_esc(String vl_cus_inc_per_esc) { this.vl_cus_inc_per_esc = vl_cus_inc_per_esc; }

    /** @param vl_cus_inc_acum the vl_cus_inc_acum to set */
    public void setVl_cus_inc_acum(String vl_cus_inc_acum) { this.vl_cus_inc_acum = vl_cus_inc_acum; }

    /** @param vl_exc_bc_cus_inc_acum the vl_exc_bc_cus_inc_acum to set */
    public void setVl_exc_bc_cus_inc_acum(String vl_exc_bc_cus_inc_acum) { this.vl_exc_bc_cus_inc_acum = vl_exc_bc_cus_inc_acum; }

    /** @param vl_bc_cus_inc the vl_bc_cus_inc to set */
    public void setVl_bc_cus_inc(String vl_bc_cus_inc) { this.vl_bc_cus_inc = vl_bc_cus_inc; }

    /** @param cst_pis the cst_pis to set */
    public void setCst_pis(String cst_pis) { this.cst_pis = cst_pis; }

    /** @param aliq_pis_percentual the aliq_pis_percentual to set */
    public void setAliq_pis_percentual(String aliq_pis_percentual) { this.aliq_pis_percentual = aliq_pis_percentual; }

    /** @param vl_cred_pis_acum the vl_cred_pis_acum to set */
    public void setVl_cred_pis_acum(String vl_cred_pis_acum) { this.vl_cred_pis_acum = vl_cred_pis_acum; }

    /** @param vl_cred_pis_desc_ant the vl_cred_pis_desc_ant to set */
    public void setVl_cred_pis_desc_ant(String vl_cred_pis_desc_ant) { this.vl_cred_pis_desc_ant = vl_cred_pis_desc_ant; }

    /** @param vl_cred_pis_desc the vl_cred_pis_desc to set */
    public void setVl_cred_pis_desc(String vl_cred_pis_desc) { this.vl_cred_pis_desc = vl_cred_pis_desc; }

    /** @param vl_cred_pis_desc_fut the vl_cred_pis_desc_fut to set */
    public void setVl_cred_pis_desc_fut(String vl_cred_pis_desc_fut) { this.vl_cred_pis_desc_fut = vl_cred_pis_desc_fut; }

    /** @param cst_cofins the cst_cofins to set */
    public void setCst_cofins(String cst_cofins) { this.cst_cofins = cst_cofins; }

    /** @param aliq_cofins_percentual the aliq_cofins_percentual to set */
    public void setAliq_cofins_percentual(String aliq_cofins_percentual) { this.aliq_cofins_percentual = aliq_cofins_percentual; }

    /** @param vl_cred_cofins_acum the vl_cred_cofins_acum to set */
    public void setVl_cred_cofins_acum(String vl_cred_cofins_acum) { this.vl_cred_cofins_acum = vl_cred_cofins_acum; }

    /** @param vl_cred_cofins_desc_ant the vl_cred_cofins_desc_ant to set */
    public void setVl_cred_cofins_desc_ant(String vl_cred_cofins_desc_ant) { this.vl_cred_cofins_desc_ant = vl_cred_cofins_desc_ant; }

    /** @param vl_cred_cofins_desc the vl_cred_cofins_desc to set */
    public void setVl_cred_cofins_desc(String vl_cred_cofins_desc) { this.vl_cred_cofins_desc = vl_cred_cofins_desc; }

    /** @param vl_cred_cofins_desc_fut the vl_cred_cofins_desc_fut to set */
    public void setVl_cred_cofins_desc_fut(String vl_cred_cofins_desc_fut) { this.vl_cred_cofins_desc_fut = vl_cred_cofins_desc_fut; }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.integration.remoting;

import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ActiveMQClient;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.tests.util.ServiceTestBase;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.junit.Assert;
import org.junit.Test;

/**
 * Base class for integration tests that verify which network interfaces a
 * server acceptor binds to, and whether a client connector can (or cannot)
 * reach it. Concrete subclasses supply the transport factory class names and
 * parameter keys via the abstract methods at the bottom.
 */
public abstract class NetworkAddressTestBase extends ServiceTestBase
{
   // Constants -----------------------------------------------------

   // Attributes ----------------------------------------------------

   // Static --------------------------------------------------------

   static
   {
      try
      {
         Map<NetworkInterface, InetAddress> map = NetworkAddressTestBase.getAddressForEachNetworkInterface();
         StringBuilder s = new StringBuilder("using network settings:\n");
         Set<Entry<NetworkInterface, InetAddress>> set = map.entrySet();
         for (Entry<NetworkInterface, InetAddress> entry : set)
         {
            s.append(entry.getKey().getDisplayName()).append(": ").append(entry.getValue().getHostAddress()).append("\n");
         }
         // FIX: the summary string was built but never emitted, leaving the
         // loop above as dead code. Print it so test logs show the interfaces
         // the run actually saw.
         System.out.println(s);
      }
      catch (Exception e)
      {
         e.printStackTrace();
      }
   }

   /**
    * Maps every network interface of this machine to its first IPv4 address.
    * Interfaces without any IPv4 address are omitted.
    *
    * @return one {@code Inet4Address} per interface that has one
    * @throws Exception if the interfaces cannot be enumerated
    */
   public static Map<NetworkInterface, InetAddress> getAddressForEachNetworkInterface() throws Exception
   {
      Map<NetworkInterface, InetAddress> map = new HashMap<NetworkInterface, InetAddress>();
      Enumeration<NetworkInterface> ifaces = NetworkInterface.getNetworkInterfaces();
      while (ifaces.hasMoreElements())
      {
         NetworkInterface iface = ifaces.nextElement();
         Enumeration<InetAddress> enumeration = iface.getInetAddresses();
         while (enumeration.hasMoreElements())
         {
            InetAddress inetAddress = enumeration.nextElement();
            if (inetAddress instanceof Inet4Address)
            {
               // keep only the first IPv4 address of each interface
               map.put(iface, inetAddress);
               break;
            }
         }
      }
      return map;
   }

   // Constructors --------------------------------------------------

   // Public --------------------------------------------------------

   /** Acceptor and connector share the same host: connection must succeed. */
   @Test
   public void testConnectToServerWithSameHost() throws Exception
   {
      Map<NetworkInterface, InetAddress> map = NetworkAddressTestBase.getAddressForEachNetworkInterface();
      if (map.size() > 0)
      {
         Set<Entry<NetworkInterface, InetAddress>> set = map.entrySet();
         Iterator<Entry<NetworkInterface, InetAddress>> iterator = set.iterator();
         InetAddress address = iterator.next().getValue();
         String host = address.getHostAddress();
         testConnection(host, host, true, 0);
      }
   }

   /** Acceptor bound to 0.0.0.0 must be reachable from every interface. */
   @Test
   public void testConnectToServerAcceptingAllHosts() throws Exception
   {
      Map<NetworkInterface, InetAddress> map = NetworkAddressTestBase.getAddressForEachNetworkInterface();
      Set<Entry<NetworkInterface, InetAddress>> set = map.entrySet();
      for (Entry<NetworkInterface, InetAddress> entry : set)
      {
         String host = entry.getValue().getHostAddress();
         testConnection("0.0.0.0", host, true, 0);
      }
   }

   /** Connecting through a different interface than the acceptor's must fail. */
   @Test
   public void testConnectToServerAcceptingOnlyAnotherHost() throws Exception
   {
      Map<NetworkInterface, InetAddress> map = NetworkAddressTestBase.getAddressForEachNetworkInterface();
      if (map.size() <= 1)
      {
         // FIX: the guard requires two interfaces, but the message claimed one.
         System.err.println("There must be at least 2 network interfaces: test will not be executed");
         return;
      }

      Set<Entry<NetworkInterface, InetAddress>> set = map.entrySet();
      Iterator<Entry<NetworkInterface, InetAddress>> iterator = set.iterator();
      Entry<NetworkInterface, InetAddress> acceptorEntry = iterator.next();
      Entry<NetworkInterface, InetAddress> connectorEntry = iterator.next();
      // FIX: use getHostAddress() like every other call site; getHostName()
      // may reverse-resolve to a name that binds differently than intended.
      testConnection(acceptorEntry.getValue().getHostAddress(), connectorEntry.getValue().getHostAddress(), false, 0);
   }

   /** Connection with an explicit client-side local port must succeed and be visible server-side. */
   @Test
   public void testConnectToServerUsingLocalPort() throws Exception
   {
      Map<NetworkInterface, InetAddress> map = NetworkAddressTestBase.getAddressForEachNetworkInterface();
      if (map.size() <= 1)
      {
         // FIX: message brought in line with the actual guard (two interfaces).
         System.err.println("There must be at least 2 network interfaces: test will not be executed");
         return;
      }

      Set<Entry<NetworkInterface, InetAddress>> set = map.entrySet();
      Iterator<Entry<NetworkInterface, InetAddress>> iterator = set.iterator();
      Entry<NetworkInterface, InetAddress> entry = iterator.next();
      String host = entry.getValue().getHostAddress();
      testConnection(host, host, true, 7777);
   }

   /** An acceptor given a comma-separated host list must accept connections on each listed host. */
   @Test
   public void testConnectorToServerAcceptingAListOfHosts() throws Exception
   {
      Map<NetworkInterface, InetAddress> map = NetworkAddressTestBase.getAddressForEachNetworkInterface();
      if (map.size() <= 1)
      {
         System.err.println("There must be at least 2 network interfaces: test will not be executed");
         return;
      }

      Set<Entry<NetworkInterface, InetAddress>> set = map.entrySet();
      Iterator<Entry<NetworkInterface, InetAddress>> iterator = set.iterator();
      Entry<NetworkInterface, InetAddress> entry1 = iterator.next();
      Entry<NetworkInterface, InetAddress> entry2 = iterator.next();

      String listOfHosts = entry1.getValue().getHostAddress() + ", " + entry2.getValue().getHostAddress();

      testConnection(listOfHosts, entry1.getValue().getHostAddress(), true, 0);
      testConnection(listOfHosts, entry2.getValue().getHostAddress(), true, 0);
   }

   /** A host NOT in the acceptor's list must be rejected while listed hosts connect. */
   @Test
   public void testConnectorToServerAcceptingAListOfHosts_2() throws Exception
   {
      Map<NetworkInterface, InetAddress> map = NetworkAddressTestBase.getAddressForEachNetworkInterface();
      if (map.size() <= 2)
      {
         System.err.println("There must be at least 3 network interfaces: test will not be executed");
         return;
      }

      Set<Entry<NetworkInterface, InetAddress>> set = map.entrySet();
      Iterator<Entry<NetworkInterface, InetAddress>> iterator = set.iterator();
      Entry<NetworkInterface, InetAddress> entry1 = iterator.next();
      Entry<NetworkInterface, InetAddress> entry2 = iterator.next();
      Entry<NetworkInterface, InetAddress> entry3 = iterator.next();

      String listOfHosts = entry1.getValue().getHostAddress() + ", " + entry2.getValue().getHostAddress();

      testConnection(listOfHosts, entry1.getValue().getHostAddress(), true, 0);
      testConnection(listOfHosts, entry2.getValue().getHostAddress(), true, 0);
      testConnection(listOfHosts, entry3.getValue().getHostAddress(), false, 0);
   }

   /**
    * Starts a server whose acceptor binds to {@code acceptorHost}, then tries
    * to connect a client through {@code connectorHost}.
    *
    * @param acceptorHost host (or comma-separated host list) the acceptor binds to
    * @param connectorHost host the client connector targets
    * @param mustConnect whether the connection attempt is expected to succeed
    * @param localPort client-side local port to use, or 0 for ephemeral
    * @throws Exception on unexpected server or client failures
    */
   public void testConnection(final String acceptorHost, final String connectorHost, final boolean mustConnect, final int localPort) throws Exception
   {
      System.out.println("acceptor=" + acceptorHost + ", connector=" + connectorHost + ", mustConnect=" + mustConnect);

      Map<String, Object> params = new HashMap<String, Object>();
      params.put(getHostPropertyKey(), acceptorHost);
      TransportConfiguration acceptorConfig = new TransportConfiguration(getAcceptorFactoryClassName(), params);
      Set<TransportConfiguration> transportConfigs = new HashSet<TransportConfiguration>();
      transportConfigs.add(acceptorConfig);

      Configuration config = createDefaultConfig(true);
      config.setAcceptorConfigurations(transportConfigs);
      ActiveMQServer messagingService = createServer(false, config);

      try
      {
         messagingService.start();

         params = new HashMap<String, Object>();
         params.put(getHostPropertyKey(), connectorHost);
         if (localPort != 0)
         {
            params.put(getLocalPortProperty(), localPort);
         }
         TransportConfiguration connectorConfig = new TransportConfiguration(getConnectorFactoryClassName(), params);
         ServerLocator locator = addServerLocator(ActiveMQClient.createServerLocatorWithoutHA(connectorConfig));

         if (mustConnect)
         {
            ClientSessionFactory sf = createSessionFactory(locator);
            if (localPort != 0)
            {
               // verify the server really sees the client's chosen local port
               Iterator<RemotingConnection> iterator = messagingService.getRemotingService().getConnections().iterator();
               Assert.assertTrue("no connection created", iterator.hasNext());
               String address = iterator.next().getTransportConnection().getRemoteAddress();
               Assert.assertTrue(address.endsWith(":" + localPort));
            }
            sf.close();
            System.out.println("connection OK");
         }
         else
         {
            try
            {
               locator.createSessionFactory();
               Assert.fail("session creation must fail because connector must not be able to connect to the server bound to another network interface");
            }
            catch (Exception e)
            {
               // expected: the connector targets an interface the acceptor is not bound to
            }
         }
      }
      finally
      {
         messagingService.stop();
      }
   }

   // Package protected ---------------------------------------------

   // Protected -----------------------------------------------------

   /** @return fully-qualified class name of the acceptor transport factory */
   protected abstract String getAcceptorFactoryClassName();

   /** @return fully-qualified class name of the connector transport factory */
   protected abstract String getConnectorFactoryClassName();

   /** @return transport parameter key that carries the host name/address */
   protected abstract String getHostPropertyKey();

   /** @return transport parameter key that carries the client local port */
   protected abstract String getLocalPortProperty();

   // Private -------------------------------------------------------

   // Inner classes -------------------------------------------------
}
/*L
 * Copyright Washington University in St. Louis
 * Copyright SemanticBits
 * Copyright Persistent Systems
 * Copyright Krishagni
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/catissue-tools/LICENSE.txt for details.
 */

/**
 * <p>Title: ParticipantLookupAction Class></p>
 * <p>Description: This Action Class invokes the Participant Lookup Algorithm
 * and gets matching participants.</p>
 * Copyright: Copyright (c) year
 * Company: Washington University, School of Medicine, St. Louis.
 * @author vaishali_khandelwal
 * @Created on May 19, 2006
 */

package edu.wustl.clinportal.action;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;

import edu.wustl.clinportal.bizlogic.ParticipantBizLogic;
import edu.wustl.clinportal.domain.Participant;
import edu.wustl.clinportal.util.global.Constants;
import edu.wustl.common.action.BaseAction;
import edu.wustl.common.actionForm.AbstractActionForm;
import edu.wustl.common.beans.SessionDataBean;
import edu.wustl.common.domain.AbstractDomainObject;
import edu.wustl.common.factory.AbstractFactoryConfig;
import edu.wustl.common.factory.IDomainObjectFactory;
import edu.wustl.common.factory.IFactory;
import edu.wustl.common.lookup.DefaultLookupResult;
import edu.wustl.common.util.Utility;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.dao.exception.DAOException;

/**
 * Struts action that runs the participant lookup algorithm and either shows
 * matching participants in a data grid or forwards to the add-participant flow.
 *
 * @author deepali_ahirrao
 */
public class ParticipantLookupAction extends BaseAction
{

	/**
	 * Runs the lookup: if a participant was already picked from the match list,
	 * forwards to participant-select; otherwise builds the domain object from
	 * the form and (optionally) runs the matching biz-logic.
	 *
	 * @param mapping object of ActionMapping
	 * @param form object of ActionForm
	 * @param request object of HttpServletRequest
	 * @param response object of HttpServletResponse
	 * @throws Exception generic exception
	 * @return value for ActionForward object
	 */
	@Override
	public ActionForward executeAction(ActionMapping mapping, ActionForm form,
			HttpServletRequest request, HttpServletResponse response) throws Exception
	{
		String target = null; // NOPMD - DD anomaly
		Logger.out.debug("Participant Id :" + request.getParameter("participantId"));
		//checks weather participant is selected from the list and so forwarding to next action instead of participant lookup.
		boolean isForward = checkForwardToParticipantSelectAction(request);
		if (isForward)
		{
			target = "participantSelect";
		}
		else
		{
			AbstractActionForm abstractForm = (AbstractActionForm) form;
			IDomainObjectFactory domainObjectFactory = AbstractFactoryConfig.getInstance()
					.getDomainObjectFactory();
			AbstractDomainObject abstractDomain = domainObjectFactory.getDomainObject(abstractForm
					.getFormId(), abstractForm);
			Participant participant = (Participant) abstractDomain;
			// NOTE(review): the lookup is hard-disabled here — the real check
			// (isCallToLookupLogicNeeded) is commented out. Confirm this is
			// intentional before removing the dead branch below.
			boolean isCallToLkupLgic = false; //isCallToLookupLogicNeeded(participant);
			if (isCallToLkupLgic)
			{
				IFactory factory = AbstractFactoryConfig.getInstance().getBizLogicFactory();
				ParticipantBizLogic bizlogic = (ParticipantBizLogic) factory
						.getBizLogic(Constants.PARTICIPANT_FORM_ID);
				SessionDataBean sessionDataBean = getSessionData(request);
				List matchPartpantLst = bizlogic.getListOfMatchingParticipants(participant,
						sessionDataBean, null);
				if (matchPartpantLst != null && !matchPartpantLst.isEmpty())
				{
					ActionMessages messages = new ActionMessages();
					messages.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage(
							"participant.lookup.success",
							"Submit was not successful because some matching participants found."));
					//Creating the column headings for Data Grid
					List columnList = getColumnHeadingList(bizlogic);
					request.setAttribute(
							edu.wustl.common.util.global.Constants.SPREADSHEET_COLUMN_LIST,
							columnList);
					//Get the Participant List in Data Grid Format
					List pcpantDisplayLst = getParticipantDisplayList(matchPartpantLst);
					request.setAttribute(Constants.SPREADSHEET_DATA_LIST, pcpantDisplayLst);
					target = Constants.PARTICIPANT_LOOKUP_SUCCESS;
					if (request.getAttribute("continueLookup") == null)
					{
						saveMessages(request, messages);
					}
				}
				// if no participant match found then add the participant in system
				else
				{
					target = Constants.PARTICIPANT_ADD_FORWARD;
				}
			}
			else
			{
				target = Constants.PARTICIPANT_ADD_FORWARD;
			}
			//if any matching participants are there then show the participants otherwise add the participant
			//setting the Submitted_for and Forward_to variable in request
			setRequestAttributes(request);
		}
		Logger.out.debug("target:" + target);
		return mapping.findForward(target);
	}

	/**
	 * Decides whether a participant was already chosen from the match list, in
	 * which case the request should be forwarded straight to participant-select.
	 *
	 * @param request current HTTP request
	 * @return true when a concrete (non-empty, non-"0") participantId is present
	 *         and no lookup continuation is pending
	 */
	private boolean checkForwardToParticipantSelectAction(HttpServletRequest request)
	{
		boolean isForward = false; // NOPMD - DD anomaly
		String participantId = "participantId";
		if (request.getParameter("continueLookup") == null
				&& request.getAttribute("continueLookup") == null
				&& request.getParameter(participantId) != null
				&& !request.getParameter(participantId).equals("null")
				&& !request.getParameter(participantId).equals("")
				&& !request.getParameter(participantId).equals("0"))
		{
			Logger.out.info("inside the participant mapping");
			isForward = true;
		}
		return isForward;
	}

	/**
	 * Copies the SUBMITTED_FOR and FORWARD_TO request parameters into request
	 * attributes (when present) and clears the participantId attribute.
	 *
	 * @param request current HTTP request
	 */
	private void setRequestAttributes(HttpServletRequest request)
	{
		if (request.getParameter(Constants.SUBMITTED_FOR) != null
				&& !request.getParameter(edu.wustl.common.util.global.Constants.SUBMITTED_FOR)
						.equals(""))
		{
			request.setAttribute(Constants.SUBMITTED_FOR, request
					.getParameter(Constants.SUBMITTED_FOR));
		}
		if (request.getParameter(Constants.FORWARD_TO) != null
				&& !request.getParameter(Constants.FORWARD_TO).equals(""))
		{
			request.setAttribute(Constants.FORWARD_TO, request.getParameter(Constants.FORWARD_TO));
		}
		request.setAttribute("participantId", "");
	}

	/**
	 * A lookup is only worthwhile when at least one identifying field is filled.
	 * NOTE(review): currently unused — its call in executeAction is commented out.
	 *
	 * @param participant participant built from the submitted form
	 * @return false when every identifying field is empty, true otherwise
	 */
	private boolean isCallToLookupLogicNeeded(Participant participant)
	{
		boolean flag = true; // NOPMD - DD anomaly
		if ((participant.getFirstName() == null || participant.getFirstName().length() == 0)
				&& (participant.getMiddleName() == null || participant.getMiddleName().length() == 0)
				&& (participant.getLastName() == null || participant.getLastName().length() == 0)
				&& (participant.getSocialSecurityNumber() == null || participant
						.getSocialSecurityNumber().length() == 0)
				&& participant.getBirthDate() == null
				&& (participant.getParticipantMedicalIdentifierCollection() == null || participant
						.getParticipantMedicalIdentifierCollection().size() == 0))
		{
			flag = false;
		}
		return flag;
	}

	/**
	 * This Function creates the Column Headings for Data Grid.
	 *
	 * @param bizlogic instance of ParticipantBizLogic
	 * @return List Column List
	 * @throws DAOException if the display names cannot be resolved
	 */
	private List getColumnHeadingList(ParticipantBizLogic bizlogic) throws DAOException
	{
		//Creating the column list which is used in Data grid to display column headings
		String[] columnHeaderList = new String[]{Constants.PARTICIPANT_LAST_NAME,
				Constants.PARTICIPANT_FIRST_NAME, Constants.PARTICIPANT_MIDDLE_NAME,
				Constants.PARTICIPANT_BIRTH_DATE, Constants.PARTICIPANT_DEATH_DATE,
				Constants.PARTICIPANT_VITAL_STATUS, Constants.PARTICIPANT_GENDER,
				Constants.PARTICIPANT_SOCIAL_SECURITY_NUMBER,
				Constants.PARTICIPANT_MEDICAL_RECORD_NO};
		List columnList = new ArrayList();
		Logger.out.info("column List header size ;" + columnHeaderList.length);
		for (String columnHeader : columnHeaderList)
		{
			columnList.add(columnHeader);
		}
		Logger.out.info("column List size ;" + columnList.size());
		List displayList = bizlogic.getColumnList(columnList);
		return displayList;
	}

	/**
	 * This functions creates Participant List with each participant information
	 * with the match probability.
	 *
	 * @param participantList list of DefaultLookupResult match entries
	 * @return List of per-participant row lists for the data grid
	 */
	private List getParticipantDisplayList(List participantList)
	{
		List pcpantDisplaylst = new ArrayList();
		Iterator itr = participantList.iterator();
		String medicalRecordNo = "";
		//String siteId = "";
		while (itr.hasNext())
		{
			DefaultLookupResult result = (DefaultLookupResult) itr.next();
			Participant participant = (Participant) result.getObject();
			List participantInfo = new ArrayList();
			participantInfo.add(Utility.toString(participant.getLastName()));
			participantInfo.add(Utility.toString(participant.getFirstName()));
			participantInfo.add(Utility.toString(participant.getMiddleName()));
			participantInfo.add(Utility.toString(participant.getBirthDate()));
			participantInfo.add(Utility.toString(participant.getDeathDate()));
			participantInfo.add(Utility.toString(participant.getVitalStatus()));
			participantInfo.add(Utility.toString(participant.getGender()));
			participantInfo.add(Utility.toString(participant.getSocialSecurityNumber()));
			if (participant.getParticipantMedicalIdentifierCollection() != null)
			{
				Iterator pcpantMedIdItr = participant.getParticipantMedicalIdentifierCollection()
						.iterator();
				while (pcpantMedIdItr.hasNext())
				{
					// FIX: next() was previously called twice per iteration,
					// which skipped every other identifier and threw
					// NoSuchElementException for odd-sized collections.
					// Only the LAST identifier ends up displayed — presumably
					// intended; confirm.
					// NOTE(review): the (String) cast assumes the collection
					// holds Strings, not ParticipantMedicalIdentifier objects
					// — verify against the domain model.
					medicalRecordNo = (String) pcpantMedIdItr.next();
				}
			}
			participantInfo.add(Utility.toString(medicalRecordNo));
			participantInfo.add(participant.getId());
			pcpantDisplaylst.add(participantInfo);
		}
		return pcpantDisplaylst;
	}
}
/* * Copyright (C) 2013-2016 microG Project Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.microg.tools.ui; import android.content.Context; import android.graphics.drawable.Drawable; import android.support.annotation.DrawableRes; import android.support.annotation.PluralsRes; import android.support.annotation.StringRes; import android.support.v4.content.res.ResourcesCompat; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.ImageView; import android.widget.TextView; public class Condition { @DrawableRes private final int iconRes; private final Drawable icon; @StringRes private final int titleRes; @PluralsRes private final int titlePluralsRes; private final CharSequence title; @StringRes private final int summaryRes; @PluralsRes private final int summaryPluralsRes; private final CharSequence summary; @StringRes private final int firstActionTextRes; @PluralsRes private final int firstActionPluralsRes; private final CharSequence firstActionText; private final View.OnClickListener firstActionListener; @StringRes private final int secondActionTextRes; @PluralsRes private final int secondActionPluralsRes; private final CharSequence secondActionText; private final View.OnClickListener secondActionListener; private final Evaluation evaluation; private boolean evaluated = false; private boolean evaluating = false; private int evaluatedPlurals = -1; private boolean active; 
Condition(Builder builder) { icon = builder.icon; title = builder.title; summary = builder.summary; firstActionText = builder.firstActionText; firstActionListener = builder.firstActionListener; secondActionText = builder.secondActionText; secondActionListener = builder.secondActionListener; summaryRes = builder.summaryRes; iconRes = builder.iconRes; firstActionTextRes = builder.firstActionTextRes; secondActionTextRes = builder.secondActionTextRes; titleRes = builder.titleRes; evaluation = builder.evaluation; titlePluralsRes = builder.titlePluralsRes; summaryPluralsRes = builder.summaryPluralsRes; firstActionPluralsRes = builder.firstActionPluralsRes; secondActionPluralsRes = builder.secondActionPluralsRes; } View createView(final Context context, ViewGroup container) { LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); View view = inflater.inflate(R.layout.condition_card, container, false); Drawable icon = getIcon(context); if (icon != null) ((ImageView) view.findViewById(android.R.id.icon)).setImageDrawable(icon); ((TextView) view.findViewById(android.R.id.title)).setText(getTitle(context)); ((TextView) view.findViewById(android.R.id.summary)).setText(getSummary(context)); Button first = (Button) view.findViewById(R.id.first_action); first.setText(getFirstActionText(context)); first.setOnClickListener(getFirstActionListener()); CharSequence secondActionText = getSecondActionText(context); if (secondActionText != null) { Button second = (Button) view.findViewById(R.id.second_action); second.setText(secondActionText); second.setOnClickListener(getSecondActionListener()); second.setVisibility(View.VISIBLE); } final View detailGroup = view.findViewById(R.id.detail_group); final ImageView expandIndicator = (ImageView) view.findViewById(R.id.expand_indicator); View.OnClickListener expandListener = new View.OnClickListener() { @Override public void onClick(View v) { if (detailGroup.getVisibility() == View.VISIBLE) { 
expandIndicator.setImageDrawable(ResourcesCompat.getDrawable(context.getResources(), R.drawable.ic_expand_more, context.getTheme())); detailGroup.setVisibility(View.GONE); } else { expandIndicator.setImageDrawable(ResourcesCompat.getDrawable(context.getResources(), R.drawable.ic_expand_less, context.getTheme())); detailGroup.setVisibility(View.VISIBLE); } } }; view.findViewById(R.id.collapsed_group).setOnClickListener(expandListener); expandIndicator.setOnClickListener(expandListener); view.setTag(this); return view; } public Drawable getIcon(Context context) { if (iconRes != 0) { return ResourcesCompat.getDrawable(context.getResources(), iconRes, context.getTheme()); } return icon; } public CharSequence getTitle(Context context) { if (titleRes != 0) { return context.getString(titleRes); } if (titlePluralsRes != 0) { return context.getResources().getQuantityString(titlePluralsRes, evaluatedPlurals); } return title; } public CharSequence getSummary(Context context) { if (summaryRes != 0) { return context.getString(summaryRes); } if (summaryPluralsRes != 0) { return context.getResources().getQuantityString(summaryPluralsRes, evaluatedPlurals); } return summary; } public View.OnClickListener getFirstActionListener() { return firstActionListener; } public CharSequence getFirstActionText(Context context) { if (firstActionTextRes != 0) { return context.getString(firstActionTextRes); } if (firstActionPluralsRes != 0) { return context.getResources().getQuantityString(firstActionPluralsRes, evaluatedPlurals); } return firstActionText; } public View.OnClickListener getSecondActionListener() { return secondActionListener; } public CharSequence getSecondActionText(Context context) { if (secondActionTextRes != 0) { return context.getString(secondActionTextRes); } if (secondActionPluralsRes != 0) { return context.getResources().getQuantityString(secondActionPluralsRes, evaluatedPlurals); } return secondActionText; } public synchronized boolean willBeEvaluating() { if 
(!evaluating && !evaluated && evaluation != null) { return evaluating = true; } else { return false; } } public boolean isEvaluated() { return evaluated || evaluation == null; } public synchronized void evaluate(Context context) { active = evaluation == null || evaluation.isActive(context); evaluatedPlurals = evaluation.getPluralsCount(); evaluated = true; evaluating = false; } public boolean isActive(Context context) { if (!evaluated && evaluation != null) evaluate(context); return active; } public void resetEvaluated() { this.evaluated = false; } public static abstract class Evaluation { public abstract boolean isActive(Context context); public int getPluralsCount() { return 1; } } public static class Builder { @DrawableRes private int iconRes; private Drawable icon; @StringRes private int titleRes; @PluralsRes private int titlePluralsRes; private CharSequence title; @StringRes private int summaryRes; @PluralsRes private int summaryPluralsRes; private CharSequence summary; @StringRes private int firstActionTextRes; @PluralsRes private int firstActionPluralsRes; private CharSequence firstActionText; private View.OnClickListener firstActionListener; @StringRes private int secondActionTextRes; @PluralsRes private int secondActionPluralsRes; private CharSequence secondActionText; private View.OnClickListener secondActionListener; private Evaluation evaluation; public Builder() { } public Builder icon(Drawable val) { icon = val; return this; } public Builder icon(@DrawableRes int val) { iconRes = val; return this; } public Builder title(CharSequence val) { title = val; return this; } public Builder title(@StringRes int val) { titleRes = val; return this; } public Builder titlePlurals(@PluralsRes int val) { titlePluralsRes = val; return this; } public Builder summary(CharSequence val) { summary = val; return this; } public Builder summary(@StringRes int val) { summaryRes = val; return this; } public Builder summaryPlurals(@PluralsRes int val) { summaryPluralsRes = val; 
return this; } public Builder firstAction(CharSequence text, View.OnClickListener listener) { firstActionText = text; firstActionListener = listener; return this; } public Builder firstAction(@StringRes int val, View.OnClickListener listener) { firstActionTextRes = val; firstActionListener = listener; return this; } public Builder firstActionPlurals(@PluralsRes int val, View.OnClickListener listener) { firstActionPluralsRes = val; firstActionListener = listener; return this; } public Builder secondAction(CharSequence text, View.OnClickListener listener) { secondActionText = text; secondActionListener = listener; return this; } public Builder secondAction(@StringRes int val, View.OnClickListener listener) { secondActionTextRes = val; secondActionListener = listener; return this; } public Builder secondActionPlurals(@PluralsRes int val, View.OnClickListener listener) { secondActionPluralsRes = val; secondActionListener = listener; return this; } public Builder evaluation(Evaluation evaluation) { this.evaluation = evaluation; return this; } public Condition build() { return new Condition(this); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package tests.api.java.security;

import dalvik.annotation.TestLevel;
import dalvik.annotation.TestTargetClass;
import dalvik.annotation.TestTargetNew;

import java.io.File;
import java.io.FilePermission;
import java.security.AccessControlContext;
import java.security.AccessControlException;
import java.security.AccessController;
import java.security.Permission;
import java.security.PermissionCollection;
import java.security.Permissions;
import java.security.ProtectionDomain;
import java.security.SecurityPermission;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.PropertyPermission;

import javax.security.auth.Subject;
import javax.security.auth.SubjectDomainCombiner;

/**
 * Unit tests for {@link java.security.AccessControlContext}: both constructors,
 * {@code checkPermission}, {@code equals}, {@code hashCode} and
 * {@code getDomainCombiner}.
 */
@TestTargetClass(AccessControlContext.class)
public class AccessControlContextTest extends junit.framework.TestCase {

    /**
     * SecurityManager that denies exactly the two SecurityPermissions listed
     * in {@code notAllowed} ("createAccessControlContext" and
     * "getDomainCombiner") and grants everything else.
     *
     * NOTE(review): this helper is not referenced anywhere in this visible
     * class body — presumably kept for use by tests elsewhere; confirm before
     * removing.
     */
    private class TestSecurityManager extends SecurityManager {
        // Permissions this manager refuses; every other permission passes.
        private ArrayList<Permission> notAllowed;

        public TestSecurityManager() {
            notAllowed = new ArrayList<Permission>(2);
            notAllowed.add(new SecurityPermission("createAccessControlContext"));
            notAllowed.add(new SecurityPermission("getDomainCombiner"));
        }

        public void checkPermission(Permission p) {
            // Deny only the explicitly listed permissions.
            for (Iterator<Permission> i = notAllowed.iterator(); i.hasNext(); ) {
                if (i.next().equals(p)) {
                    throw new SecurityException();
                }
            }
        }
    }

    /**
     * @tests java.security.AccessControlContext#AccessControlContext(java.security.ProtectionDomain[])
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "AccessControlContext",
        args = {java.security.ProtectionDomain[].class}
    )
    public void test_Constructor$Ljava_security_ProtectionDomain() {
        // Test for method
        // java.security.AccessControlContext(java.security.ProtectionDomain [])

        // Create a permission which is not normally granted
        final Permission perm = new PropertyPermission("java.class.path",
                "read");
        PermissionCollection col = perm.newPermissionCollection();
        col.add(perm);
        final ProtectionDomain pd = new ProtectionDomain(null, col);
        AccessControlContext acc = new AccessControlContext(
                new ProtectionDomain[] { pd });
        // A context built from a domain that grants perm must allow it.
        try {
            acc.checkPermission(perm);
        } catch (SecurityException e) {
            fail("Should have permission");
        }

        // Same check from a freshly started thread: the explicit context
        // should still grant the permission regardless of calling thread.
        final boolean[] result = new boolean[] { false };
        Thread th = new Thread(new Runnable() {
            public void run() {
                AccessControlContext acc = new AccessControlContext(
                        new ProtectionDomain[] { pd });
                try {
                    acc.checkPermission(perm);
                    result[0] = true;
                } catch (SecurityException e) {
                }
            }
        });
        th.start();
        try {
            th.join();
        } catch (InterruptedException e) {
            // ignore
        }
        assertTrue("Test 1: Thread should have permission", result[0]);

        // Null parameter checking
        try {
            new AccessControlContext(null);
            fail("Test 2: NullPointerException expected.");
        } catch (Exception ex) {
            // expected
        }
    }

    /**
     * @tests java.security.AccessControlContext#AccessControlContext(java.security.AccessControlContext,
     *        java.security.DomainCombiner)
     */
    @TestTargetNew(
        level = TestLevel.PARTIAL_COMPLETE,
        method = "AccessControlContext",
        args = {java.security.AccessControlContext.class, java.security.DomainCombiner.class}
    )
    public void test_ConstructorLjava_security_AccessControlContextLjava_security_DomainCombiner() {
        AccessControlContext context = AccessController.getContext();
        // A null DomainCombiner must be accepted.
        try {
            new AccessControlContext(context, null);
        } catch (NullPointerException e) {
            fail("should not throw NullPointerException");
        }
        // A real combiner (SubjectDomainCombiner) must be accepted as well.
        try {
            new AccessControlContext(context, new SubjectDomainCombiner(
                    new Subject()));
        } catch (Exception e) {
            fail("should not throw Exception");
        }
    }

    /**
     * @tests java.security.AccessControlException#checkPermission(Permission)
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "checkPermission",
        args = {java.security.Permission.class}
    )
    public void test_checkPermission() {
        char s = File.separatorChar;
        // Grant a mix of exact-file and wildcard file permissions.
        FilePermission perm[] = new FilePermission[7];
        perm[0] = new FilePermission("test1.file", "write");
        perm[1] = new FilePermission("test1.file", "read, execute, delete");
        perm[2] = new FilePermission(s + "tmp" + s + "test" + s + "*",
                "read, write");
        perm[3] = new FilePermission(s + "tmp" + s + "test" + s
                + "collection.file", "read");
        perm[4] = new FilePermission(s + "windows" + "*", "delete");
        perm[5] = new FilePermission("aFile.file", "read");
        perm[6] = new FilePermission("hello.file", "write");
        Permissions perms = new Permissions();
        for (int i = 0; i < perm.length; i++) {
            perms.add(perm[i]);
        }
        ProtectionDomain pd = new ProtectionDomain(null, perms);
        AccessControlContext acc = new AccessControlContext(
                new ProtectionDomain[] { pd });

        // Every granted permission must be implied by the context.
        for (int i = 0; i < perm.length; i++) {
            try {
                acc.checkPermission(perm[i]);
            } catch (AccessControlException e) {
                fail("Should have permission " + perm[i]);
            }
        }

        // Implied by perm[1] ("read, execute, delete" on test1.file).
        try {
            acc.checkPermission(new FilePermission("test1.file", "execute"));
        } catch (AccessControlException e) {
            fail("Should have permission ");
        }
        // Implied by the wildcard perm[2] (/tmp/test/* read, write).
        try {
            acc.checkPermission(new FilePermission(s + "tmp" + s + "test" + s
                    + "hello.file", "read"));
        } catch (AccessControlException e) {
            fail("Should have permission ");
        }
        // Not granted at all: different file name.
        try {
            acc.checkPermission(new FilePermission("test2.file", "execute"));
            fail("SecurityException expected");
        } catch (AccessControlException e) {
            // expected
        }
        // Wildcard perm[2] grants read/write, but not delete.
        try {
            acc.checkPermission(new FilePermission(s + "tmp" + s + "test" + s
                    + "hello.file", "delete"));
            fail("SecurityException expected");
        } catch (AccessControlException e) {
            // expected
        }
        // Null permission argument is rejected.
        try {
            acc.checkPermission(null);
            fail("NullPointerException expected");
        } catch (NullPointerException npe) {
            // expected
        }
    }

    /**
     * @tests java.security.AccessControlException#equals()
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "equals",
        args = {java.lang.Object.class}
    )
    public void test_equals() {
        final Permission perm1 = new PropertyPermission("java.class.path",
                "read");
        final Permission perm2 = new PropertyPermission("java.path", "write");

        // acc1 and acc2 share the same ProtectionDomain -> expected equal.
        PermissionCollection col1 = perm1.newPermissionCollection();
        col1.add(perm1);
        final ProtectionDomain pd1 = new ProtectionDomain(null, col1);
        AccessControlContext acc1 = new AccessControlContext(
                new ProtectionDomain[] { pd1 });
        AccessControlContext acc2 = new AccessControlContext(
                new ProtectionDomain[] { pd1 });

        // acc3 uses a different domain -> expected unequal to acc1/acc2.
        PermissionCollection col2 = perm2.newPermissionCollection();
        col2.add(perm2);
        col2.add(perm2);
        final ProtectionDomain pd2 = new ProtectionDomain(null, col2);
        AccessControlContext acc3 = new AccessControlContext(
                new ProtectionDomain[] { pd2 });

        assertFalse(acc1.equals(null));
        assertFalse(acc2.equals(null));
        assertFalse(acc3.equals(null));

        assertTrue(acc1.equals(acc2));
        assertTrue(acc2.equals(acc1));
        assertFalse(acc1.equals(acc3));
        assertFalse(acc2.equals(acc3));

        // Contexts built with the two-arg constructor: equality must also
        // take the DomainCombiner into account (null vs. non-null differ).
        AccessControlContext context = AccessController.getContext();
        AccessControlContext acc4 = new AccessControlContext(context, null);
        AccessControlContext acc5 = new AccessControlContext(context,
                new SubjectDomainCombiner(new Subject()));
        AccessControlContext acc6 = new AccessControlContext(context, null);

        assertFalse(acc4.equals(null));
        assertFalse(acc5.equals(null));

        assertFalse(acc4.equals(acc5));
        assertFalse(acc5.equals(acc4));

        assertTrue(acc4.equals(acc6));
        assertTrue(acc6.equals(acc4));
    }

    /**
     * @tests java.security.AccessControlException#getDomainCombiner()
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        method = "getDomainCombiner",
        args = {}
    )
    public void test_getDomainCombiner() {
        AccessControlContext context = AccessController.getContext();
        AccessControlContext acc1 = new AccessControlContext(context, null);
        AccessControlContext acc2 = new AccessControlContext(context,
                new SubjectDomainCombiner(new Subject()));

        final Permission perm1 = new PropertyPermission("java.class.path",
                "read");
        PermissionCollection col1 = perm1.newPermissionCollection();
        col1.add(perm1);
        final ProtectionDomain pd1 = new ProtectionDomain(null, col1);
        AccessControlContext acc3 = new AccessControlContext(
                new ProtectionDomain[] { pd1 });

        // Only acc2 was constructed with a combiner; the others return null.
        assertNull(acc1.getDomainCombiner());
        assertNotNull(acc2.getDomainCombiner());
        assertNull(acc3.getDomainCombiner());
    }

    /**
     * @tests java.security.AccessControlException#hashCode()
     */
    @TestTargetNew(
        level = TestLevel.COMPLETE,
        notes = "",
        method = "hashCode",
        args = {}
    )
    public void test_hashCode() {
        final Permission perm1 = new PropertyPermission("java.class.path",
                "read");
        final Permission perm2 = new PropertyPermission("java.path", "write");

        PermissionCollection col1 = perm1.newPermissionCollection();
        col1.add(perm1);
        final ProtectionDomain pd1 = new ProtectionDomain(null, col1);
        AccessControlContext acc1 = new AccessControlContext(
                new ProtectionDomain[] { pd1 });
        AccessControlContext acc2 = new AccessControlContext(
                new ProtectionDomain[] { pd1 });

        PermissionCollection col2 = perm2.newPermissionCollection();
        col2.add(perm2);
        col2.add(perm2);
        final ProtectionDomain pd2 = new ProtectionDomain(null, col2);
        AccessControlContext acc3 = new AccessControlContext(
                new ProtectionDomain[] { pd2 });

        // hashCode must be stable per instance and consistent with equals:
        // equal contexts (acc1/acc2) hash alike, distinct ones differ here.
        assertTrue(acc1.hashCode() == acc1.hashCode());
        assertTrue(acc2.hashCode() == acc2.hashCode());
        assertTrue(acc3.hashCode() == acc3.hashCode());
        assertTrue(acc1.hashCode() == acc2.hashCode());
        assertTrue(acc2.hashCode() != acc3.hashCode());
        assertTrue(acc3.hashCode() != acc1.hashCode());

        AccessControlContext context = AccessController.getContext();
        AccessControlContext acc4 = new AccessControlContext(context, null);
        AccessControlContext acc5 = new AccessControlContext(context,
                new SubjectDomainCombiner(new Subject()));
        AccessControlContext acc6 = new AccessControlContext(context, null);

        // All three wrap the same underlying context, so they hash alike
        // even though acc5 carries a combiner.
        assertTrue(acc4.hashCode() == acc4.hashCode());
        assertTrue(acc5.hashCode() == acc5.hashCode());
        assertTrue(acc6.hashCode() == acc6.hashCode());
        assertTrue(acc4.hashCode() == acc5.hashCode());
        assertTrue(acc5.hashCode() == acc6.hashCode());
        assertTrue(acc6.hashCode() == acc4.hashCode());
    }
}
/**
 * Copyright 2007-2015, Kaazing Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaazing.k3po.control.internal;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import java.nio.charset.Charset;

import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.jmock.Expectations;
import org.jmock.api.Action;
import org.jmock.api.Invocation;
import org.jmock.integration.junit4.JUnitRuleMockery;
import org.jmock.lib.legacy.ClassImposteriser;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.kaazing.k3po.control.internal.command.AbortCommand;
import org.kaazing.k3po.control.internal.command.CloseCommand;
import org.kaazing.k3po.control.internal.command.PrepareCommand;
import org.kaazing.k3po.control.internal.command.StartCommand;
import org.kaazing.k3po.control.internal.event.CommandEvent;
import org.kaazing.k3po.control.internal.event.ErrorEvent;
import org.kaazing.k3po.control.internal.event.FinishedEvent;
import org.kaazing.k3po.control.internal.event.PreparedEvent;
import org.kaazing.k3po.control.internal.event.StartedEvent;

/**
 * Tests the {@link Control} wire protocol: commands written to the output
 * stream and events parsed from the input stream. The underlying connection
 * is faked with a custom {@code test://} URL whose streams are jMock mocks,
 * so every test pins the exact bytes exchanged.
 */
public class ControlTest {

    private static final Charset UTF_8 = Charset.forName("UTF-8");

    private Control control;

    // ClassImposteriser lets jMock mock the abstract stream classes.
    @Rule
    public JUnitRuleMockery mockery = new JUnitRuleMockery() {
        {
            setImposteriser(ClassImposteriser.INSTANCE);
        }
    };

    // Mock streams backing the fake URL connection below.
    private InputStream input;
    private OutputStream output;

    /**
     * Builds a Control over a synthetic "test://internal" URL whose
     * URLConnection hands back the mock input/output streams.
     */
    @Before
    public void setupControl() throws Exception {
        input = mockery.mock(InputStream.class);
        output = mockery.mock(OutputStream.class);
        control = new Control(new URL(null, "test://internal", new URLStreamHandler() {
            @Override
            protected URLConnection openConnection(URL location) throws IOException {
                return new URLConnection(location) {
                    @Override
                    public void connect() throws IOException {
                        // no-op
                    }

                    @Override
                    public InputStream getInputStream() {
                        return input;
                    }

                    @Override
                    public OutputStream getOutputStream() {
                        return output;
                    }
                };
            }
        }));
    }

    // Writing before connect() must fail.
    @Test(expected = IllegalStateException.class)
    public void shouldNotWriteCommand() throws Exception {
        StartCommand start = new StartCommand();
        control.writeCommand(start);
    }

    // Reading before connect() must fail.
    @Test(expected = IllegalStateException.class)
    public void shouldNotReadEvent() throws Exception {
        control.readEvent();
    }

    @Test
    public void shouldConnect() throws Exception {
        control.connect();
    }

    // Disconnect must close both underlying streams exactly once.
    @Test
    public void shouldConnectAndDisconnect() throws Exception {
        mockery.checking(new Expectations() {
            {
                oneOf(input).close();
                oneOf(output).close();
            }
        });

        control.connect();
        control.disconnect();
    }

    // PREPARE is serialized with version, content-length and name headers.
    @Test
    public void shouldWritePrepareCommand() throws Exception {
        String path = "org/kaazing/robot/control/myscript";
        final byte[] expectedPrepare = ("PREPARE\n" + "version:2.0\n" + "content-length:0\n"
                + "name:" + path + "\n" + "\n").getBytes(UTF_8);

        mockery.checking(new Expectations() {
            {
                oneOf(output).write(with(hasInitialBytes(expectedPrepare)), with(equal(0)),
                        with(equal(expectedPrepare.length)));
                oneOf(output).flush();
            }
        });

        PrepareCommand prepare = new PrepareCommand();
        prepare.setName(path);

        control.connect();
        control.writeCommand(prepare);
    }

    // START has no headers: just the verb and a blank line.
    @Test
    public void shouldWriteStartCommand() throws Exception {
        final byte[] expectedStart = ("START\n" + "\n").getBytes(UTF_8);

        mockery.checking(new Expectations() {
            {
                oneOf(output).write(with(hasInitialBytes(expectedStart)), with(equal(0)),
                        with(equal(expectedStart.length)));
                oneOf(output).flush();
            }
        });

        StartCommand start = new StartCommand();

        control.connect();
        control.writeCommand(start);
    }

    // ABORT has no headers either.
    @Test
    public void shouldWriteAbortCommand() throws Exception {
        final byte[] expectedAbort = ("ABORT\n" + "\n").getBytes(UTF_8);

        mockery.checking(new Expectations() {
            {
                oneOf(output).write(with(hasInitialBytes(expectedAbort)), with(equal(0)),
                        with(equal(expectedAbort.length)));
                oneOf(output).flush();
            }
        });

        AbortCommand abort = new AbortCommand();

        control.connect();
        control.writeCommand(abort);
    }

    // CLOSE has no headers either.
    @Test
    public void shouldWriteCloseCommand() throws Exception {
        final byte[] expectedClose = ("CLOSE\n" + "\n").getBytes(UTF_8);

        mockery.checking(new Expectations() {
            {
                oneOf(output).write(with(hasInitialBytes(expectedClose)), with(equal(0)),
                        with(equal(expectedClose.length)));
                oneOf(output).flush();
            }
        });

        CloseCommand close = new CloseCommand();

        control.connect();
        control.writeCommand(close);
    }

    // PREPARED carries a script body sized by content-length; unknown
    // headers must be tolerated (forward compatibility).
    @Test
    public void shouldReadPreparedEvent() throws Exception {
        PreparedEvent expectedPrepared = new PreparedEvent();
        expectedPrepared.setScript("# comment");

        mockery.checking(new Expectations() {
            {
                atLeast(1).of(input).read();
                will(readBytes(("PREPARED\n" + "content-length:9\n"
                        + "future-header:future-value\n" + // test forward compatibility
                        "\n").getBytes(UTF_8)));
                oneOf(input).read(with(any(byte[].class)), with(equal(0)), with(any(int.class)));
                will(readBytes(0, "# comment".getBytes(UTF_8)));
            }
        });

        control.connect();
        CommandEvent finished = control.readEvent();
        assertEquals(expectedPrepared, finished);
    }

    // STARTED has no body; unknown headers must be tolerated.
    @Test
    public void shouldReadStartedEvent() throws Exception {
        StartedEvent expectedStarted = new StartedEvent();

        mockery.checking(new Expectations() {
            {
                atLeast(1).of(input).read();
                will(readBytes(("STARTED\n"
                        + "future-header:future-value\n" + // test forward compatibility
                        "\n").getBytes(UTF_8)));
            }
        });

        control.connect();
        CommandEvent started = control.readEvent();
        assertEquals(expectedStarted, started);
    }

    // FINISHED carries the observed script body, like PREPARED.
    @Test
    public void shouldReadFinishedEvent() throws Exception {
        FinishedEvent expectedFinished = new FinishedEvent();
        expectedFinished.setScript("# comment");

        mockery.checking(new Expectations() {
            {
                atLeast(1).of(input).read();
                will(readBytes(("FINISHED\n" + "content-length:9\n"
                        + "future-header:future-value\n" + // test forward compatibility
                        "\n").getBytes(UTF_8)));
                oneOf(input).read(with(any(byte[].class)), with(equal(0)), with(any(int.class)));
                will(readBytes(0, "# comment".getBytes(UTF_8)));
            }
        });

        control.connect();
        CommandEvent finished = control.readEvent();
        assertEquals(expectedFinished, finished);
    }

    // ERROR carries a summary header plus a description body.
    @Test
    public void shouldReadErrorEvent() throws Exception {
        ErrorEvent expectedError = new ErrorEvent();
        expectedError.setSummary("summary text");
        expectedError.setDescription("description text");

        mockery.checking(new Expectations() {
            {
                atLeast(1).of(input).read();
                will(readBytes(("ERROR\n" + "summary:summary text\n" + "content-length:16\n"
                        + "future-header:future-value\n" + // test forward compatibility
                        "\n").getBytes(UTF_8)));
                oneOf(input).read(with(any(byte[].class)), with(equal(0)), with(any(int.class)));
                will(readBytes(0, "description text".getBytes(UTF_8)));
            }
        });

        control.connect();
        CommandEvent error = control.readEvent();
        assertEquals(expectedError, error);
    }

    /**
     * Matcher accepting any byte[] whose leading bytes equal {@code expected}
     * (the actual array may be a larger buffer).
     */
    private static Matcher<byte[]> hasInitialBytes(final byte[] expected) {
        return new BaseMatcher<byte[]>() {

            @Override
            public boolean matches(Object item) {
                if (!(item instanceof byte[])) {
                    return false;
                }
                byte[] actual = (byte[]) item;
                if (actual.length < expected.length) {
                    return false;
                }
                for (int i = 0; i < expected.length; i++) {
                    if (actual[i] != expected[i]) {
                        return false;
                    }
                }
                return true;
            }

            @Override
            public void describeTo(Description description) {
                description.appendText("has initial bytes");
            }
        };
    }

    /**
     * Action for single-byte InputStream.read(): returns the given bytes one
     * per consecutive call.
     */
    private static Action readBytes(final byte[] bytes) {
        Action[] actions = new Action[bytes.length];
        for (int i = 0; i < bytes.length; i++) {
            actions[i] = Expectations.returnValue((int) bytes[i]);
        }
        return Expectations.onConsecutiveCalls(actions);
    }

    /**
     * Action for InputStream.read(byte[], off, len): copies {@code bytes}
     * into the caller-supplied buffer (invocation parameter at index
     * {@code parameter}) and returns the number of bytes written.
     */
    private static Action readBytes(final int parameter, final byte[] bytes) {
        return new Action() {

            @Override
            public Object invoke(Invocation invocation) throws Throwable {
                byte[] array = (byte[]) invocation.getParameter(parameter);
                if (array.length < bytes.length) {
                    throw new IndexOutOfBoundsException();
                }
                for (int i = 0; i < bytes.length; i++) {
                    array[i] = bytes[i];
                }
                return bytes.length;
            }

            @Override
            public void describeTo(Description description) {
                description.appendText("read initial bytes");
            }
        };
    }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.query;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import io.druid.guice.annotations.ExtensionPoint;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Duration;
import org.joda.time.Interval;

import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Skeleton {@link Query} implementation holding the state common to all query
 * types: the data source, the interval spec, the descending flag and the
 * free-form context map.
 */
@ExtensionPoint
public abstract class BaseQuery<T extends Comparable<T>> implements Query<T>
{
  /**
   * Throws {@link QueryInterruptedException} (wrapping an
   * {@link InterruptedException}) if the current thread has been interrupted.
   * Note: {@link Thread#interrupted()} clears the thread's interrupt flag.
   */
  public static void checkInterrupted()
  {
    if (Thread.interrupted()) {
      throw new QueryInterruptedException(new InterruptedException());
    }
  }

  // Context key under which the query id is stored (see getId/withId).
  public static final String QUERYID = "queryId";

  private final DataSource dataSource;
  private final boolean descending;
  private final Map<String, Object> context;
  private final QuerySegmentSpec querySegmentSpec;

  // Lazily-computed cache of the summed interval durations. Deliberately
  // excluded from equals()/hashCode(): it is derived entirely from
  // querySegmentSpec, so including it would make equality depend on whether
  // getDuration() had been called yet on each instance.
  private volatile Duration duration;

  /**
   * @param dataSource       data source queried; must not be null
   * @param querySegmentSpec intervals/segments covered; must not be null
   * @param descending       whether results are ordered descending
   * @param context          free-form query context; may be null (not copied)
   */
  public BaseQuery(
      DataSource dataSource,
      QuerySegmentSpec querySegmentSpec,
      boolean descending,
      Map<String, Object> context
  )
  {
    Preconditions.checkNotNull(dataSource, "dataSource can't be null");
    Preconditions.checkNotNull(querySegmentSpec, "querySegmentSpec can't be null");

    this.dataSource = dataSource;
    this.context = context;
    this.querySegmentSpec = querySegmentSpec;
    this.descending = descending;
  }

  @JsonProperty
  @Override
  public DataSource getDataSource()
  {
    return dataSource;
  }

  @JsonProperty
  @Override
  public boolean isDescending()
  {
    return descending;
  }

  @JsonProperty("intervals")
  public QuerySegmentSpec getQuerySegmentSpec()
  {
    return querySegmentSpec;
  }

  /** Delegates runner resolution to the segment spec. */
  @Override
  public QueryRunner<T> getRunner(QuerySegmentWalker walker)
  {
    return querySegmentSpec.lookup(this, walker);
  }

  @Override
  public List<Interval> getIntervals()
  {
    return querySegmentSpec.getIntervals();
  }

  /**
   * Total duration of all (non-null) intervals in the segment spec.
   * Computed on first call and cached; the computation is deterministic, so
   * a benign race between threads yields the same value.
   */
  @Override
  public Duration getDuration()
  {
    if (duration == null) {
      Duration totalDuration = new Duration(0);
      for (Interval interval : querySegmentSpec.getIntervals()) {
        if (interval != null) {
          totalDuration = totalDuration.plus(interval.toDuration());
        }
      }
      duration = totalDuration;
    }

    return duration;
  }

  /** @return the context map as supplied at construction; may be null. */
  @Override
  @JsonProperty
  public Map<String, Object> getContext()
  {
    return context;
  }

  /**
   * @return the context value for {@code key}, or null if absent or the
   *         context itself is null. The cast is unchecked by design: callers
   *         declare the expected type.
   */
  @Override
  @SuppressWarnings("unchecked")
  public <ContextType> ContextType getContextValue(String key)
  {
    return context == null ? null : (ContextType) context.get(key);
  }

  /** Same as {@link #getContextValue(String)} but with a fallback value. */
  @Override
  public <ContextType> ContextType getContextValue(String key, ContextType defaultValue)
  {
    ContextType retVal = getContextValue(key);
    return retVal == null ? defaultValue : retVal;
  }

  @Override
  public boolean getContextBoolean(String key, boolean defaultValue)
  {
    return QueryContexts.parseBoolean(this, key, defaultValue);
  }

  /**
   * @deprecated use {@link #computeOverriddenContext(Map, Map) computeOverriddenContext(getContext(), overrides))}
   * instead. This method may be removed in the next minor or major version of Druid.
   */
  @Deprecated
  protected Map<String, Object> computeOverridenContext(final Map<String, Object> overrides)
  {
    return computeOverriddenContext(getContext(), overrides);
  }

  /**
   * Merges {@code overrides} on top of {@code context} (overrides win) into a
   * new sorted map; either input map may be null-tolerant only for
   * {@code context}.
   */
  protected static Map<String, Object> computeOverriddenContext(
      final Map<String, Object> context,
      final Map<String, Object> overrides
  )
  {
    Map<String, Object> overridden = Maps.newTreeMap();
    if (context != null) {
      overridden.putAll(context);
    }
    overridden.putAll(overrides);

    return overridden;
  }

  /** Natural ordering of T, reversed when the query is descending. */
  @Override
  public Ordering<T> getResultOrdering()
  {
    Ordering<T> retVal = Ordering.natural();
    return descending ? retVal.reverse() : retVal;
  }

  @Override
  public String getId()
  {
    return (String) getContextValue(QUERYID);
  }

  @Override
  public Query withId(String id)
  {
    return withOverriddenContext(ImmutableMap.of(QUERYID, id));
  }

  /**
   * Equality is based on the constructor-supplied state only. The lazily
   * computed {@code duration} cache is intentionally excluded — it is derived
   * from querySegmentSpec, and including it made two otherwise-identical
   * queries unequal depending on whether getDuration() had been called.
   */
  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    BaseQuery baseQuery = (BaseQuery) o;

    return descending == baseQuery.descending
           && Objects.equals(dataSource, baseQuery.dataSource)
           && Objects.equals(context, baseQuery.context)
           && Objects.equals(querySegmentSpec, baseQuery.querySegmentSpec);
  }

  /** Consistent with equals(); excludes the duration cache for the same reason. */
  @Override
  public int hashCode()
  {
    return Objects.hash(dataSource, descending, context, querySegmentSpec);
  }
}
/* * Copyright (c) 1996, 2009, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package java.beans; import com.sun.beans.finder.ClassFinder; import java.applet.Applet; import java.applet.AppletContext; import java.applet.AppletStub; import java.applet.AudioClip; import java.awt.Image; import java.beans.beancontext.BeanContext; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectStreamClass; import java.io.StreamCorruptedException; import java.lang.reflect.Modifier; import java.net.URL; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Enumeration; import java.util.Hashtable; import java.util.Iterator; import java.util.Vector; /** * This class provides some general purpose beans control methods. */ public class Beans { /** * <p> * Instantiate a JavaBean. * </p> * * @param cls the class-loader from which we should create * the bean. If this is null, then the system * class-loader is used. * @param beanName the name of the bean within the class-loader. * For example "sun.beanbox.foobah" * * @exception ClassNotFoundException if the class of a serialized * object could not be found. * @exception IOException if an I/O error occurs. */ public static Object instantiate(ClassLoader cls, String beanName) throws IOException, ClassNotFoundException { return Beans.instantiate(cls, beanName, null, null); } /** * <p> * Instantiate a JavaBean. * </p> * * @param cls the class-loader from which we should create * the bean. If this is null, then the system * class-loader is used. * @param beanName the name of the bean within the class-loader. * For example "sun.beanbox.foobah" * @param beanContext The BeanContext in which to nest the new bean * * @exception ClassNotFoundException if the class of a serialized * object could not be found. 
* @exception IOException if an I/O error occurs. */ public static Object instantiate(ClassLoader cls, String beanName, BeanContext beanContext) throws IOException, ClassNotFoundException { return Beans.instantiate(cls, beanName, beanContext, null); } /** * Instantiate a bean. * <p> * The bean is created based on a name relative to a class-loader. * This name should be a dot-separated name such as "a.b.c". * <p> * In Beans 1.0 the given name can indicate either a serialized object * or a class. Other mechanisms may be added in the future. In * beans 1.0 we first try to treat the beanName as a serialized object * name then as a class name. * <p> * When using the beanName as a serialized object name we convert the * given beanName to a resource pathname and add a trailing ".ser" suffix. * We then try to load a serialized object from that resource. * <p> * For example, given a beanName of "x.y", Beans.instantiate would first * try to read a serialized object from the resource "x/y.ser" and if * that failed it would try to load the class "x.y" and create an * instance of that class. * <p> * If the bean is a subtype of java.applet.Applet, then it is given * some special initialization. First, it is supplied with a default * AppletStub and AppletContext. Second, if it was instantiated from * a classname the applet's "init" method is called. (If the bean was * deserialized this step is skipped.) * <p> * Note that for beans which are applets, it is the caller's responsiblity * to call "start" on the applet. For correct behaviour, this should be done * after the applet has been added into a visible AWT container. * <p> * Note that applets created via beans.instantiate run in a slightly * different environment than applets running inside browsers. In * particular, bean applets have no access to "parameters", so they may * wish to provide property get/set methods to set parameter values. 
We * advise bean-applet developers to test their bean-applets against both * the JDK appletviewer (for a reference browser environment) and the * BDK BeanBox (for a reference bean container). * * @param cls the class-loader from which we should create * the bean. If this is null, then the system * class-loader is used. * @param beanName the name of the bean within the class-loader. * For example "sun.beanbox.foobah" * @param beanContext The BeanContext in which to nest the new bean * @param initializer The AppletInitializer for the new bean * * @exception ClassNotFoundException if the class of a serialized * object could not be found. * @exception IOException if an I/O error occurs. */ public static Object instantiate(ClassLoader cls, String beanName, BeanContext beanContext, AppletInitializer initializer) throws IOException, ClassNotFoundException { InputStream ins; ObjectInputStream oins = null; Object result = null; boolean serialized = false; IOException serex = null; // If the given classloader is null, we check if an // system classloader is available and (if so) // use that instead. // Note that calls on the system class loader will // look in the bootstrap class loader first. if (cls == null) { try { cls = ClassLoader.getSystemClassLoader(); } catch (SecurityException ex) { // We're not allowed to access the system class loader. // Drop through. 
} } // Try to find a serialized object with this name final String serName = beanName.replace('.','/').concat(".ser"); final ClassLoader loader = cls; ins = (InputStream)AccessController.doPrivileged (new PrivilegedAction() { public Object run() { if (loader == null) return ClassLoader.getSystemResourceAsStream(serName); else return loader.getResourceAsStream(serName); } }); if (ins != null) { try { if (cls == null) { oins = new ObjectInputStream(ins); } else { oins = new ObjectInputStreamWithLoader(ins, cls); } result = oins.readObject(); serialized = true; oins.close(); } catch (IOException ex) { ins.close(); // Drop through and try opening the class. But remember // the exception in case we can't find the class either. serex = ex; } catch (ClassNotFoundException ex) { ins.close(); throw ex; } } if (result == null) { // No serialized object, try just instantiating the class Class cl; try { cl = ClassFinder.findClass(beanName, cls); } catch (ClassNotFoundException ex) { // There is no appropriate class. If we earlier tried to // deserialize an object and got an IO exception, throw that, // otherwise rethrow the ClassNotFoundException. if (serex != null) { throw serex; } throw ex; } if (!Modifier.isPublic(cl.getModifiers())) { throw new ClassNotFoundException("" + cl + " : no public access"); } /* * Try to instantiate the class. */ try { result = cl.newInstance(); } catch (Exception ex) { // We have to remap the exception to one in our signature. // But we pass extra information in the detail message. throw new ClassNotFoundException("" + cl + " : " + ex, ex); } } if (result != null) { // Ok, if the result is an applet initialize it. AppletStub stub = null; if (result instanceof Applet) { Applet applet = (Applet) result; boolean needDummies = initializer == null; if (needDummies) { // Figure our the codebase and docbase URLs. We do this // by locating the URL for a known resource, and then // massaging the URL. 
// First find the "resource name" corresponding to the bean // itself. So a serialzied bean "a.b.c" would imply a // resource name of "a/b/c.ser" and a classname of "x.y" // would imply a resource name of "x/y.class". final String resourceName; if (serialized) { // Serialized bean resourceName = beanName.replace('.','/').concat(".ser"); } else { // Regular class resourceName = beanName.replace('.','/').concat(".class"); } URL objectUrl = null; URL codeBase = null; URL docBase = null; // Now get the URL correponding to the resource name. final ClassLoader cloader = cls; objectUrl = (URL) AccessController.doPrivileged (new PrivilegedAction() { public Object run() { if (cloader == null) return ClassLoader.getSystemResource (resourceName); else return cloader.getResource(resourceName); } }); // If we found a URL, we try to locate the docbase by taking // of the final path name component, and the code base by taking // of the complete resourceName. // So if we had a resourceName of "a/b/c.class" and we got an // objectURL of "file://bert/classes/a/b/c.class" then we would // want to set the codebase to "file://bert/classes/" and the // docbase to "file://bert/classes/a/b/" if (objectUrl != null) { String s = objectUrl.toExternalForm(); if (s.endsWith(resourceName)) { int ix = s.length() - resourceName.length(); codeBase = new URL(s.substring(0,ix)); docBase = codeBase; ix = s.lastIndexOf('/'); if (ix >= 0) { docBase = new URL(s.substring(0,ix+1)); } } } // Setup a default context and stub. BeansAppletContext context = new BeansAppletContext(applet); stub = (AppletStub)new BeansAppletStub(applet, context, codeBase, docBase); applet.setStub(stub); } else { initializer.initialize(applet, beanContext); } // now, if there is a BeanContext, add the bean, if applicable. if (beanContext != null) { beanContext.add(result); } // If it was deserialized then it was already init-ed. // Otherwise we need to initialize it. 
if (!serialized) { // We need to set a reasonable initial size, as many // applets are unhappy if they are started without // having been explicitly sized. applet.setSize(100,100); applet.init(); } if (needDummies) { ((BeansAppletStub)stub).active = true; } else initializer.activate(applet); } else if (beanContext != null) beanContext.add(result); } return result; } /** * From a given bean, obtain an object representing a specified * type view of that source object. * <p> * The result may be the same object or a different object. If * the requested target view isn't available then the given * bean is returned. * <p> * This method is provided in Beans 1.0 as a hook to allow the * addition of more flexible bean behaviour in the future. * * @param bean Object from which we want to obtain a view. * @param targetType The type of view we'd like to get. * */ public static Object getInstanceOf(Object bean, Class<?> targetType) { return bean; } /** * Check if a bean can be viewed as a given target type. * The result will be true if the Beans.getInstanceof method * can be used on the given bean to obtain an object that * represents the specified targetType type view. * * @param bean Bean from which we want to obtain a view. * @param targetType The type of view we'd like to get. * @return "true" if the given bean supports the given targetType. * */ public static boolean isInstanceOf(Object bean, Class<?> targetType) { return Introspector.isSubclass(bean.getClass(), targetType); } /** * Test if we are in design-mode. * * @return True if we are running in an application construction * environment. * * @see DesignMode */ public static boolean isDesignTime() { return ThreadGroupContext.getContext().isDesignTime(); } /** * Determines whether beans can assume a GUI is available. * * @return True if we are running in an environment where beans * can assume that an interactive GUI is available, so they * can pop up dialog boxes, etc. 
This will normally return * true in a windowing environment, and will normally return * false in a server environment or if an application is * running as part of a batch job. * * @see Visibility * */ public static boolean isGuiAvailable() { return ThreadGroupContext.getContext().isGuiAvailable(); } /** * Used to indicate whether of not we are running in an application * builder environment. * * <p>Note that this method is security checked * and is not available to (for example) untrusted applets. * More specifically, if there is a security manager, * its <code>checkPropertiesAccess</code> * method is called. This could result in a SecurityException. * * @param isDesignTime True if we're in an application builder tool. * @exception SecurityException if a security manager exists and its * <code>checkPropertiesAccess</code> method doesn't allow setting * of system properties. * @see SecurityManager#checkPropertiesAccess */ public static void setDesignTime(boolean isDesignTime) throws SecurityException { SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPropertiesAccess(); } ThreadGroupContext.getContext().setDesignTime(isDesignTime); } /** * Used to indicate whether of not we are running in an environment * where GUI interaction is available. * * <p>Note that this method is security checked * and is not available to (for example) untrusted applets. * More specifically, if there is a security manager, * its <code>checkPropertiesAccess</code> * method is called. This could result in a SecurityException. * * @param isGuiAvailable True if GUI interaction is available. * @exception SecurityException if a security manager exists and its * <code>checkPropertiesAccess</code> method doesn't allow setting * of system properties. 
* @see SecurityManager#checkPropertiesAccess */ public static void setGuiAvailable(boolean isGuiAvailable) throws SecurityException { SecurityManager sm = System.getSecurityManager(); if (sm != null) { sm.checkPropertiesAccess(); } ThreadGroupContext.getContext().setGuiAvailable(isGuiAvailable); } } /** * This subclass of ObjectInputStream delegates loading of classes to * an existing ClassLoader. */ class ObjectInputStreamWithLoader extends ObjectInputStream { private ClassLoader loader; /** * Loader must be non-null; */ public ObjectInputStreamWithLoader(InputStream in, ClassLoader loader) throws IOException, StreamCorruptedException { super(in); if (loader == null) { throw new IllegalArgumentException("Illegal null argument to ObjectInputStreamWithLoader"); } this.loader = loader; } /** * Use the given ClassLoader rather than using the system class */ protected Class resolveClass(ObjectStreamClass classDesc) throws IOException, ClassNotFoundException { String cname = classDesc.getName(); return ClassFinder.resolveClass(cname, this.loader); } } /** * Package private support class. This provides a default AppletContext * for beans which are applets. */ class BeansAppletContext implements AppletContext { Applet target; Hashtable imageCache = new Hashtable(); BeansAppletContext(Applet target) { this.target = target; } public AudioClip getAudioClip(URL url) { // We don't currently support audio clips in the Beans.instantiate // applet context, unless by some luck there exists a URL content // class that can generate an AudioClip from the audio URL. try { return (AudioClip) url.getContent(); } catch (Exception ex) { return null; } } public synchronized Image getImage(URL url) { Object o = imageCache.get(url); if (o != null) { return (Image)o; } try { o = url.getContent(); if (o == null) { return null; } if (o instanceof Image) { imageCache.put(url, o); return (Image) o; } // Otherwise it must be an ImageProducer. 
Image img = target.createImage((java.awt.image.ImageProducer)o); imageCache.put(url, img); return img; } catch (Exception ex) { return null; } } public Applet getApplet(String name) { return null; } public Enumeration getApplets() { Vector applets = new Vector(); applets.addElement(target); return applets.elements(); } public void showDocument(URL url) { // We do nothing. } public void showDocument(URL url, String target) { // We do nothing. } public void showStatus(String status) { // We do nothing. } public void setStream(String key, InputStream stream)throws IOException{ // We do nothing. } public InputStream getStream(String key){ // We do nothing. return null; } public Iterator getStreamKeys(){ // We do nothing. return null; } } /** * Package private support class. This provides an AppletStub * for beans which are applets. */ class BeansAppletStub implements AppletStub { transient boolean active; transient Applet target; transient AppletContext context; transient URL codeBase; transient URL docBase; BeansAppletStub(Applet target, AppletContext context, URL codeBase, URL docBase) { this.target = target; this.context = context; this.codeBase = codeBase; this.docBase = docBase; } public boolean isActive() { return active; } public URL getDocumentBase() { // use the root directory of the applet's class-loader return docBase; } public URL getCodeBase() { // use the directory where we found the class or serialized object. return codeBase; } public String getParameter(String name) { return null; } public AppletContext getAppletContext() { return context; } public void appletResize(int width, int height) { // we do nothing. } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.internal;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.DelegatingHasContextAndHeaders;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;

import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Holds the per-request state of a search execution on a shard: the parsed
 * query and filters, paging/sorting parameters, fetch-phase configuration,
 * and accessors for the index services the search needs. Concrete state
 * accessors are declared abstract; this base class contributes only the
 * thread-local "current context" registry and the {@link Releasable}
 * lifecycle management ({@link #addReleasable}/{@link #clearReleasables}).
 */
public abstract class SearchContext extends DelegatingHasContextAndHeaders implements Releasable {

    // The search context bound to the thread currently executing a search, if any.
    private static ThreadLocal<SearchContext> current = new ThreadLocal<>();

    // Default for terminateAfter(): 0, i.e. no early termination of collection.
    public final static int DEFAULT_TERMINATE_AFTER = 0;

    /**
     * Binds {@code value} as the calling thread's current search context and
     * publishes its types to {@link QueryShardContext} so query parsing can see them.
     */
    public static void setCurrent(SearchContext value) {
        current.set(value);
        QueryShardContext.setTypes(value.types());
    }

    /** Unbinds the calling thread's search context and the types published alongside it. */
    public static void removeCurrent() {
        current.remove();
        QueryShardContext.removeTypes();
    }

    /** Returns the search context bound to the calling thread, or {@code null} if none. */
    public static SearchContext current() {
        return current.get();
    }

    // Releasables scheduled via addReleasable(), grouped by Lifetime; allocated lazily
    // on first use since many contexts never register any.
    private Map<Lifetime, List<Releasable>> clearables = null;

    // Ensures close() releases resources exactly once even if called concurrently.
    private final AtomicBoolean closed = new AtomicBoolean(false);

    protected final ParseFieldMatcher parseFieldMatcher;

    protected SearchContext(ParseFieldMatcher parseFieldMatcher, HasContextAndHeaders contextHeaders) {
        super(contextHeaders);
        this.parseFieldMatcher = parseFieldMatcher;
    }

    /** Returns the matcher used to interpret (possibly deprecated) field names while parsing. */
    public ParseFieldMatcher parseFieldMatcher() {
        return parseFieldMatcher;
    }

    /**
     * Releases everything registered with CONTEXT lifetime, then delegates to
     * {@link #doClose()}. Idempotent: only the first call has any effect.
     */
    @Override
    public final void close() {
        if (closed.compareAndSet(false, true)) { // prevent double release
            try {
                clearReleasables(Lifetime.CONTEXT);
            } finally {
                doClose();
            }
        }
    }

    // Set once nowInMillis() has been observed; exposed via nowInMillisUsed().
    // NOTE(review): presumably lets callers detect time-dependent requests
    // (e.g. for cacheability decisions) — confirm against callers.
    private boolean nowInMillisUsed;

    /** Subclass hook for releasing implementation-specific resources; called once from {@link #close()}. */
    protected abstract void doClose();

    /**
     * Should be called before executing the main query and after all other parameters have been set.
     */
    public abstract void preProcess();

    public abstract Query searchFilter(String[] types);

    public abstract long id();

    public abstract String source();

    public abstract ShardSearchRequest request();

    public abstract SearchType searchType();

    public abstract SearchContext searchType(SearchType searchType);

    public abstract SearchShardTarget shardTarget();

    public abstract int numberOfShards();

    public abstract boolean hasTypes();

    public abstract String[] types();

    public abstract float queryBoost();

    public abstract SearchContext queryBoost(float queryBoost);

    public abstract long getOriginNanoTime();

    /** Returns the request's notion of "now" and records that it was consulted (see {@link #nowInMillisUsed()}). */
    public final long nowInMillis() {
        nowInMillisUsed = true;
        return nowInMillisImpl();
    }

    /** Whether {@link #nowInMillis()} has been called at least once on this context. */
    public final boolean nowInMillisUsed() {
        return nowInMillisUsed;
    }

    protected abstract long nowInMillisImpl();

    public abstract ScrollContext scrollContext();

    public abstract SearchContext scrollContext(ScrollContext scroll);

    public abstract SearchContextAggregations aggregations();

    public abstract SearchContext aggregations(SearchContextAggregations aggregations);

    public abstract <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory);

    public abstract SearchContextHighlight highlight();

    public abstract void highlight(SearchContextHighlight highlight);

    public abstract void innerHits(InnerHitsContext innerHitsContext);

    public abstract InnerHitsContext innerHits();

    public abstract SuggestionSearchContext suggest();

    public abstract void suggest(SuggestionSearchContext suggest);

    /**
     * @return list of all rescore contexts.  empty if there aren't any.
     */
    public abstract List<RescoreSearchContext> rescore();

    public abstract void addRescore(RescoreSearchContext rescore);

    public abstract boolean hasScriptFields();

    public abstract ScriptFieldsContext scriptFields();

    /**
     * A shortcut function to see whether there is a fetchSourceContext and it says the source is requested.
     */
    public abstract boolean sourceRequested();

    public abstract boolean hasFetchSourceContext();

    public abstract FetchSourceContext fetchSourceContext();

    public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext);

    public abstract ContextIndexSearcher searcher();

    public abstract IndexShard indexShard();

    public abstract MapperService mapperService();

    public abstract AnalysisService analysisService();

    public abstract IndexQueryParserService queryParserService();

    public abstract SimilarityService similarityService();

    public abstract ScriptService scriptService();

    public abstract PageCacheRecycler pageCacheRecycler();

    public abstract BigArrays bigArrays();

    public abstract BitsetFilterCache bitsetFilterCache();

    public abstract IndexFieldDataService fieldData();

    public abstract long timeoutInMillis();

    public abstract void timeoutInMillis(long timeoutInMillis);

    public abstract int terminateAfter();

    public abstract void terminateAfter(int terminateAfter);

    public abstract SearchContext minimumScore(float minimumScore);

    public abstract Float minimumScore();

    public abstract SearchContext sort(Sort sort);

    public abstract Sort sort();

    public abstract SearchContext trackScores(boolean trackScores);

    public abstract boolean trackScores();

    public abstract SearchContext parsedPostFilter(ParsedQuery postFilter);

    public abstract ParsedQuery parsedPostFilter();

    public abstract Query aliasFilter();

    public abstract SearchContext parsedQuery(ParsedQuery query);

    public abstract ParsedQuery parsedQuery();

    /**
     * The query to execute, might be rewritten.
     */
    public abstract Query query();

    public abstract int from();

    public abstract SearchContext from(int from);

    public abstract int size();

    public abstract SearchContext size(int size);

    public abstract boolean hasFieldNames();

    public abstract List<String> fieldNames();

    public abstract void emptyFieldNames();

    public abstract boolean explain();

    public abstract void explain(boolean explain);

    @Nullable
    public abstract List<String> groupStats();

    public abstract void groupStats(List<String> groupStats);

    public abstract boolean version();

    public abstract void version(boolean version);

    public abstract int[] docIdsToLoad();

    public abstract int docIdsToLoadFrom();

    public abstract int docIdsToLoadSize();

    public abstract SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize);

    public abstract void accessed(long accessTime);

    public abstract long lastAccessTime();

    public abstract long keepAlive();

    public abstract void keepAlive(long keepAlive);

    public abstract SearchLookup lookup();

    public abstract DfsSearchResult dfsResult();

    public abstract QuerySearchResult queryResult();

    public abstract FetchSearchResult fetchResult();

    /**
     * Schedule the release of a resource. The time when {@link Releasable#close()} will be called on this object
     * is function of the provided {@link Lifetime}.
     */
    public void addReleasable(Releasable releasable, Lifetime lifetime) {
        if (clearables == null) {
            clearables = new HashMap<>();
        }
        List<Releasable> releasables = clearables.get(lifetime);
        if (releasables == null) {
            releasables = new ArrayList<>();
            clearables.put(lifetime, releasables);
        }
        releasables.add(releasable);
    }

    /**
     * Releases every resource whose lifetime is shorter than or equal to
     * {@code lifetime}. Relies on the declaration order of {@link Lifetime}
     * (COLLECTION &lt; PHASE &lt; CONTEXT): the loop stops at the first
     * lifetime that outlives the requested one.
     */
    public void clearReleasables(Lifetime lifetime) {
        if (clearables != null) {
            List<List<Releasable>> releasables = new ArrayList<>();
            for (Lifetime lc : Lifetime.values()) {
                if (lc.compareTo(lifetime) > 0) {
                    break;
                }
                List<Releasable> remove = clearables.remove(lc);
                if (remove != null) {
                    releasables.add(remove);
                }
            }
            Releasables.close(Iterables.flatten(releasables));
        }
    }

    public abstract MappedFieldType smartNameFieldType(String name);

    /**
     * Looks up the given field, but does not restrict to fields in the types set on this context.
     */
    public abstract MappedFieldType smartNameFieldTypeFromAnyType(String name);

    public abstract ObjectMapper getObjectMapper(String name);

    public abstract Counter timeEstimateCounter();

    /** Return a view of the additional query collectors that should be run for this context. */
    public abstract Map<Class<?>, Collector> queryCollectors();

    /**
     * The life time of an object that is used during search execution.
     */
    public enum Lifetime {
        /**
         * This life time is for objects that only live during collection time.
         */
        COLLECTION,
        /**
         * This life time is for objects that need to live until the end of the current search phase.
         */
        PHASE,
        /**
         * This life time is for objects that need to live until the search context they are attached to is destroyed.
         */
        CONTEXT
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.FullyQualifiedTableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.exceptions.NoSuchColumnFamilyException;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test restore snapshots from the client.
 *
 * Each test runs against a shared 3-node mini cluster. {@link #setup()} builds
 * a fresh table plus three snapshots per test (an empty one and two taken at
 * different row counts); the tests then restore between those snapshots and
 * verify row counts, column families, and on-disk family directories.
 */
@Category(LargeTests.class)
public class TestRestoreSnapshotFromClient {
  final Log LOG = LogFactory.getLog(getClass());

  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  // The single column family every test table starts with.
  private final byte[] FAMILY = Bytes.toBytes("cf");

  // Snapshot names (unique per test via a timestamp suffix; see setup()).
  private byte[] emptySnapshot;   // taken before any data is loaded
  private byte[] snapshotName0;   // taken after the first 500-row load
  private byte[] snapshotName1;   // taken after the second 500-row load
  private byte[] snapshotName2;   // used by individual tests for extra snapshots
  private int snapshot0Rows;      // row count captured in snapshotName0
  private int snapshot1Rows;      // row count captured in snapshotName1
  private FullyQualifiedTableName tableName;
  private HBaseAdmin admin;

  /** Starts a 3-node mini cluster with snapshots and online schema changes enabled. */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.getConfiguration().setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
    TEST_UTIL.getConfiguration().setBoolean("hbase.online.schema.update.enable", true);
    TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10);
    TEST_UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 100);
    TEST_UTIL.getConfiguration().setInt("hbase.client.pause", 250);
    TEST_UTIL.getConfiguration().setInt("hbase.client.retries.number", 6);
    TEST_UTIL.getConfiguration().setBoolean(
        "hbase.master.enabletable.roundrobin", true);
    TEST_UTIL.startMiniCluster(3);
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Initialize the tests with a table filled with some data
   * and two snapshots (snapshotName0, snapshotName1) of different states.
   * The tableName, snapshotNames and the number of rows in the snapshot are initialized.
   */
  @Before
  public void setup() throws Exception {
    this.admin = TEST_UTIL.getHBaseAdmin();

    // Timestamp suffix keeps names unique across tests sharing the cluster.
    long tid = System.currentTimeMillis();
    tableName = FullyQualifiedTableName.valueOf("testtb-" + tid);
    emptySnapshot = Bytes.toBytes("emptySnaptb-" + tid);
    snapshotName0 = Bytes.toBytes("snaptb0-" + tid);
    snapshotName1 = Bytes.toBytes("snaptb1-" + tid);
    snapshotName2 = Bytes.toBytes("snaptb2-" + tid);

    // create Table and disable it
    createTable(tableName, FAMILY);
    admin.disableTable(tableName);

    // take an empty snapshot
    admin.snapshot(emptySnapshot, tableName);

    HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName);
    try {
      // enable table and insert data
      admin.enableTable(tableName);
      loadData(table, 500, FAMILY);
      snapshot0Rows = TEST_UTIL.countRows(table);
      admin.disableTable(tableName);

      // take a snapshot
      admin.snapshot(snapshotName0, tableName);

      // enable table and insert more data
      admin.enableTable(tableName);
      loadData(table, 500, FAMILY);
      snapshot1Rows = TEST_UTIL.countRows(table);
      admin.disableTable(tableName);

      // take a snapshot of the updated table
      admin.snapshot(snapshotName1, tableName);

      // re-enable table
      admin.enableTable(tableName);
    } finally {
      table.close();
    }
  }

  /**
   * Drops the per-test table, the two snapshots from setup(), and the HFile archive.
   * NOTE(review): emptySnapshot and snapshotName2 are not deleted here — they
   * appear to be left behind on the cluster; confirm whether that is intended.
   */
  @After
  public void tearDown() throws Exception {
    if (admin.tableExists(tableName)) {
      TEST_UTIL.deleteTable(tableName);
    }
    admin.deleteSnapshot(snapshotName0);
    admin.deleteSnapshot(snapshotName1);

    // Ensure the archiver to be empty
    MasterFileSystem mfs = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
    mfs.getFileSystem().delete(
      new Path(mfs.getRootDir(), HConstants.HFILE_ARCHIVE_DIRECTORY), true);
  }

  /**
   * Restores snapshot-0, the empty snapshot, and snapshot-1 in turn (each time
   * disabling the table first) and checks the table's row count matches the
   * state captured in each snapshot.
   */
  @Test
  public void testRestoreSnapshot() throws IOException {
    verifyRowCount(tableName, snapshot1Rows);

    // Restore from snapshot-0
    admin.disableTable(tableName);
    admin.restoreSnapshot(snapshotName0);
    admin.enableTable(tableName);
    verifyRowCount(tableName, snapshot0Rows);

    // Restore from emptySnapshot
    admin.disableTable(tableName);
    admin.restoreSnapshot(emptySnapshot);
    admin.enableTable(tableName);
    verifyRowCount(tableName, 0);

    // Restore from snapshot-1
    admin.disableTable(tableName);
    admin.restoreSnapshot(snapshotName1);
    admin.enableTable(tableName);
    verifyRowCount(tableName, snapshot1Rows);
  }

  /**
   * Verifies that restoring a snapshot also restores the table schema: a second
   * column family added (and populated) after snapshot-0 must disappear when
   * snapshot-0 is restored, and reappear when a later snapshot that includes
   * it is restored.
   *
   * NOTE(review): {@code table} is closed mid-test and then used again for
   * getTableDescriptor()/countRows() — verify this is safe with this HTable
   * version, or reopen the handle after each restore.
   */
  @Test
  public void testRestoreSchemaChange() throws IOException {
    byte[] TEST_FAMILY2 = Bytes.toBytes("cf2");

    HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName);

    // Add one column family and put some data in it
    admin.disableTable(tableName);
    admin.addColumn(tableName, new HColumnDescriptor(TEST_FAMILY2));
    admin.enableTable(tableName);
    assertEquals(2, table.getTableDescriptor().getFamilies().size());
    HTableDescriptor htd = admin.getTableDescriptor(tableName);
    assertEquals(2, htd.getFamilies().size());
    loadData(table, 500, TEST_FAMILY2);
    long snapshot2Rows = snapshot1Rows + 500;
    assertEquals(snapshot2Rows, TEST_UTIL.countRows(table));
    assertEquals(500, TEST_UTIL.countRows(table, TEST_FAMILY2));
    Set<String> fsFamilies = getFamiliesFromFS(tableName);
    assertEquals(2, fsFamilies.size());
    table.close();

    // Take a snapshot
    admin.disableTable(tableName);
    admin.snapshot(snapshotName2, tableName);

    // Restore the snapshot (without the cf)
    admin.restoreSnapshot(snapshotName0);
    admin.enableTable(tableName);
    assertEquals(1, table.getTableDescriptor().getFamilies().size());
    try {
      TEST_UTIL.countRows(table, TEST_FAMILY2);
      fail("family '" + Bytes.toString(TEST_FAMILY2) + "' should not exists");
    } catch (NoSuchColumnFamilyException e) {
      // expected
    }
    assertEquals(snapshot0Rows, TEST_UTIL.countRows(table));
    htd = admin.getTableDescriptor(tableName);
    assertEquals(1, htd.getFamilies().size());
    fsFamilies = getFamiliesFromFS(tableName);
    assertEquals(1, fsFamilies.size());
    table.close();

    // Restore back the snapshot (with the cf)
    admin.disableTable(tableName);
    admin.restoreSnapshot(snapshotName2);
    admin.enableTable(tableName);
    htd = admin.getTableDescriptor(tableName);
    assertEquals(2, htd.getFamilies().size());
    assertEquals(2, table.getTableDescriptor().getFamilies().size());
    assertEquals(500, TEST_UTIL.countRows(table, TEST_FAMILY2));
    assertEquals(snapshot2Rows, TEST_UTIL.countRows(table));
    fsFamilies = getFamiliesFromFS(tableName);
    assertEquals(2, fsFamilies.size());
    table.close();
  }

  /**
   * Clones snapshot-0 into a new table, snapshots the clone, deletes the clone,
   * waits for the HFile cleaner, then clones the clone's snapshot again —
   * exercising restore of a snapshot whose files came from a cloned table.
   */
  @Test
  public void testRestoreSnapshotOfCloned() throws IOException, InterruptedException {
    FullyQualifiedTableName clonedTableName =
        FullyQualifiedTableName.valueOf("clonedtb-" + System.currentTimeMillis());
    admin.cloneSnapshot(snapshotName0, clonedTableName);
    verifyRowCount(clonedTableName, snapshot0Rows);
    admin.disableTable(clonedTableName);
    admin.snapshot(snapshotName2, clonedTableName);
    admin.deleteTable(clonedTableName);
    waitCleanerRun();

    admin.cloneSnapshot(snapshotName2, clonedTableName);
    verifyRowCount(clonedTableName, snapshot0Rows);
    admin.disableTable(clonedTableName);
    admin.deleteTable(clonedTableName);
  }

  // ==========================================================================
  //  Helpers
  // ==========================================================================

  /**
   * Creates a table with the given families, pre-split into 16 regions on
   * single-hex-digit boundaries (matching the MD5-hex row keys from loadData()).
   */
  private void createTable(final FullyQualifiedTableName tableName, final byte[]... families) throws IOException {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    for (byte[] family: families) {
      HColumnDescriptor hcd = new HColumnDescriptor(family);
      htd.addFamily(hcd);
    }
    byte[][] splitKeys = new byte[16][];
    byte[] hex = Bytes.toBytes("0123456789abcdef");
    for (int i = 0; i < 16; ++i) {
      splitKeys[i] = new byte[] { hex[i] };
    }
    admin.createTable(htd, splitKeys);
  }

  /**
   * Puts {@code rows} rows into each of the given families, keyed by the MD5
   * hex of the value so writes spread across the pre-split regions. Uses
   * SKIP_WAL and buffered auto-flush for speed; flushes the buffer at the end.
   */
  public void loadData(final HTable table, int rows, byte[]... families) throws IOException {
    byte[] qualifier = Bytes.toBytes("q");
    table.setAutoFlush(false);
    while (rows-- > 0) {
      byte[] value = Bytes.add(Bytes.toBytes(System.currentTimeMillis()), Bytes.toBytes(rows));
      byte[] key = Bytes.toBytes(MD5Hash.getMD5AsHex(value));
      Put put = new Put(key);
      put.setDurability(Durability.SKIP_WAL);
      for (byte[] family: families) {
        put.add(family, qualifier, value);
      }
      table.put(put);
    }
    table.flushCommits();
  }

  /** Forces one pass of the master's HFile cleaner chore. */
  private void waitCleanerRun() throws InterruptedException {
    TEST_UTIL.getMiniHBaseCluster().getMaster().getHFileCleaner().choreForTesting();
  }

  /**
   * Returns the set of column-family directory names found on the filesystem
   * for the given table (union across all region directories).
   */
  private Set<String> getFamiliesFromFS(final FullyQualifiedTableName tableName) throws IOException {
    MasterFileSystem mfs = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
    Set<String> families = new HashSet<String>();
    Path tableDir = FSUtils.getTableDir(mfs.getRootDir(), tableName);
    for (Path regionDir: FSUtils.getRegionDirs(mfs.getFileSystem(), tableDir)) {
      for (Path familyDir: FSUtils.getFamilyDirs(mfs.getFileSystem(), regionDir)) {
        families.add(familyDir.getName());
      }
    }
    return families;
  }

  /** Asserts the table currently holds exactly {@code expectedRows} rows. */
  private void verifyRowCount(final FullyQualifiedTableName tableName, long expectedRows) throws IOException {
    HTable table = new HTable(TEST_UTIL.getConfiguration(), tableName);
    assertEquals(expectedRows, TEST_UTIL.countRows(table));
    table.close();
  }
}
/** * Copyright (c) 2013, 2016, The Regents of the University of California, The Cytoscape Consortium * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* */ package org.ndexbio.task.parsingengines; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.UUID; import java.util.concurrent.ExecutionException; import java.util.logging.Logger; import org.ndexbio.common.access.NdexDatabase; import org.ndexbio.common.models.dao.orientdb.Helper; import org.ndexbio.common.models.dao.orientdb.UserDocDAO; import org.ndexbio.model.exceptions.NdexException; import org.ndexbio.model.object.NdexPropertyValuePair; import org.ndexbio.model.object.NdexProvenanceEventType; import org.ndexbio.model.object.ProvenanceEntity; import org.ndexbio.model.object.SimplePropertyValuePair; import org.ndexbio.common.persistence.orientdb.NdexPersistenceService; import org.ndexbio.common.util.TermStringType; import org.ndexbio.common.util.TermUtilities; import org.ndexbio.model.object.User; import org.ndexbio.model.object.network.NetworkSourceFormat; import org.ndexbio.model.object.network.NetworkSummary; import org.ndexbio.model.tools.ProvenanceHelpers; import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.io.Files; /* * Lines in the SIF file specify a source node, a relationship type * (or edge type), and one or more target nodes. 
* * see: http://wiki.cytoscape.org/Cytoscape_User_Manual/Network_Formats */ public class SifParser implements IParsingEngine { private final File sifFile; private final String sifURI; private final String extendedBinarySIFEdgeHeader = "PARTICIPANT_A INTERACTION_TYPE PARTICIPANT_B INTERACTION_DATA_SOURCE INTERACTION_PUBMED_ID"; private final String extendedBinarySIFAliasHeader = "PARTICIPANT PARTICIPANT_TYPE PARTICIPANT_NAME UNIFICATION_XREF RELATIONSHIP_XREF"; private final String extendedBinarySIFPropertiesHeader = "NAME ORGANISM URI DATASOURCE"; private final List<String> msgBuffer; private static Logger logger = Logger.getLogger("SifParser"); private NdexPersistenceService persistenceService; private String taskDescription; private User loggedInUser; // private TreeSet<String> pubmedIdSet; public SifParser(String fn, String ownerName, NdexDatabase db, String networkName, String taskDescription) throws Exception { Preconditions.checkArgument(!Strings.isNullOrEmpty(fn), "A filename is required"); Preconditions.checkArgument(!Strings.isNullOrEmpty(ownerName), "A network owner name is required"); this.msgBuffer = Lists.newArrayList(); if ( fn.startsWith("/") || fn.matches("^[a-zA-Z]:.*")) this.sifFile = new File(fn); else this.sifFile = new File(getClass().getClassLoader().getResource(fn).toURI()); this.sifURI = sifFile.toURI().toString(); this.persistenceService = new NdexPersistenceService(db); String title = networkName; if ( title == null) title = Files.getNameWithoutExtension(this.sifFile.getName()); persistenceService.createNewNetwork(ownerName, title, null); this.taskDescription = taskDescription; try (UserDocDAO userDocDAO = new UserDocDAO(db.getAConnection())) { loggedInUser = userDocDAO.getUserByAccountName(ownerName); } // addSystemDefaultNamespaces(); } public List<String> getMsgBuffer() { return this.msgBuffer; } public String getSIFURI() { return sifURI; } public File getSifFile() { return sifFile; } 
/************************************************************************** * Whitespace (space or tab) is used to delimit the names in the simple * interaction file format. However, in some cases spaces are desired in a * node name or edge type. The standard is that, if the file contains any * tab characters, then tabs are used to delimit the fields and spaces are * considered part of the name. If the file contains no tabs, then any * spaces are delimiters that separate names (and names cannot contain * spaces). * @throws JsonProcessingException * @throws NdexException **************************************************************************/ @Override public void parseFile() throws NdexException { try (BufferedReader bufferedReader = new BufferedReader(new FileReader(this.getSifFile()))){ this.getMsgBuffer().add("Parsing lines from " + this.getSIFURI()); boolean extendedBinarySIF = checkForExtendedFormat(); if (extendedBinarySIF) { this.processExtendedBinarySIF(bufferedReader); // this.networkService.setFormat("EXTENDED_BINARY_SIF"); } else { boolean tabDelimited = scanForTabs(); this.processSimpleSIFLines(tabDelimited, bufferedReader); // this.networkService.setFormat("BINARY_SIF"); } //add provenance to network NetworkSummary currentNetwork = this.persistenceService.getCurrentNetwork(); // set the source format this.persistenceService.setNetworkSourceFormat(NetworkSourceFormat.SIF); String uri = NdexDatabase.getURIPrefix(); // close database connection this.persistenceService.persistNetwork(); ProvenanceEntity provEntity = ProvenanceHelpers.createProvenanceHistory(currentNetwork, uri, NdexProvenanceEventType.FILE_UPLOAD, currentNetwork.getCreationTime(), (ProvenanceEntity)null); Helper.populateProvenanceEntity(provEntity, currentNetwork); provEntity.getCreationEvent().setEndedAtTime(currentNetwork.getModificationTime()); List<SimplePropertyValuePair> l = provEntity.getCreationEvent().getProperties(); Helper.addUserInfoToProvenanceEventProperties( l, 
loggedInUser); l.add( new SimplePropertyValuePair ( "filename",taskDescription) ); this.persistenceService.setNetworkProvenance(provEntity); persistenceService.commit(); } catch (Exception e) { // delete network and close the database connection e.printStackTrace(); this.persistenceService.abortTransaction(); throw new NdexException("Error occurred when loading file " + this.sifFile.getName() + ". " + e.getMessage() ); } finally { persistenceService.close(); } } private boolean checkForExtendedFormat() throws IOException { try (BufferedReader bufferedReader = new BufferedReader(new FileReader(this.getSifFile()))){ String line = bufferedReader.readLine(); // Check the first line for the EBS header if ( line.startsWith(extendedBinarySIFEdgeHeader)) { bufferedReader.close(); return true; } } catch (FileNotFoundException e) { e.printStackTrace(); throw new IOException(e); } return false; } private boolean scanForTabs() throws IOException { try (BufferedReader bufferedReader = new BufferedReader(new FileReader(this.getSifFile()))){ String line; int counter = 0; // Check the first 20 lines for tabs while ((line = bufferedReader.readLine()) != null) { if (line.indexOf("\t") != -1) return true; if (counter++ > 20) return false; } } catch (FileNotFoundException e) { e.printStackTrace(); throw new IOException(e); } return false; } private void processSimpleSIFLines(boolean tabDelimited, BufferedReader bufferedReader) throws IOException, ExecutionException, NdexException { try { String line; while ((line = bufferedReader.readLine()) != null) { String[] tokens = null; if (tabDelimited) { tokens = line.split("\t"); } else { tokens = line.split("\\s+"); } if (tokens.length == 1) addNode(tokens[0]); // if (tokens.length == 3) // addEdge(tokens[0], tokens[1], tokens[2]); else if ( tokens.length ==2 ) throw new NdexException ("Invalid data format found in line: " + line); else { for ( int i = 2 ; i < tokens.length; i++ ) addEdge ( tokens[0], tokens[1],tokens[i]); } } } catch 
(IOException e) { this.getMsgBuffer().add(e.getMessage()); throw e; } finally { bufferedReader.close(); } } /* * Standard Extended Binary SIF has two sections, one for edges and one to * define aliases for terms used in the edges. NDEx Extended Binary SIF has * an additional section that captures some additional network meta-data * * Each section is preceded by its header, so the parsing mode switches as * each header is encountered. We already know that line 0 is the edge * header so we start processing edges on the next line. */ private void processExtendedBinarySIF(BufferedReader bufferedReader) throws IOException, ExecutionException, NdexException { try { // skip the header line bufferedReader.readLine(); String line; int counter = 0; while ((line = bufferedReader.readLine()) != null) { if (line.indexOf(extendedBinarySIFAliasHeader) != -1) { processExtendedBinarySIFAliases(bufferedReader); break; } String[] tokens = null; tokens = line.split("\t"); if (tokens.length > 2) { // "PARTICIPANT_A INTERACTION_TYPE PARTICIPANT_B INTERACTION_DATA_SOURCE INTERACTION_PUBMED_ID"; String subject = tokens[0]; String predicate = tokens[1]; String object = tokens[2]; // String dataSource = null; // ignored for now String[] pubMedIds = null; if (tokens.length > 4 && tokens[4] != null && tokens[4].length()>0) { pubMedIds = tokens[4].split(";"); } Long edgeId = addEdge(subject, predicate, object); counter ++; if ( counter % 2000 == 0 ) { logger.info("processed " + counter + " lines so far. 
commit this batch."); this.persistenceService.commit(); } if (pubMedIds != null) { List<Long> citationIds = new ArrayList<> (pubMedIds.length); for (String pubMedId : pubMedIds) { String[] pubmedIdTokens = pubMedId.split(":"); if ( pubmedIdTokens.length ==2 ) { if ( pubmedIdTokens[0].equals("Pubmed")) { Long citationId = this.persistenceService.getCitationId( "", NdexPersistenceService.URICitationType, NdexPersistenceService.pmidPrefix + pubmedIdTokens[1], null); citationIds.add(citationId); } else if ( pubmedIdTokens[0].equals("ISBN")){ Long citationId = this.persistenceService.getCitationId( "", NdexPersistenceService.URICitationType, pubMedId, null); citationIds.add(citationId); } else { logger.warning("Unsupported Pubmed id format: " + pubMedId + " found in file.\n line:\n " + line +"\n Ignore this pubmedId.\n" ); } } else if (pubmedIdTokens.length == 1 ) { String pubmedId = pubmedIdTokens[0]; if ( pubmedId.length() > 0 ) { Long citationId = this.persistenceService.getCitationId( "", NdexPersistenceService.URICitationType, NdexPersistenceService.pmidPrefix + pubmedIdTokens[0], null); citationIds.add(citationId); } } else throw new NdexException("Invalid Pubmed format in line: " + line); this.persistenceService.addCitationsToElement(edgeId, citationIds); } } } } } catch (IOException e) { this.getMsgBuffer().add(e.getMessage()); throw e; } finally { bufferedReader.close(); } } private void processExtendedBinarySIFAliases(BufferedReader bufferedReader) throws IOException, ExecutionException, NdexException { // "PARTICIPANT PARTICIPANT_TYPE PARTICIPANT_NAME UNIFICATION_XREF RELATIONSHIP_XREF"; System.out.println("Processing Aliases"); String line; int counter = 0; while ((line = bufferedReader.readLine()) != null) { // System.out.println("-- " + line); if (line.indexOf(extendedBinarySIFPropertiesHeader) != -1) { System.out.println("found properties header"); processExtendedBinarySIFProperties(bufferedReader); break; } else if ("".equals(line)) { // skip blank 
lines. } else { // System.out.println("aliases: " + line); // Process one line of aliases String[] tokens = null; tokens = line.split("\t"); counter ++; if ( counter % 2000 == 0 ) { logger.info("Aliases processed " + counter + " lines. commit batch."); this.persistenceService.commit(); } if (tokens.length > 2) { String participantIdentifier = tokens[0]; // find the node that represents the term specified by the // participantIdentifier Long participantNodeId = addNode(participantIdentifier); if (participantNodeId == null) break; //String type = tokens[1]; List<String> aliasList = new LinkedList<>(); aliasList.add(tokens[2]); //String name = tokens[2]; // special case processing for "_HUMAN" suffix # this logic is removed from 1.3.2 /* int humanSuffixIndex = name.indexOf("_HUMAN"); if (humanSuffixIndex != -1){ name = name.substring(0, humanSuffixIndex); } //participant.setName(name); this.persistenceService.setNodeName(participantNodeId, name); */ if (tokens.length > 3) { String[] unificationAliases = tokens[3].split(";"); if ( unificationAliases !=null ) { for (String a : unificationAliases) aliasList.add(a); } this.persistenceService.addAliasToNode(participantNodeId,aliasList); if (tokens.length > 4) { String[] relationshipAliases = tokens[4].split(";"); this.persistenceService.setRelatedTermsOnNode(participantNodeId, relationshipAliases); } } } } } } private void processExtendedBinarySIFProperties( BufferedReader bufferedReader) throws IOException, NdexException, ExecutionException { // NAME\tORGANISM\tURI\tDATASOURCE"; // this is currently one line of properties, but perhaps it would be // better to have one property per line. 
System.out.println("Processing one line of Network Properties"); String line = bufferedReader.readLine(); if (line != null) { String[] values = line.split("\t"); if (values.length > 0 && values[0] != null) { this.persistenceService.setNetworkTitleAndDescription( values[0], null); } List<NdexPropertyValuePair> props = new ArrayList<>(); if (values.length > 1 && values[1] != null) { NdexPropertyValuePair p = new NdexPropertyValuePair ("ORGANISM", values[1]); props.add(p); } if (values.length > 2 && values[2] != null) { NdexPropertyValuePair p = new NdexPropertyValuePair ("URI", values[2]); props.add(p); } if (values.length > 3 && values[3] != null) { // System.out.println("Source: " + values[3]); String source = values[3]; if (source.equals("http://purl.org/pc2/4/pid")){ source = "PID"; } props.add(new NdexPropertyValuePair("Source" , source)); } this.persistenceService.setNetworkProperties(props, null); } } private Long addNode(String name) throws ExecutionException, NdexException { TermStringType stype = TermUtilities.getTermType(name); if ( stype == TermStringType.NAME) { return persistenceService.getNodeIdByName(name); } return persistenceService.getNodeIdByBaseTerm(name); } private Long addEdge(String subject, String predicate, String object) throws ExecutionException, NdexException { Long subjectNodeId = addNode(subject); Long objectNodeId = addNode(object); Long predicateTermId = persistenceService.getBaseTermId(predicate); return persistenceService.getEdge(subjectNodeId, objectNodeId, predicateTermId, null,null,null); } /* private void addSystemDefaultNamespaces() throws NdexException { this.persistenceService.createNamespace2("UniProt", "http://identifiers.org/uniprot/"); this.persistenceService.createNamespace2("Ensembl", "http://ndex.org/Ensembl/"); this.persistenceService.createNamespace2("Pubmed", "http://www.ncbi.nlm.nih.gov/pubmed/"); this.persistenceService.createNamespace2("CHEBI", "http://identifiers.org/chebi/"); 
this.persistenceService.createNamespace2("Reactome", "http://identifiers.org/reactome/"); this.persistenceService.createNamespace2("RefSeq", "http://identifiers.org/refseq/"); this.persistenceService.createNamespace2("HGNC Symbol","http://identifiers.org/hgnc.symbol/"); this.persistenceService.createNamespace2("HGNC", "http://identifiers.org/hgnc/"); this.persistenceService.createNamespace2("NCBI Gene","http://identifiers.org/ncbigene/"); this.persistenceService.createNamespace2("InChIKey", "http://identifiers.org/inchikey/"); this.persistenceService.createNamespace2("pubchem-substance","http://identifiers.org/pubchem.substance/"); this.persistenceService.createNamespace2("pubchem", "http://identifiers.org/pubchem.compound/"); this.persistenceService.createNamespace2("omim", "http://identifiers.org/omim/"); this.persistenceService.createNamespace2("PROTEIN DATA BANK","http://identifiers.org/pdb/"); this.persistenceService.createNamespace2("Panther Family","http://identifiers.org/panther.family/"); this.persistenceService.createNamespace2("CAS", "http://identifiers.org/cas/"); } */ @Override public UUID getUUIDOfUploadedNetwork() { try { return persistenceService.getCurrentNetwork().getExternalId(); } catch ( Exception e) { e.printStackTrace(); return null; } } }
/* Copyright 2012 Wolfgang Koller - http://www.gofg.at/ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.cordova.plugin; import org.apache.cordova.CordovaInterface; import org.apache.cordova.plugin; import org.apache.cordova.plugin; import org.json.JSONArray; import java.io.BufferedInputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.Iterator; import java.util.Set; import java.util.UUID; import org.json.JSONException; import org.json.JSONObject; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothServerSocket; import android.bluetooth.BluetoothSocket; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.os.Environment; import android.os.Message; import android.os.Parcelable; import android.util.Log; public class BluetoothPlugin extends Plugin { private static final String ACTION_ENABLE = "enable"; private static final 
String ACTION_DISABLE = "disable"; private static final String ACTION_DISCOVERDEVICES = "discoverDevices"; private static final String ACTION_GETUUIDS = "getUUIDs"; private static final String ACTION_GETBONDEDDEVICES = "getBondedDevices"; private static final String ACTION_CONNECT = "connect"; private static final String ACTION_READ = "read"; private static final String ACTION_READ2 = "read2"; private static final String ACTION_READ3 = "read3"; private static final String ACTION_READ4 = "read4"; private static final String ACTION_WRITE = "write"; private static final String ACTION_READ5 = "read5"; private static final String ACTION_DISCONNECT = "disconnect"; private static String ACTION_UUID = ""; private static String EXTRA_UUID = ""; private BluetoothAdapter m_bluetoothAdapter = null; private BPBroadcastReceiver m_bpBroadcastReceiver = null; private boolean m_discovering = false; private boolean m_gettingUuids = false; private boolean m_discoverable = false; private boolean m_stateChanging = false; private AcceptThread mAcceptThread; private JSONArray m_discoveredDevices = null; private JSONArray m_gotUUIDs = null; private static final String NAME = "BluetoothListen"; // Unique UUID for this application private static final UUID MY_UUID = UUID.fromString("00001101-0000-1000-8000-00805f9b34fb"); BluetoothSocket bluetoothListenSocket = null; private ArrayList<BluetoothSocket> m_bluetoothSockets = new ArrayList<BluetoothSocket>(); /** * Constructor for Bluetooth plugin */ public BluetoothPlugin() { m_bluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); m_bpBroadcastReceiver = new BPBroadcastReceiver(); try { Field actionUUID = BluetoothDevice.class.getDeclaredField("ACTION_UUID"); BluetoothPlugin.ACTION_UUID = (String) actionUUID.get(null); Log.d("BluetoothPlugin", "actionUUID: " + actionUUID.getName() + " / " + actionUUID.get(null)); Field extraUUID = BluetoothDevice.class.getDeclaredField("EXTRA_UUID"); BluetoothPlugin.EXTRA_UUID = (String) 
extraUUID.get(null); Log.d("BluetoothPlugin", "extraUUID: " + extraUUID.getName() + " / " + extraUUID.get(null)); } catch( Exception e ) { Log.e("BluetoothPlugin", e.getMessage() ); } } public synchronized void start() { Log.d("BluetoothPlugin", "start"); // Start the thread to listen on a BluetoothServerSocket if (mAcceptThread == null) { mAcceptThread = new AcceptThread(); mAcceptThread.start(); } // setState(STATE_LISTEN); } /** * Register receiver as soon as we have the context */ @Override public void setContext(CordovaInterface ctx) { super.setContext(ctx); // Register for necessary bluetooth events ctx.getActivity().registerReceiver(m_bpBroadcastReceiver, new IntentFilter( BluetoothAdapter.ACTION_DISCOVERY_FINISHED)); ctx.getActivity().registerReceiver(m_bpBroadcastReceiver, new IntentFilter( BluetoothDevice.ACTION_FOUND)); ctx.getActivity().registerReceiver(m_bpBroadcastReceiver, new IntentFilter(BluetoothPlugin.ACTION_UUID)); //ctx.registerReceiver(m_bpBroadcastReceiver, new IntentFilter(BluetoothAdapter.ACTION_STATE_CHANGED)); } /** * Execute a bluetooth function */ @SuppressWarnings({ "null", "deprecation" }) @Override public PluginResult execute(String action, JSONArray args, String callbackId) { PluginResult pluginResult = null; //Log.d("BluetoothPlugin", "Action: " + action); // Check if bluetooth is supported at all if( m_bluetoothAdapter == null ) { pluginResult = new PluginResult(PluginResult.Status.ILLEGAL_ACCESS_EXCEPTION, "No bluetooth adapter found"); } else { if (ACTION_ENABLE.equals(action)) { // Check if bluetooth isn't disabled already if( !m_bluetoothAdapter.isEnabled() ) { m_stateChanging = true; ctx.startActivityForResult(this, new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE), 1); while(m_stateChanging) {}; } // Check if bluetooth is enabled now if(m_bluetoothAdapter.isEnabled()) { //start(); pluginResult = new PluginResult(PluginResult.Status.OK, "OK"); } else { pluginResult = new PluginResult(PluginResult.Status.ERROR, "Bluetooth not 
enabled"); } } // Want to disable bluetooth? else if (ACTION_DISABLE.equals(action)) { if( !m_bluetoothAdapter.disable() && m_bluetoothAdapter.isEnabled() ) { pluginResult = new PluginResult(PluginResult.Status.ERROR, "Unable to disable bluetooth"); } else { pluginResult = new PluginResult(PluginResult.Status.OK, "OK"); } } else if (ACTION_DISCOVERDEVICES.equals(action)) { m_discoveredDevices = new JSONArray(); if (!m_bluetoothAdapter.startDiscovery()) { pluginResult = new PluginResult(PluginResult.Status.ERROR, "Unable to start discovery"); } else { m_discovering = true; // Wait for discovery to finish while (m_discovering) {} Log.d("BluetoothPlugin", "DiscoveredDevices: " + m_discoveredDevices.length()); pluginResult = new PluginResult(PluginResult.Status.OK, m_discoveredDevices); } } // Want to list UUIDs of a certain device else if( ACTION_GETUUIDS.equals(action) ) { try { String address = args.getString(0); Log.d("BluetoothPlugin", "Listing UUIDs for: " + address); // Fetch UUIDs from bluetooth device BluetoothDevice bluetoothDevice = m_bluetoothAdapter.getRemoteDevice(address); Method m = bluetoothDevice.getClass().getMethod("fetchUuidsWithSdp"); Log.d("BluetoothPlugin", "Method: " + m); m.invoke(bluetoothDevice); m_gettingUuids = true; while(m_gettingUuids) {} pluginResult = new PluginResult(PluginResult.Status.OK, m_gotUUIDs); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } else if ( ACTION_GETBONDEDDEVICES.equals(action) ) { JSONArray bondedDevices = new JSONArray(); Log.d( "BluetoothPlugin", "Getting Bonded List..." 
); Set<BluetoothDevice> bondSet = m_bluetoothAdapter.getBondedDevices(); for (Iterator<BluetoothDevice> it = bondSet.iterator(); it.hasNext();) { BluetoothDevice bluetoothDevice = (BluetoothDevice) it.next(); JSONObject deviceInfo = new JSONObject(); try { deviceInfo.put("name", bluetoothDevice.getName()); deviceInfo.put("address", bluetoothDevice.getAddress()); deviceInfo.put("isBonded", true); } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); } bondedDevices.put(deviceInfo); pluginResult = new PluginResult(PluginResult.Status.OK, bondedDevices); } } // Connect to a given device & uuid endpoint else if( ACTION_CONNECT.equals(action) ) { try { String address = args.getString(0); UUID uuid = UUID.fromString(args.getString(1)); //UUID uuid = UUID.fromString("00001101-0000-1000-8000-00805f9b34fb"); Log.d( "BluetoothPlugin", "Connecting..." ); BluetoothDevice bluetoothDevice = m_bluetoothAdapter.getRemoteDevice(address); BluetoothSocket bluetoothSocket = bluetoothDevice.createRfcommSocketToServiceRecord(uuid); bluetoothSocket.connect(); m_bluetoothSockets.add(bluetoothSocket); int socketId = m_bluetoothSockets.indexOf(bluetoothSocket); pluginResult = new PluginResult(PluginResult.Status.OK, socketId); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } else if( ACTION_READ.equals(action) ) { try { int socketId = args.getInt(0); //Log.d( "BluetoothPlugin", "Get Data..." 
); BluetoothSocket bluetoothSocket = m_bluetoothSockets.get(socketId); InputStream inputStream = bluetoothSocket.getInputStream(); Calendar cal = Calendar.getInstance(); Date startTime = cal.getTime(); byte[] buffer = new byte[1024]; // char [] buffer = new char[1024]; String recvdString=""; int i=0; int k=0; int byteCnt=0; boolean j=true; char buf = 0; boolean timeOut=false; while (j) { Calendar newCal = Calendar.getInstance(); Date endTime = newCal.getTime(); if ((endTime.getTime()-startTime.getTime())<60000) { if (inputStream.available()>0) { // Log.d( "BluetoothPlugin", "Time Increment: " + format.format(endTime)); i += inputStream.read(buffer,k,inputStream.available()); k=i; Log.d( "BluetoothPlugin", "i="+i); buf = (char)(buffer[i-1]&0xFF); Log.d( "BluetoothPlugin", "buf="+Integer.toHexString(buffer[i-1]&0xFF)); if ((buf== '#') || (buf==0x0A)|| (buf==(char)0xBB)|| (buf==(char)0xAA)) { //if (timeOut == true) Log.d( "BluetoothPlugin", "Time Out"); j=false; } } } else { timeOut=true; j=false; } /* buffer[i]= (char) inputStream.read(); if ((buffer[i] == '#') || (buffer[i]==0x0A)) { j=false; } i++; */ } if (timeOut) { Log.d( "BluetoothPlugin", "Time Out"); recvdString = "Timeout"; } else { byteCnt = i; recvdString= new String(buffer,0,i);//.toString();//"KBytes" + byteCnt; i=0; String stringByteCnt = String.valueOf(byteCnt); } //buffer = b.toString(); Log.d( "BluetoothPlugin", "String: " + recvdString ); pluginResult = new PluginResult(PluginResult.Status.OK,recvdString); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } else if( ACTION_READ2.equals(action) ) { try { int socketId = args.getInt(0); Calendar cal = Calendar.getInstance(); Date startTime = cal.getTime(); //Log.d( "BluetoothPlugin", "Get Data..." 
); BluetoothSocket bluetoothSocket = m_bluetoothSockets.get(socketId); InputStream inputStream = bluetoothSocket.getInputStream(); // DataInputStream dataInputStream = new DataInputStream(inputStream); //char[] buffer = new char[15000]; byte [] buf = new byte[55000]; //byte[] buffer2 = new byte[128]; String recvdString=""; int i=0; int k=0; int byteCnt=0; boolean j=true; SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss"); Log.d( "BluetoothPlugin", "StartTime: " + format.format(startTime)); boolean timeOut = false; while (j) { Calendar newCal = Calendar.getInstance(); Date endTime = newCal.getTime(); if ((endTime.getTime()-startTime.getTime())<12000) { if (inputStream.available()>0) { // Log.d( "BluetoothPlugin", "Time Increment: " + format.format(endTime)); i += inputStream.read(buf,k,inputStream.available()); k=i; Log.d( "BluetoothPlugin", "i="+i); } //Log.d( "BluetoothPlugin", "i="+dataInputStream); //inputStream.close(); if (i>51180) { //Log.d( "BluetoothPlugin", "i="+i); j= false; //i++; } } else { j=false; timeOut = true; Log.d( "BluetoothPlugin", "ECG Read TimeOut"); } } if (timeOut) { recvdString= "Aborted"; } else { File ecgPath = Environment.getExternalStorageDirectory(); File ecg = new File (ecgPath,"/prago/ecg.txt"); FileWriter fos = new FileWriter(ecg,false); String stringBuf = new String(""); //long byteCnt byteCnt = (i-1)/3; long[] buf2 = new long[byteCnt]; for (k=0;k<byteCnt;k++) { int firstByte = 0; int secondByte = 0; int thirdByte = 0; int fourthByte = 0; int index = k*3; firstByte = (0x000000FF & ((int)buf[index+1])); secondByte = (0x000000FF & ((int)buf[index+2])); thirdByte = (0x000000FF & ((int)buf[index+3])); buf2[k]= ((long) (firstByte << 16 | secondByte << 8 | thirdByte )) & 0xFFFFFFFFL; stringBuf = buf2[k] + ","; fos.write(stringBuf); } fos.flush(); fos.close(); byteCnt = i; recvdString= ecg.getPath(); } i=0; pluginResult = new PluginResult(PluginResult.Status.OK,recvdString); } catch( Exception e ) { 
Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } else if( ACTION_READ3.equals(action) ) { try { int socketId = args.getInt(0); Log.d( "BluetoothPlugin", "Get Steth Data..." ); BluetoothSocket bluetoothSocket = m_bluetoothSockets.get(socketId); //bluetoothSocket.close(); //bluetoothSocket = m_bluetoothSockets.get(socketId); //bluetoothSocket.connect(); InputStream inputStream = bluetoothSocket.getInputStream(); //inputStream.reset(); //int server_port = 9999; //DatagramSocket s = new DatagramSocket(); //InetAddress local = InetAddress.getByName("192.168.2.7"); //s.connect(local,server_port); //int msg_length=messageStr.length(); //byte[] message = messageStr.getBytes(); //char[] buffer = new char[15000]; //byte [] buf = new byte[10000]; //byte[] buffer2 = new byte[128]; // String recvdString; Calendar cal = Calendar.getInstance(); //byte [] buf = new byte[245000]; Date startTime = cal.getTime(); String recvdString= ""; int i=0; int endofFileDetect=0; byte [] firstChar = new byte[1]; int writetoFile=0; int k=0; long finalbytes=0; boolean startdetect = false; int byteCnt=0; boolean j=true; boolean ecgRec = false; byte [] buf = new byte[10000]; firstChar[0] = 0x52; File stethPath = Environment.getExternalStorageDirectory(); File steth = new File (stethPath,"/prago/steth.wav"); FileOutputStream fos = new FileOutputStream(steth); while (j) { Calendar newCal = Calendar.getInstance(); Date endTime = newCal.getTime(); if ((endTime.getTime()-startTime.getTime())<90000) { if (inputStream.available()>0) { //Log.d( "BluetoothPlugin", "inputStream.available="+inputStream.available()); //byte [] buf = new byte[inputStream.available()]; k = inputStream.read(buf,0,inputStream.available()); //Log.d( "BluetoothPlugin", "buf[0]="+buf[0]); if((writetoFile == 0)) { if((buf[0]&0xFF)== 0x52) { if (k>1) { if ((buf[1]&0xFF) == 0x49) { writetoFile = 1; i=0; } } else { startdetect = true; 
} } else if (((buf[0]&0xFF)== 0x49) && startdetect == true) { fos.write(firstChar,0,1); writetoFile = 1; i=0; } else { startdetect = false; } } if (writetoFile == 1) { i += k; //Log.d( "BluetoothPlugin", "i="+i); //Log.d( "BluetoothPlugin", "k="+k); fos.write(buf,0,k); //if (k>1)Log.d( "BluetoothPlugin", "buf[k-2]="+Integer.toHexString(buf[k-2]&0xFF)); //Log.d( "BluetoothPlugin", "buf[k-1]="+Integer.toHexString(buf[k-1]&0xFF)); if ((k>1) && ((buf[k-2]&0xFF)==0xAA) && ((buf[k-1]&0xFF)==0xBB)) { endofFileDetect = 2; // Log.d( "BluetoothPlugin", "EoF Detected Multibyte"); } else if ((k==1) && ((buf[0]&0xFF) == 0xAA)) { endofFileDetect = 1; // Log.d( "BluetoothPlugin", "EoF Detected Firstbyte"); } else if (((buf[0]&0xFF)==0xBB) && (endofFileDetect ==1)) { endofFileDetect += 1; // Log.d( "BluetoothPlugin", "EoF Detected Sectbyte"); } else { endofFileDetect = 0; } if (endofFileDetect == 2) { Log.d( "BluetoothPlugin", "File Write Complete"); //Log.d( "BluetoothPlugin", "i="+i); fos.flush(); fos.close(); j= false; //i++; recvdString= steth.getPath(); } } // DatagramPacket p = new DatagramPacket(buf, k,local,server_port); // s.send(p);// DataInputStream dataInputStream = new DataInputStream(inputStream); } //Log.d( "BluetoothPlugin", "i="+dataInputStream); //inputStream.close(); } else { j=false; //timeOut=true; Log.d( "BluetoothPlugin", "Steth Read TimeOut"); //bluetoothSocket.close(); // recvdString= "Aborted"; fos.flush(); fos.close(); recvdString= steth.getPath(); } } pluginResult = new PluginResult(PluginResult.Status.OK,recvdString); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } //--change--// else if( ACTION_READ5.equals(action) ) { try { int socketId = args.getInt(0); Log.d( "BluetoothPlugin", "Transfer Steth Data..." 
); BluetoothSocket bluetoothSocket = m_bluetoothSockets.get(socketId); //bluetoothSocket.close(); //bluetoothSocket = m_bluetoothSockets.get(socketId); //bluetoothSocket.connect(); InputStream inputStream = bluetoothSocket.getInputStream(); //inputStream.reset(); //int server_port = 9999; //DatagramSocket s = new DatagramSocket(); //InetAddress local = InetAddress.getByName("192.168.2.7"); //s.connect(local,server_port); //int msg_length=messageStr.length(); //byte[] message = messageStr.getBytes(); //char[] buffer = new char[15000]; //byte [] buf = new byte[10000]; //byte[] buffer2 = new byte[128]; //String recvdString; Calendar cal = Calendar.getInstance(); //byte [] buf = new byte[245000]; Date startTime = cal.getTime(); String recvdString= ""; int i=0; int endofFileDetect=0; byte [] firstChar = new byte[1]; int writetoFile=0; int k=0; long finalbytes=0; boolean startdetect = false; int byteCnt=0; boolean j=true; boolean ecgRec = false; byte [] buf = new byte[10000]; firstChar[0] = 0x52; File stethPath = Environment.getExternalStorageDirectory(); File steth = new File (stethPath,"/prago/steth.wav"); FileOutputStream fos = new FileOutputStream(steth); while (j) { Calendar newCal = Calendar.getInstance(); Date endTime = newCal.getTime(); if ((endTime.getTime()-startTime.getTime())<5000) { if (inputStream.available()>0) { // Log.d( "BluetoothPlugin", "inputStream.available="+inputStream.available()); cal = Calendar.getInstance(); startTime = cal.getTime(); //byte [] buf = new byte[inputStream.available()]; k = inputStream.read(buf,0,inputStream.available()); //Log.d( "BluetoothPlugin", "buf[0]="+buf[0]); if((writetoFile == 0)) { if((buf[0]&0xFF)== 0x52) { if (k>1) { if ((buf[1]&0xFF) == 0x49) { writetoFile = 1; i=0; } } else { startdetect = true; } } else if (((buf[0]&0xFF)== 0x49) && startdetect == true) { fos.write(firstChar,0,1); writetoFile = 1; i=0; } else { startdetect = false; } } if (writetoFile == 1) { i += k; //Log.d( "BluetoothPlugin", "i="+i); //Log.d( 
"BluetoothPlugin", "k="+k); fos.write(buf,0,k); //if (k>1)Log.d( "BluetoothPlugin", "buf[k-2]="+Integer.toHexString(buf[k-2]&0xFF)); //Log.d( "BluetoothPlugin", "buf[k-1]="+Integer.toHexString(buf[k-1]&0xFF)); if ((k>1) && ((buf[k-2]&0xFF)==0xAA) && ((buf[k-1]&0xFF)==0xBB)) { endofFileDetect = 2; // Log.d( "BluetoothPlugin", "EoF Detected Multibyte"); } else if ((k==1) && ((buf[0]&0xFF) == 0xAA)) { endofFileDetect = 1; // Log.d( "BluetoothPlugin", "EoF Detected Firstbyte"); } else if (((buf[0]&0xFF)==0xBB) && (endofFileDetect ==1)) { endofFileDetect += 1; // Log.d( "BluetoothPlugin", "EoF Detected Sectbyte"); } else { endofFileDetect = 0; } if (endofFileDetect == 2) { Log.d( "BluetoothPlugin", "File Write Complete"); //Log.d( "BluetoothPlugin", "i="+i); fos.flush(); fos.close(); j= false; //i++; recvdString= steth.getPath(); } } // DatagramPacket p = new DatagramPacket(buf, k,local,server_port); // s.send(p);// DataInputStream dataInputStream = new DataInputStream(inputStream); } //Log.d( "BluetoothPlugin", "i="+dataInputStream); //inputStream.close(); } else { j=false; //timeOut=true; Log.d( "BluetoothPlugin", "Steth Read TimeOut"); //bluetoothSocket.close(); // recvdString= "Aborted"; fos.flush(); fos.close(); recvdString= steth.getPath(); } } pluginResult = new PluginResult(PluginResult.Status.OK,recvdString); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } //--change--// else if( ACTION_READ4.equals(action) ) { try { start(); // int socketId = args.getInt(0); Log.d( "BluetoothPlugin", "Make Discoverable" ); BluetoothAdapter mBluetoothAdapter = null; ctx.startActivityForResult(this, new Intent(BluetoothAdapter.ACTION_REQUEST_DISCOVERABLE), 1); m_discoverable=true; Calendar cal = Calendar.getInstance(); Date startTime = cal.getTime(); Calendar newCal = Calendar.getInstance(); String recvdString= ""; Date endTime = newCal.getTime(); 
while(m_discoverable && ((endTime.getTime()-startTime.getTime())<32000)){ newCal = Calendar.getInstance(); endTime = newCal.getTime(); } if (m_discoverable) { recvdString = "No Device"; } else { Log.d( "BluetoothPlugin", "Connected with Remote Device" ); BluetoothSocket bluetoothSocket = bluetoothListenSocket; InputStream inputStream = bluetoothSocket.getInputStream(); int i=0; int k=0; boolean j=true; boolean measurementComplete = false; // boolean measurementOngoing = false; boolean measurementStart = false; float decweight = 0; int [] buf = new int[100]; while(!measurementComplete){ buf[i]= inputStream.read(); if ((i>5) && (buf[i] == 0x02) && (buf[i-6]==0x93) && (buf[i-1]==0x00) && !measurementStart) { measurementStart=true; } if (measurementStart && (buf[i-1]==0x04) && (buf[i-7]==0x93) && (buf[i-2]==0x0)) { measurementComplete = true; measurementStart = false; // measurementOngoing = false; decweight = (buf[i-10]<<8) + buf[i-9]; } i++; Log.d( "BluetoothPlugin", "i="+i); } // String recvdString= new String(buf,0,i,"ISO-8859-1");;//new String(buf,0,i,"ISO-8859-1");//.toString();//"KBytes" + byteCnt; float weight = decweight/100; //weight += decweight/100; recvdString= "" + weight; bluetoothSocket.close(); Log.d( "BluetoothPlugin", "Disconnected with Remote Device" ); } pluginResult = new PluginResult(PluginResult.Status.OK,recvdString); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } else if( ACTION_WRITE.equals(action) ) { try { int socketId = args.getInt(0); byte[] value = {0x11, 0x0d, 0x44, 0x4d, 0x50}; // byte[] value = {(byte)0x11,(byte)0x0D, (byte)0x0A, (byte)0x44, (byte)0x4D, (byte)0x46}; String string = new String(value); char sendCmd = 'g'; byte sendCmdByte = (byte) sendCmd;//.getBytes("UTF-16LE"); byte[] data = args.getString(1).getBytes("UTF-8"); if (data[0] == sendCmdByte) { data = value; Log.d( "BluetoothPlugin", 
"Sending Onetouch Ultra2 Commands..." ); } else if (data[0] == 'e') { data = args.getString(1).getBytes("UTF-8"); //Log.d( "BluetoothPlugin", "Sending +tronic Commands..." + args.getString(1)); } else { data = args.getString(1).getBytes("UTF-16LE"); //Log.d( "BluetoothPlugin", "Sending +tronic Commands..." + args.getString(1)); } //Log.d( "BluetoothPlugin", "Write Data..." + string ); BluetoothSocket bluetoothSocket = m_bluetoothSockets.get(socketId); OutputStream outputStream = bluetoothSocket.getOutputStream(); outputStream.write(data); outputStream.flush(); //outputStream.close(); //Log.d( "BluetoothPlugin", "Buffer: " + String.valueOf(buffer) ); pluginResult = new PluginResult(PluginResult.Status.OK, "Success"); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } else if( ACTION_DISCONNECT.equals(action) ) { try { int socketId = args.getInt(0); // Fetch socket & close it BluetoothSocket bluetoothSocket = m_bluetoothSockets.get(socketId); bluetoothSocket.close(); // Remove socket from internal list m_bluetoothSockets.remove(socketId); // Everything went fine... 
pluginResult = new PluginResult(PluginResult.Status.OK, "OK"); } catch( Exception e ) { Log.e("BluetoothPlugin", e.toString() + " / " + e.getMessage() ); pluginResult = new PluginResult(PluginResult.Status.JSON_EXCEPTION, e.getMessage()); } } else { pluginResult = new PluginResult(PluginResult.Status.INVALID_ACTION, "Action '" + action + "' not supported"); } } return pluginResult; } /** * Receives activity results */ @Override public void onActivityResult(int requestCode, int resultCode, Intent intent) { if( requestCode == 1 ) { m_stateChanging = false; } } /** * Helper class for handling all bluetooth based events */ private class BPBroadcastReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); //Log.d( "BluetoothPlugin", "Action: " + action ); // Check if we found a new device if (BluetoothDevice.ACTION_FOUND.equals(action)) { BluetoothDevice bluetoothDevice = intent .getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); try { JSONObject deviceInfo = new JSONObject(); deviceInfo.put("name", bluetoothDevice.getName()); deviceInfo.put("address", bluetoothDevice.getAddress()); m_discoveredDevices.put(deviceInfo); } catch (JSONException e) { Log.e("BluetoothPlugin", e.getMessage()); } } // Check if we finished discovering devices else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(action)) { m_discovering = false; } // Check if we found UUIDs else if(BluetoothPlugin.ACTION_UUID.equals(action)) { m_gotUUIDs = new JSONArray(); Parcelable[] parcelUuids = intent.getParcelableArrayExtra(BluetoothPlugin.EXTRA_UUID); if( parcelUuids != null ) { Log.d("BluetoothPlugin", "Found UUIDs: " + parcelUuids.length); // Sort UUIDs into JSON array and return it for( int i = 0; i < parcelUuids.length; i++ ) { m_gotUUIDs.put( parcelUuids[i].toString() ); } m_gettingUuids = false; } } } }; private class AcceptThread extends Thread { // The local server socket private final BluetoothServerSocket 
mmServerSocket; public AcceptThread() { BluetoothServerSocket tmp = null; // Create a new listening server socket try { tmp = m_bluetoothAdapter.listenUsingRfcommWithServiceRecord(NAME, MY_UUID); } catch (IOException e) { // Log.e(TAG, "listen() failed", e); } mmServerSocket = tmp; } public void run() { Log.d("BluetoothPlugin", "BEGIN mAcceptThread" + this); setName("AcceptThread"); BluetoothSocket socket = null; boolean mstate=true; // Listen to the server socket if we're not connected while (mstate) { try { // This is a blocking call and will only return on a // successful connection or an exception socket = mmServerSocket.accept(); } catch (IOException e) { // Log.e(TAG, "accept() failed", e); break; } // If a connection was accepted if (socket != null) { connected(socket, socket.getRemoteDevice()); break; } } // if (D) Log.i(TAG, "END mAcceptThread"); } public void cancel() { // if (D) Log.d(TAG, "cancel " + this); try { mmServerSocket.close(); } catch (IOException e) { // Log.e(TAG, "close() of server failed", e); } } } public synchronized void connected(BluetoothSocket socket, BluetoothDevice device) { // Cancel the accept thread because we only want to connect to one device if (mAcceptThread != null) {mAcceptThread.cancel(); mAcceptThread = null;} bluetoothListenSocket = socket; // Start the thread to manage the connection and perform transmissions m_discoverable = false; } } /** * This thread runs while listening for incoming connections. It behaves * like a server-side client. It runs until a connection is accepted * (or until cancelled). */
/**
 * Copyright 2013 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.xpydev.paycoinj.wallet;

import io.xpydev.paycoinj.wallet.KeyChain;
import io.xpydev.paycoinj.wallet.DeterministicKeyChain;
import io.xpydev.paycoinj.wallet.AbstractKeyChainEventListener;
import io.xpydev.paycoinj.wallet.Protos;
import io.xpydev.paycoinj.crypto.DeterministicHierarchy;
import io.xpydev.paycoinj.crypto.DeterministicKey;
import io.xpydev.paycoinj.params.UnitTestParams;
import io.xpydev.paycoinj.store.UnreadableWalletException;
import io.xpydev.paycoinj.utils.BriefLogFormatter;
import io.xpydev.paycoinj.utils.Threading;
import com.google.common.collect.Lists;
import org.junit.Before;
import org.junit.Test;
import org.spongycastle.crypto.params.KeyParameter;

import java.io.IOException;
import java.security.SecureRandom;
import java.util.List;

import static com.google.common.base.Preconditions.checkNotNull;
import io.xpydev.paycoinj.core.Address;
import io.xpydev.paycoinj.core.BloomFilter;
import io.xpydev.paycoinj.core.ECKey;
import io.xpydev.paycoinj.core.Sha256Hash;
import io.xpydev.paycoinj.core.Utils;
import static org.junit.Assert.*;

/**
 * Unit tests for {@link DeterministicKeyChain}: BIP32-style key derivation, event delivery,
 * protobuf (de)serialization, encryption round-trips, watching (pubkey-only) chains and
 * bloom filter population.
 *
 * <p>NOTE(review): most tests derive keys from a FIXED seed (see {@link #setup()}) so that
 * addresses and serialized fixtures ("deterministic-wallet-serialization.txt",
 * "watching-wallet-serialization.txt") are stable; key-derivation is stateful, so the order
 * of {@code getKey} calls inside each test is significant and must not be reordered.
 */
public class DeterministicKeyChainTest {
    // Chain under test; recreated before every test by setup().
    private DeterministicKeyChain chain;
    // Deliberately weak, deterministic entropy so the derived keys are reproducible.
    private final byte[] ENTROPY = Sha256Hash.create("don't use a string seed like this in real life".getBytes()).getBytes();

    /** Builds a fresh chain from fixed entropy and a fixed creation time before each test. */
    @Before
    public void setup() {
        BriefLogFormatter.init();
        // You should use a random seed instead. The secs constant comes from the unit test file, so we can compare
        // serialized data properly.
        long secs = 1389353062L;
        chain = new DeterministicKeyChain(ENTROPY, "", secs);
        chain.setLookaheadSize(10);
        assertEquals(secs, checkNotNull(chain.getSeed()).getCreationTimeSeconds());
    }

    /** Derived keys are deterministic, findable by hash/pubkey, and able to sign. */
    @Test
    public void derive() throws Exception {
        ECKey key1 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        ECKey key2 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);

        // Expected addresses come from the fixed seed above.
        final Address address = new Address(UnitTestParams.get(), "PUfdEiYpNokhQjBJhojQwr1aed3awZhnvV");
        assertEquals(address, key1.toAddress(UnitTestParams.get()));
        assertEquals("PFMhUknNAkieHUMj9oZZqAM63d63NeazW2", key2.toAddress(UnitTestParams.get()).toString());
        assertEquals(key1, chain.findKeyFromPubHash(address.getHash160()));
        assertEquals(key2, chain.findKeyFromPubKey(key2.getPubKey()));

        key1.sign(Sha256Hash.ZERO_HASH);

        ECKey key3 = chain.getKey(KeyChain.KeyPurpose.CHANGE);
        assertEquals("PJyz9bo65H3qj742ch7rx8nRaXWkZRLrTA", key3.toAddress(UnitTestParams.get()).toString());
        key3.sign(Sha256Hash.ZERO_HASH);
    }

    /** getKeys(false) reports only the leaf keys actually issued, not the lookahead zone. */
    @Test
    public void getKeys() throws Exception {
        chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        chain.getKey(KeyChain.KeyPurpose.CHANGE);
        chain.maybeLookAhead();
        assertEquals(2, chain.getKeys(false).size());
    }

    /** A freshly derived key can sign and verify an arbitrary text message. */
    @Test
    public void signMessage() throws Exception {
        ECKey key = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        key.verifyMessage("test", key.signMessage("test"));
    }

    /** Listener events fire once per generation batch, with the expected key counts. */
    @Test
    public void events() throws Exception {
        // Check that we get the right events at the right time.
        final List<List<ECKey>> listenerKeys = Lists.newArrayList();
        long secs = 1389353062L;
        chain = new DeterministicKeyChain(ENTROPY, "", secs);
        chain.addEventListener(new AbstractKeyChainEventListener() {
            @Override
            public void onKeysAdded(List<ECKey> keys) {
                listenerKeys.add(keys);
            }
        }, Threading.SAME_THREAD);
        assertEquals(0, listenerKeys.size());
        chain.setLookaheadSize(5);
        assertEquals(0, listenerKeys.size());
        ECKey key = chain.getKey(KeyChain.KeyPurpose.CHANGE);
        assertEquals(1, listenerKeys.size());   // 1 event
        final List<ECKey> firstEvent = listenerKeys.get(0);
        assertEquals(1, firstEvent.size());
        assertTrue(firstEvent.contains(key));   // order is not specified.
        listenerKeys.clear();
        chain.maybeLookAhead();
        final List<ECKey> secondEvent = listenerKeys.get(0);
        assertEquals(12, secondEvent.size());   // (5 lookahead keys, +1 lookahead threshold) * 2 chains
        listenerKeys.clear();
        chain.getKey(KeyChain.KeyPurpose.CHANGE);
        // At this point we've entered the threshold zone so more keys won't immediately trigger more generations.
        assertEquals(0, listenerKeys.size());   // 1 event
        final int lookaheadThreshold = chain.getLookaheadThreshold() + chain.getLookaheadSize();
        for (int i = 0; i < lookaheadThreshold; i++)
            chain.getKey(KeyChain.KeyPurpose.CHANGE);
        assertEquals(1, listenerKeys.size());   // 1 event
        assertEquals(1, listenerKeys.get(0).size());   // 1 key.
    }

    /** Smoke test for the random-seed constructor; no fixed expectations possible here. */
    @Test
    public void random() {
        // Can't test much here but verify the constructor worked and the class is functional. The other tests rely on
        // a fixed seed to be deterministic.
        chain = new DeterministicKeyChain(new SecureRandom(), 384);
        chain.setLookaheadSize(10);
        chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS).sign(Sha256Hash.ZERO_HASH);
        chain.getKey(KeyChain.KeyPurpose.CHANGE).sign(Sha256Hash.ZERO_HASH);
    }

    /** Serialize to protobuf, compare against the checked-in fixture, and round-trip back. */
    @Test
    public void serializeUnencrypted() throws UnreadableWalletException {
        chain.maybeLookAhead();
        DeterministicKey key1 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        DeterministicKey key2 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        DeterministicKey key3 = chain.getKey(KeyChain.KeyPurpose.CHANGE);
        List<Protos.Key> keys = chain.serializeToProtobuf();
        // 1 mnemonic/seed, 1 master key, 1 account key, 2 internal keys, 3 derived, 20 lookahead and 5 lookahead threshold.
        int numItems =
                          1  // mnemonic/seed
                        + 1  // master key
                        + 1  // account key
                        + 2  // ext/int parent keys
                        + (chain.getLookaheadSize() + chain.getLookaheadThreshold()) * 2   // lookahead zone on each chain
                ;
        assertEquals(numItems, keys.size());

        // Get another key that will be lost during round-tripping, to ensure we can derive it again.
        DeterministicKey key4 = chain.getKey(KeyChain.KeyPurpose.CHANGE);

        final String EXPECTED_SERIALIZATION = checkSerialization(keys, "deterministic-wallet-serialization.txt");

        // Round trip the data back and forth to check it is preserved.
        int oldLookaheadSize = chain.getLookaheadSize();
        chain = DeterministicKeyChain.fromProtobuf(keys, null).get(0);
        assertEquals(EXPECTED_SERIALIZATION, protoToString(chain.serializeToProtobuf()));
        assertEquals(key1, chain.findKeyFromPubHash(key1.getPubKeyHash()));
        assertEquals(key2, chain.findKeyFromPubHash(key2.getPubKeyHash()));
        assertEquals(key3, chain.findKeyFromPubHash(key3.getPubKeyHash()));
        assertEquals(key4, chain.getKey(KeyChain.KeyPurpose.CHANGE));
        key1.sign(Sha256Hash.ZERO_HASH);
        key2.sign(Sha256Hash.ZERO_HASH);
        key3.sign(Sha256Hash.ZERO_HASH);
        key4.sign(Sha256Hash.ZERO_HASH);
        // Lookahead size must survive the round trip too.
        assertEquals(oldLookaheadSize, chain.getLookaheadSize());
    }

    /** Decrypting an unencrypted chain is an illegal state. */
    @Test(expected = IllegalStateException.class)
    public void notEncrypted() {
        chain.toDecrypted("fail");
    }

    /** Encrypting an already-encrypted chain is an illegal state. */
    @Test(expected = IllegalStateException.class)
    public void encryptTwice() {
        chain = chain.toEncrypted("once");
        chain = chain.toEncrypted("twice");
    }

    /**
     * Shared assertions for an encrypted chain: lookup and derivation work without the AES key,
     * signing works with it, and password checks behave as expected.
     */
    private void checkEncryptedKeyChain(DeterministicKeyChain encChain, DeterministicKey key1) {
        // Check we can look keys up and extend the chain without the AES key being provided.
        DeterministicKey encKey1 = encChain.findKeyFromPubKey(key1.getPubKey());
        DeterministicKey encKey2 = encChain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        assertFalse(key1.isEncrypted());
        assertTrue(encKey1.isEncrypted());
        assertEquals(encKey1.getPubKeyPoint(), key1.getPubKeyPoint());
        final KeyParameter aesKey = checkNotNull(encChain.getKeyCrypter()).deriveKey("open secret");
        encKey1.sign(Sha256Hash.ZERO_HASH, aesKey);
        encKey2.sign(Sha256Hash.ZERO_HASH, aesKey);
        assertTrue(encChain.checkAESKey(aesKey));
        assertFalse(encChain.checkPassword("access denied"));
        assertTrue(encChain.checkPassword("open secret"));
    }

    /** Encrypt, serialize two copies, deserialize both, then decrypt and compare keys. */
    @Test
    public void encryption() throws UnreadableWalletException {
        DeterministicKey key1 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        DeterministicKeyChain encChain = chain.toEncrypted("open secret");
        DeterministicKey encKey1 = encChain.findKeyFromPubKey(key1.getPubKey());
        checkEncryptedKeyChain(encChain, key1);

        // Round-trip to ensure de/serialization works and that we can store two chains and they both deserialize.
        List<Protos.Key> serialized = encChain.serializeToProtobuf();
        List<Protos.Key> doubled = Lists.newArrayListWithExpectedSize(serialized.size() * 2);
        doubled.addAll(serialized);
        doubled.addAll(serialized);
        final List<DeterministicKeyChain> chains = DeterministicKeyChain.fromProtobuf(doubled, encChain.getKeyCrypter());
        assertEquals(2, chains.size());
        encChain = chains.get(0);
        checkEncryptedKeyChain(encChain, chain.findKeyFromPubKey(key1.getPubKey()));
        encChain = chains.get(1);
        checkEncryptedKeyChain(encChain, chain.findKeyFromPubKey(key1.getPubKey()));

        DeterministicKey encKey2 = encChain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        // Decrypt and check the keys match.
        DeterministicKeyChain decChain = encChain.toDecrypted("open secret");
        DeterministicKey decKey1 = decChain.findKeyFromPubHash(encKey1.getPubKeyHash());
        DeterministicKey decKey2 = decChain.findKeyFromPubHash(encKey2.getPubKeyHash());
        assertEquals(decKey1.getPubKeyPoint(), encKey1.getPubKeyPoint());
        assertEquals(decKey2.getPubKeyPoint(), encKey2.getPubKeyPoint());
        assertFalse(decKey1.isEncrypted());
        assertFalse(decKey2.isEncrypted());
        assertNotEquals(encKey1.getParent(), decKey1.getParent());   // parts of a different hierarchy
        // Check we can once again derive keys from the decrypted chain.
        decChain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS).sign(Sha256Hash.ZERO_HASH);
        decChain.getKey(KeyChain.KeyPurpose.CHANGE).sign(Sha256Hash.ZERO_HASH);
    }

    /** A watching chain re-derives the same public keys but cannot sign. */
    @Test
    public void watchingChain() throws UnreadableWalletException {
        Utils.setMockClock();
        DeterministicKey key1 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        DeterministicKey key2 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        DeterministicKey key3 = chain.getKey(KeyChain.KeyPurpose.CHANGE);
        DeterministicKey key4 = chain.getKey(KeyChain.KeyPurpose.CHANGE);

        DeterministicKey watchingKey = chain.getWatchingKey();
        final String pub58 = watchingKey.serializePubB58();
        assertEquals("xpub69KR9epSNBM59KLuasxMU5CyKytMJjBP5HEZ5p8YoGUCpM6cM9hqxB9DDPCpUUtqmw5duTckvPfwpoWGQUFPmRLpxs5jYiTf2u6xRMcdhDf", pub58);
        watchingKey = DeterministicKey.deserializeB58(null, pub58);
        watchingKey.setCreationTimeSeconds(100000);
        chain = DeterministicKeyChain.watch(watchingKey);
        assertEquals(DeterministicHierarchy.BIP32_STANDARDISATION_TIME_SECS, chain.getEarliestKeyCreationTime());
        chain.setLookaheadSize(10);
        chain.maybeLookAhead();

        assertEquals(key1.getPubKeyPoint(), chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS).getPubKeyPoint());
        assertEquals(key2.getPubKeyPoint(), chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS).getPubKeyPoint());
        final DeterministicKey key = chain.getKey(KeyChain.KeyPurpose.CHANGE);
        assertEquals(key3.getPubKeyPoint(), key.getPubKeyPoint());
        try {
            // Can't sign with a key from a watching chain.
            key.sign(Sha256Hash.ZERO_HASH);
            fail();
        } catch (ECKey.MissingPrivateKeyException e) {
            // Ignored.
        }

        // Test we can serialize and deserialize a watching chain OK.
        List<Protos.Key> serialization = chain.serializeToProtobuf();
        checkSerialization(serialization, "watching-wallet-serialization.txt");

        chain = DeterministicKeyChain.fromProtobuf(serialization, null).get(0);
        final DeterministicKey rekey4 = chain.getKey(KeyChain.KeyPurpose.CHANGE);
        assertEquals(key4.getPubKeyPoint(), rekey4.getPubKeyPoint());
    }

    /** Encrypting a pubkey-only (watching) chain must fail: there is nothing to encrypt. */
    @Test(expected = IllegalStateException.class)
    public void watchingCannotEncrypt() throws Exception {
        final DeterministicKey accountKey = chain.getKeyByPath(DeterministicKeyChain.ACCOUNT_ZERO_PATH);
        chain = DeterministicKeyChain.watch(accountKey.getPubOnly());
        chain = chain.toEncrypted("this doesn't make any sense");
    }

    /** The bloom filter contains issued keys (both pubkeys and their hashes). */
    @Test
    public void bloom1() {
        DeterministicKey key2 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        DeterministicKey key1 = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);

        int numEntries =
                (((chain.getLookaheadSize() + chain.getLookaheadThreshold()) * 2)   // * 2 because of internal/external
                        + chain.numLeafKeysIssued()
                        + 4  // one root key + one account key + two chain keys (internal/external)
                ) * 2;   // because the filter contains keys and key hashes.
        assertEquals(numEntries, chain.numBloomFilterEntries());
        BloomFilter filter = chain.getFilter(numEntries, 0.001, 1);
        assertTrue(filter.contains(key1.getPubKey()));
        assertTrue(filter.contains(key1.getPubKeyHash()));
        assertTrue(filter.contains(key2.getPubKey()));
        assertTrue(filter.contains(key2.getPubKeyHash()));
        // The lookahead zone is tested in bloom2 and via KeyChainGroupTest.bloom
    }

    /** A watching chain's filter still matches every previously issued key. */
    @Test
    public void bloom2() throws Exception {
        // Verify that when we watch a key, the filter contains at least 100 keys.
        DeterministicKey[] keys = new DeterministicKey[100];
        for (int i = 0; i < keys.length; i++)
            keys[i] = chain.getKey(KeyChain.KeyPurpose.RECEIVE_FUNDS);
        chain = DeterministicKeyChain.watch(chain.getWatchingKey());
        int e = chain.numBloomFilterEntries();
        BloomFilter filter = chain.getFilter(e, 0.001, 1);
        for (DeterministicKey key : keys)
            assertTrue("key " + key, filter.contains(key.getPubKeyHash()));
    }

    /** Renders a protobuf key list as trimmed, newline-separated text for fixture comparison. */
    private String protoToString(List<Protos.Key> keys) {
        StringBuilder sb = new StringBuilder();
        for (Protos.Key key : keys) {
            sb.append(key.toString());
            sb.append("\n");
        }
        return sb.toString().trim();
    }

    /** Asserts the serialization matches the named classpath fixture and returns the fixture text. */
    private String checkSerialization(List<Protos.Key> keys, String filename) {
        try {
            String sb = protoToString(keys);
            String expected = Utils.getResourceAsString(getClass().getResource(filename));
            assertEquals(expected, sb);
            return expected;
        } catch (IOException e) {
            // Fixture must be present on the test classpath; surface load failures loudly.
            throw new RuntimeException(e);
        }
    }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.android;

import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.io.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.WriteFileStep;
import com.facebook.buck.util.stream.RichStream;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.SortedSet;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * A helper class for {@link RobolectricTest} and {@link RobolectricTestX}.
 *
 * <p>Collects Android resource/asset directories and the Robolectric manifest/runtime-dependency
 * locations and hands them to the Robolectric test runner as JVM system properties. Directory
 * lists are passed either inline (separated by {@link File#pathSeparator}) or, when
 * {@code passDirectoriesInFile} is set, via an {@code @file} whose path is prefixed with '@'
 * — presumably to avoid OS command-line length limits (NOTE(review): confirm against the runner).
 */
class RobolectricTestHelper {
  private static final Logger LOG = Logger.get(RobolectricTestHelper.class);

  /**
   * Used by robolectric test runner to get list of resource directories that can be used for
   * tests.
   */
  static final String LIST_OF_RESOURCE_DIRECTORIES_PROPERTY_NAME =
      "buck.robolectric_res_directories";

  // System property naming the asset directories available to tests.
  static final String LIST_OF_ASSETS_DIRECTORIES_PROPERTY_NAME =
      "buck.robolectric_assets_directories";

  // System property naming the AndroidManifest.xml Robolectric should load.
  static final String ROBOLECTRIC_MANIFEST = "buck.robolectric_manifest";

  // System property naming the local directory of Robolectric's own runtime dependencies.
  static final String ROBOLECTRIC_DEPENDENCY_DIR = "robolectric.dependency.dir";

  private final BuildTarget buildTarget;
  // When present, supplies the android resource deps whose res/assets dirs are exposed to tests.
  private final Optional<DummyRDotJava> optionalDummyRDotJava;
  private final Optional<SourcePath> robolectricManifest;
  private final Optional<SourcePath> robolectricRuntimeDependency;
  private final ProjectFilesystem projectFilesystem;
  // If true, directory lists are written to files and passed as "@<file>" arguments.
  private final boolean passDirectoriesInFile;
  private final Path resourceDirectoriesPath;
  private final Path assetDirectoriesPath;

  RobolectricTestHelper(
      BuildTarget buildTarget,
      Optional<DummyRDotJava> optionalDummyRDotJava,
      Optional<SourcePath> robolectricRuntimeDependency,
      Optional<SourcePath> robolectricManifest,
      ProjectFilesystem projectFilesystem,
      boolean passDirectoriesInFile) {
    this.buildTarget = buildTarget;
    this.optionalDummyRDotJava = optionalDummyRDotJava;
    this.robolectricRuntimeDependency = robolectricRuntimeDependency;
    this.robolectricManifest = robolectricManifest;
    this.projectFilesystem = projectFilesystem;
    this.passDirectoriesInFile = passDirectoriesInFile;

    resourceDirectoriesPath =
        RobolectricTestHelper.getResourceDirectoriesPath(projectFilesystem, buildTarget);
    assetDirectoriesPath =
        RobolectricTestHelper.getAssetDirectoriesPath(projectFilesystem, buildTarget);
  }

  /** Gen-dir path of the file listing resource directories for this target. */
  @VisibleForTesting
  static Path getResourceDirectoriesPath(
      ProjectFilesystem projectFilesystem, BuildTarget buildTarget) {
    return BuildTargetPaths.getGenPath(
        projectFilesystem, buildTarget, "%s/robolectric-resource-directories");
  }

  /** Gen-dir path of the file listing asset directories for this target. */
  @VisibleForTesting
  static Path getAssetDirectoriesPath(
      ProjectFilesystem projectFilesystem, BuildTarget buildTarget) {
    return BuildTargetPaths.getGenPath(
        projectFilesystem, buildTarget, "%s/robolectric-asset-directories");
  }

  /**
   * Builds the newline-separated directory list for the resource deps selected by {@code filter}
   * (res or assets); empty string when there is no DummyRDotJava.
   */
  private String getDirectoriesContent(
      SourcePathResolverAdapter pathResolver, Function<HasAndroidResourceDeps, SourcePath> filter) {
    String content;
    if (optionalDummyRDotJava.isPresent()) {
      Iterable<String> resourceDirectories =
          getDirs(
              optionalDummyRDotJava.get().getAndroidResourceDeps().stream().map(filter),
              pathResolver);
      content = Joiner.on('\n').join(resourceDirectories);
    } else {
      content = "";
    }
    return content;
  }

  /** Write resource and asset before test */
  void onPreTest(BuildContext buildContext) throws IOException {
    projectFilesystem.writeContentsToPath(
        getDirectoriesContent(buildContext.getSourcePathResolver(), HasAndroidResourceDeps::getRes),
        resourceDirectoriesPath);
    projectFilesystem.writeContentsToPath(
        getDirectoriesContent(
            buildContext.getSourcePathResolver(), HasAndroidResourceDeps::getAssets),
        assetDirectoriesPath);
  }

  /** Adds build steps that (re)create and populate both directory-list files. */
  void addPreTestSteps(BuildContext buildContext, ImmutableList.Builder<Step> stepsBuilder) {
    stepsBuilder.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                buildContext.getBuildCellRootPath(), projectFilesystem, resourceDirectoriesPath)));
    stepsBuilder.add(
        new WriteFileStep(
            projectFilesystem,
            getDirectoriesContent(
                buildContext.getSourcePathResolver(), HasAndroidResourceDeps::getRes),
            resourceDirectoriesPath,
            false));
    stepsBuilder.add(
        new WriteFileStep(
            projectFilesystem,
            getDirectoriesContent(
                buildContext.getSourcePathResolver(), HasAndroidResourceDeps::getAssets),
            assetDirectoriesPath,
            false));
  }

  /** Amend jvm args, adding manifest and dependency paths */
  void amendVmArgs(
      ImmutableList.Builder<String> vmArgsBuilder, SourcePathResolverAdapter pathResolver) {
    if (optionalDummyRDotJava.isPresent()) {
      ImmutableList<HasAndroidResourceDeps> resourceDeps =
          optionalDummyRDotJava.get().getAndroidResourceDeps();
      vmArgsBuilder.add(getRobolectricResourceDirectoriesArg(pathResolver, resourceDeps));
      vmArgsBuilder.add(getRobolectricAssetsDirectories(pathResolver, resourceDeps));
    }

    // Force robolectric to only use local dependency resolution.
    vmArgsBuilder.add("-Drobolectric.offline=true");
    robolectricManifest.ifPresent(
        s ->
            vmArgsBuilder.add(
                String.format(
                    "-D%s=%s",
                    RobolectricTestHelper.ROBOLECTRIC_MANIFEST, pathResolver.getAbsolutePath(s))));
    robolectricRuntimeDependency.ifPresent(
        s ->
            vmArgsBuilder.add(
                String.format(
                    "-D%s=%s",
                    RobolectricTestHelper.ROBOLECTRIC_DEPENDENCY_DIR,
                    pathResolver.getAbsolutePath(s))));
  }

  /** Builds the -D argument naming asset directories, inline or via "@file". */
  @VisibleForTesting
  String getRobolectricAssetsDirectories(
      SourcePathResolverAdapter pathResolver, List<HasAndroidResourceDeps> resourceDeps) {
    String argValue;
    if (passDirectoriesInFile) {
      argValue = "@" + projectFilesystem.resolve(assetDirectoriesPath);
    } else {
      argValue =
          Joiner.on(File.pathSeparator)
              .join(
                  getDirs(
                      resourceDeps.stream().map(HasAndroidResourceDeps::getAssets), pathResolver));
    }
    return String.format(
        "-D%s=%s", RobolectricTestHelper.LIST_OF_ASSETS_DIRECTORIES_PROPERTY_NAME, argValue);
  }

  /** Builds the -D argument naming resource directories, inline or via "@file". */
  @VisibleForTesting
  String getRobolectricResourceDirectoriesArg(
      SourcePathResolverAdapter pathResolver, List<HasAndroidResourceDeps> resourceDeps) {
    String argValue;
    if (passDirectoriesInFile) {
      argValue = "@" + projectFilesystem.resolve(resourceDirectoriesPath);
    } else {
      argValue =
          Joiner.on(File.pathSeparator)
              .join(
                  getDirs(resourceDeps.stream().map(HasAndroidResourceDeps::getRes), pathResolver));
    }
    return String.format(
        "-D%s=%s", RobolectricTestHelper.LIST_OF_RESOURCE_DIRECTORIES_PROPERTY_NAME, argValue);
  }

  /**
   * Resolves source paths to project-relative directory strings, failing fast on a missing
   * directory and silently keeping (with a warning) paths it cannot inspect. Empty directories
   * are dropped.
   */
  private Iterable<String> getDirs(
      Stream<SourcePath> sourcePathStream, SourcePathResolverAdapter pathResolver) {

    return sourcePathStream
        .filter(Objects::nonNull)
        .map(input -> projectFilesystem.relativize(pathResolver.getAbsolutePath(input)))
        .filter(
            input -> {
              try {
                if (!projectFilesystem.isDirectory(input)) {
                  throw new RuntimeException(
                      String.format(
                          "Path %s is needed to run robolectric test %s, but was not found.",
                          input, buildTarget));
                }
                return !projectFilesystem.getDirectoryContents(input.getPath()).isEmpty();
              } catch (IOException e) {
                LOG.warn(e, "Error filtering path for Robolectric res/assets.");
                return true;
              }
            })
        .map(Object::toString)
        .collect(Collectors.toList());
  }

  /** get extra run time dependency defined in the test description */
  Stream<BuildTarget> getExtraRuntimeDeps(
      BuildRuleResolver buildRuleResolver, SortedSet<BuildRule> buildDeps) {
    return Stream.of(
            // On top of the runtime dependencies of a normal {@link JavaTest}, we need to make
            // the
            // {@link DummyRDotJava} and any of its resource deps is available locally (if it
            // exists)
            // to run this test.
            RichStream.from(optionalDummyRDotJava),
            buildRuleResolver.filterBuildRuleInputs(
                RichStream.from(optionalDummyRDotJava)
                    .flatMap(input -> input.getAndroidResourceDeps().stream())
                    .flatMap(input -> Stream.of(input.getRes(), input.getAssets()))
                    .filter(Objects::nonNull)),
            // It's possible that the user added some tool as a dependency, so make sure we
            // promote this rules first-order deps to runtime deps, so that these potential
            // tools are available when this test runs.
            buildDeps.stream())
        .reduce(Stream.empty(), Stream::concat)
        .map(BuildRule::getBuildTarget);
  }
}
package org.wwarn.surveyor.server.core;

/*
 * #%L
 * SurveyorCore
 * %%
 * Copyright (C) 2013 - 2014 University of Oxford
 * %%
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the University of Oxford nor the names of its contributors
 * may be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * #L%
 */

// NOTE(review): removed unused import com.google.gwt.core.shared.GWT (referenced nowhere in this file).
import com.sun.nio.file.SensitivityWatchEventModifier;

import java.io.IOException;
import java.nio.file.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.concurrent.CountDownLatch;
import java.util.logging.Level;
import java.util.logging.Logger;

import static java.nio.file.StandardWatchEventKinds.*;
import static java.util.logging.Level.INFO;
import static java.util.logging.Level.SEVERE;

/**
 * Listen for changes in local indexed file and inform parent class of changes.
 * Watches the parent directory of a single file via {@link WatchService} and
 * notifies registered {@link Observer}s whenever an event for that exact file
 * (create/modify/delete in the parent directory, resolving to the same file)
 * is observed. Takes a while to set up the file-changed listener.
 */
public class FileChangeMonitor extends Observable {
    private static Logger logger = Logger.getLogger("SurveyorCore.FileChangeMonitor");
    // WatchService is created lazily in initializeWatcherWithDirectory(); null until init* is called.
    private WatchService watcher;
    // Maps each registration key back to the directory it watches.
    private final Map<WatchKey, Path> keys = new HashMap<WatchKey, Path>();
    // The single file whose changes are reported to observers.
    private Path monitoredFile;
    // Alias of this, used by the inner Watcher to fire Observable notifications.
    private FileChangeMonitor fileChangeMonitor = this;
    // When true, register() prints registration/update diagnostics to stdout.
    private boolean trace = false;

    protected FileChangeMonitor() {
    }

    /** Initialization-on-demand holder: the JVM guarantees lazy, thread-safe construction. */
    protected static class Loader {
        static FileChangeMonitor INSTANCE = new FileChangeMonitor();
    }

    @Override
    public synchronized void addObserver(Observer o) {
        // Warn (but do not fail) when observers are added before any init* call,
        // since without a watcher there will never be anything to observe.
        if (watcher == null) {
            logger.log(INFO, "FileChangeMonitor::addObserver + init/initSynchronous not yet called - ensure it is called with a path to file to observer, or there will be nothing to observe!");
        }
        super.addObserver(o);
    }

    /**
     * Thread safe lazy instantiation done by JVM, no explicit synchronisation
     * @return the singleton instance
     */
    public static FileChangeMonitor getInstance() {
        return Loader.INSTANCE;
    }

    /**
     * Starts monitoring the given file on a new background thread.
     *
     * @param monitoredFile regular file to watch for changes
     * @throws IOException if the watch service cannot be created or registered
     */
    public void init(Path monitoredFile) throws IOException {
        initNewThread(monitoredFile, new CountDownLatch(1), new CountDownLatch(1));
    }

    /**
     * Added countdown latches as a synchronization aid to allow better unit testing.
     * Allows one or more threads to wait until a set of operations being performed in other threads completes.
     *
     * @param monitoredFile regular file to watch for changes
     * @param start calling start.await() waits till file listener is active and ready
     * @param stop calling stop.await() allows calling code to wait until a fileChangedEvent is processed
     * @throws IOException if the watch service cannot be created or registered
     */
    protected void initNewThread(Path monitoredFile, CountDownLatch start, CountDownLatch stop) throws IOException {
        // Renamed local (was "watcher") so it no longer shadows the WatchService field.
        final Runnable watcherTask = initializeWatcherWithDirectory(monitoredFile, start, stop);
        final Thread thread = new Thread(watcherTask);
        // NOTE(review): non-daemon thread keeps the JVM alive until interrupted - confirm this is intended.
        thread.setDaemon(false);
        thread.start();
    }

    /**
     * A blocking method to begin the monitoring of a directory; only exits on thread interrupt.
     *
     * @param monitoredFile regular file to watch for changes
     * @throws IOException if the watch service cannot be created or registered
     */
    public void initSynchronous(Path monitoredFile) throws IOException {
        final Runnable watcherTask = initializeWatcherWithDirectory(monitoredFile, new CountDownLatch(1), new CountDownLatch(1));
        watcherTask.run();
    }

    /**
     * Creates the watch service, registers the file's parent directory, and
     * returns the (not yet running) watcher task.
     */
    private Runnable initializeWatcherWithDirectory(Path monitoredFile, CountDownLatch start, CountDownLatch stop) throws IOException {
        if (!Files.isRegularFile(monitoredFile)) {
            throw new IllegalArgumentException("Input of type File expected");
        }
        final FileSystem fileSystem = FileSystems.getDefault();
        watcher = fileSystem.newWatchService();
        this.monitoredFile = monitoredFile;
        // Watch services operate on directories, so register the file's parent.
        final Path monitoredFileParentDirectory = monitoredFile.getParent();
        register(monitoredFileParentDirectory);
        final Runnable watcherTask = new Watcher(start, stop);
        return watcherTask;
    }

    /** Narrowing cast helper for raw WatchEvent instances. */
    @SuppressWarnings("unchecked")
    static <T> WatchEvent<T> cast(WatchEvent<?> event) {
        return (WatchEvent<T>) event;
    }

    /**
     * Register the given directory with the WatchService
     */
    private void register(Path dir) throws IOException {
        // WatchKey key = dir.register(watcher, ENTRY_CREATE, ENTRY_MODIFY);
        // SensitivityWatchEventModifier.HIGH shortens the polling interval on
        // platforms (e.g. macOS) where WatchService falls back to polling.
        WatchKey key = dir.register(watcher, new WatchEvent.Kind[]{StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_CREATE}, SensitivityWatchEventModifier.HIGH);
        if (trace) {
            Path prev = keys.get(key);
            if (prev == null) {
                System.out.format("register: %s\n", dir);
            } else {
                if (!dir.equals(prev)) {
                    System.out.format("update: %s -> %s\n", prev, dir);
                }
            }
        }
        keys.put(key, dir);
    }

    /** Event-pump task: blocks on the watch service and notifies observers on matches. */
    class Watcher implements Runnable {
        private final CountDownLatch stopSignal;
        private final CountDownLatch startSignal;

        public Watcher(CountDownLatch start, CountDownLatch stop) {
            startSignal = start;
            stopSignal = stop;
        }

        /**
         * Blocking event loop: takes keys from the watch service, filters events
         * down to the monitored file, and notifies observers. Exits when the
         * thread is interrupted or all watched directories become inaccessible.
         */
        void processEvents() throws IOException, InterruptedException {
            // loop forever or until thread interrupted
            while (!Thread.currentThread().isInterrupted()) {
                // wait for key to be signalled
                logger.log(INFO, "Watcher::processEvents " + "Started the long blocking call");
                WatchKey key = watcher.take();
                /* This call is blocking until events are present.
                   This can take a while to complete, hence startSignal given only after this is loaded.
                   NOTE(review): as a consequence start.await() does not return until the FIRST event
                   arrives, not merely when the listener is registered - confirm this is intended. */
                startSignal.countDown();
                logger.log(INFO, "Watcher::processEvents " + "Finished the long blocking call");
                Path dir = keys.get(key);
                if (dir == null) {
                    final String warningmsg = "WatchKey not recognized!!";
                    // Fixed logging: the previous log(SEVERE, msg, param) overload dropped the
                    // detail text because the message contained no {0} placeholder.
                    logger.log(SEVERE, "Watcher::processEvents " + warningmsg);
                    throw new IllegalStateException(warningmsg);
                }

                // poll for file system events on the WatchKey
                for (WatchEvent<?> event : key.pollEvents()) {
                    WatchEvent.Kind kind = event.kind();

                    // TBD - provide example of how OVERFLOW event is handled
                    if (kind == OVERFLOW) {
                        continue;
                    }

                    // Context for directory entry event is the file name of entry
                    WatchEvent<Path> ev = cast(event);
                    Path name = ev.context();
                    Path child = dir.resolve(name);
                    // Only notify when the event refers to the one file we monitor.
                    if (Files.exists(child) && Files.isSameFile(child, monitoredFile)) {
                        // set event change
                        fileChangeMonitor.setChanged();
                        fileChangeMonitor.notifyObservers(event);
                        stopSignal.countDown();
                    }
                }

                // reset key and remove from set if directory no longer accessible
                boolean valid = key.reset();
                if (!valid) {
                    keys.remove(key);

                    // all directories are inaccessible
                    if (keys.isEmpty()) {
                        break;
                    }
                }
            }
        }

        @Override
        public void run() {
            try {
                logger.log(INFO, "Watcher::run " + "Entered run state");
                processEvents();
            } catch (IOException e) {
                logger.log(SEVERE, "Watcher::run I/O failure while call to processEvents", e);
                throw new IllegalStateException(e);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers/executors can observe it.
                Thread.currentThread().interrupt();
                logger.log(SEVERE, "Watcher::run Thread interrupted exception", e);
                throw new IllegalStateException(e);
            }
        }
    }
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2007-2015 Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

package org.broad.igv.batch;

// NOTE(review): removed unused import biz.source_code.base64Coder.Base64Coder (referenced nowhere in this file).
import org.broad.igv.PreferenceManager;
import org.broad.igv.feature.genome.GenomeManager;
import org.broad.igv.ui.AbstractHeadedTest;
import org.broad.igv.ui.IGV;
import org.broad.igv.util.StringUtils;
import org.broad.igv.util.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.Socket;
import java.net.URL;

import static org.junit.Assert.assertEquals;

/**
 * Tests for the IGV batch {@link CommandListener}: commands delivered both over
 * a raw socket (one command per line) and over HTTP links to the listener port.
 *
 * @author jacob
 * @date 2013-Jul-11
 */
public class CommandListenerTest extends AbstractHeadedTest {

    private static final int port = 60151;

    @BeforeClass
    public static void setUpClass() throws Exception {
        // Enable the command listener on a fixed port before the IGV UI starts.
        PreferenceManager.getInstance().override(PreferenceManager.PORT_ENABLED, "true");
        PreferenceManager.getInstance().override(PreferenceManager.PORT_NUMBER, "" + port);
        AbstractHeadedTest.setUpClass();
    }

    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Restart the listener for each test so state does not leak between tests.
        CommandListener.halt();
        CommandListener.start(port);
        IGV.getInstance().loadGenome(TestUtils.defaultGenome, null, true);
    }

    @After
    public void tearDown() throws Exception {
        super.tearDown();
        CommandListener.halt();
    }

    private static String buildRootURL() {
        return String.format("http://localhost:%d/", port);
    }

    String genId = "mm10";

    /**
     * Opens a socket to the listener, sends a single command line, and returns
     * the first response line. Fixes the original resource leak: the socket and
     * its streams are now closed via try-with-resources.
     */
    private String sendSocketCommand(String command) throws Exception {
        try (Socket socket = new Socket("localhost", port);
             PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
             BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()))) {
            out.println(command);
            String response = in.readLine();
            System.out.println(response);
            return response;
        }
    }

    @Test
    public void testGenomeSocket() throws Exception {
        String locus = "chr1:1-100";
        sendSocketCommand("genome " + genId);
        assertEquals(genId, GenomeManager.getInstance().getGenomeId());
    }

    @Test
    public void testGenomeLink() throws Exception {
        String cmd = buildRootURL() + "load?genome=" + genId;
        connect(cmd);
        assertEquals(genId, GenomeManager.getInstance().getGenomeId());
    }

    @Test
    public void testLoadURLLink() throws Exception {
        String urlPath = CommandExecutorTest.urlPathSpaces;
        String name = "mytestfile";
        String cmd = buildRootURL() + "load?file=" + urlPath + "&name=" + name;
        connect(cmd);

        TestUtils.assertTrackLoaded(igv, name);
    }

    @Test
    public void testLoadURLSocket() throws Exception {
        String urlPath = CommandExecutorTest.urlPathSpaces;
        String name = "mytestfile";
        sendSocketCommand("load " + urlPath + " name=" + name);

        TestUtils.assertTrackLoaded(igv, name);
    }

    @Test
    public void testLoadFileSpacesSocket() throws Exception {
        tstLoadFileSocket(CommandExecutorTest.dirPathSpaces, CommandExecutorTest.fileName01);
    }

    @Test
    public void testLoadFileSpacesPercSocket() throws Exception {
        tstLoadFileSocket(CommandExecutorTest.dirPathSpaces, CommandExecutorTest.fileNamePerc);
    }

    private void tstLoadFileSocket(String fidir, String finame) throws Exception {
        // Paths containing spaces must be quoted on the socket protocol.
        String fileString = String.format("\"%s\"", (new File(fidir, finame)).getPath());
        sendSocketCommand("load " + fileString);

        TestUtils.assertTrackLoaded(IGV.getInstance(), finame);
    }

    @Test
    public void testLoadFileSpacesLink() throws Exception {
        tstLoadFileLink(CommandExecutorTest.dirPathSpaces, CommandExecutorTest.fileName01);
    }

    @Test
    public void testLoadFileSpacesPercLink() throws Exception {
        tstLoadFileLink(CommandExecutorTest.dirPathSpaces, CommandExecutorTest.fileNamePerc);
    }

    private void tstLoadFileLink(String fidir, String finame) throws Exception {
        String fileString = (new File(fidir, finame)).getPath();
        // URLs cannot carry raw spaces/percent signs; encode the path first.
        String urlPath = StringUtils.encodeURL(fileString);
        String name = "mytestfile";

        String cmd = buildRootURL() + "load?file=" + urlPath + "&name=" + name;
        connect(cmd);

        TestUtils.assertTrackLoaded(IGV.getInstance(), name);
    }

    /** Issues a GET against the listener and returns the open connection. */
    private HttpURLConnection connect(String urlStr) throws Exception {
        URL url = new URL(urlStr);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Connection", "Keep-Alive");
        conn.connect();
        System.out.println(conn.getResponseCode() + ":" + conn.getResponseMessage());
        return conn;
    }

    @Test
    public void testSHA1() throws Exception {
        // Example from WebSocket RFC
        String guid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
        String clientSocketKey = "dGhlIHNhbXBsZSBub25jZQ==";
        String serverSocketKey = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=";

        String generatedKey = CommandListener.computeResponseKey(clientSocketKey + guid);
        // Fixed: dropped the redundant new String(...) wrapper and put the
        // JUnit arguments in (expected, actual) order.
        assertEquals(serverSocketKey, generatedKey);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * 
 *      http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/* @test %W% %E%
 * @summary Tests the WeakSoftTable class.
 * 
 * @library ../../../../../unittestlib
 * @build UnitTestUtilities BasicTest Test
 * @build TestUtilities
 * @run main/othervm/policy=policy TestWeakSoftTable
 */

import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.util.*;
import com.sun.jini.collection.WeakSoftTable;

/**
 * Unit tests for com.sun.jini.collection.WeakSoftTable (a table with weakly
 * referenced keys and softly referenced values).
 *
 * NOTE: this code is deliberately GC-sensitive. LocalTest holds strong
 * references to keys/values so they are not collected mid-test, and
 * maybeClear() simulates collection by explicitly clearing and enqueuing
 * selected References. Statement order matters; do not reorder.
 *
 * Test infrastructure (TestUtilities, BasicTest, Test, LazyField,
 * FailedException, test(), getCompareTo()) comes from ../../../../../unittestlib
 * and is not visible here.
 */
public abstract class TestWeakSoftTable extends TestUtilities {

    /**
     * Test key: equality/hashing are by name only, while the unique index is
     * used purely for toString, so two Keys with the same name (keyA/keyA2)
     * are equal yet distinguishable in test output.
     */
    static class Key {
	private static int nextIndex;
	private final String name;
	// Unique per-instance id; intentionally excluded from equals/hashCode.
	private final int index = ++nextIndex;
	Key(String name) { this.name = name; }
	public int hashCode() { return name.hashCode(); }
	public boolean equals(Object o) {
	    return (o instanceof Key) && name.equals(((Key) o).name);
	}
	public String toString() { return "key" + name + "[" + index + "]"; }
    }

    /** Weak key wrapper; copy() re-registers the reference with a new queue. */
    static class WeakKey extends WeakSoftTable.WeakKey {
	WeakKey(Object key) { super(key); }
	private WeakKey(WeakKey weakKey, ReferenceQueue queue) {
	    super(weakKey, queue);
	}
	public WeakSoftTable.RemovableReference copy(ReferenceQueue queue) {
	    return new WeakKey(this, queue);
	}
	public String toString() { return String.valueOf(get()); }
    }

    /** Soft value wrapper; copy() re-registers the reference with a new queue. */
    static class SoftValue extends WeakSoftTable.SoftValue {
	SoftValue(WeakKey key, Object value) { super(key, value); }
	private SoftValue(SoftValue softValue, ReferenceQueue queue) {
	    super(softValue, queue);
	}
	public WeakSoftTable.RemovableReference copy(ReferenceQueue queue) {
	    return new SoftValue(this, queue);
	}
	public String toString() { return String.valueOf(get()); }
    }

    // Collected by the static initializers below and run by main().
    public static Collection tests = new ArrayList();

    // Shared fixtures. keyA and keyA2 are equal (same name) but distinct objects.
    static final Key keyA = new Key("A");
    static final String valA = "valA";
    static final Key keyA2 = new Key("A");
    static final String valA2 = "valA2";
    static final Key keyB = new Key("B");
    static final String valB = "valB";
    static final String valB2 = "valB2";
    // Never stored in any test's data; used only to force queue processing.
    static final Key keyX = new Key("X");

    // Reflective accessor for WeakSoftTable's private "hash" map field.
    static final LazyField hash =
	new LazyField("com.sun.jini.collection", "WeakSoftTable", "hash");

    public static void main(String[] args) {
	test(tests);
    }

    /* Varargs-style builders for key/value and clear arrays. */
    static Object[] array(Object a) { return new Object[] { a }; }
    static Object[] array(Object a, Object b) { return new Object[] { a, b }; }
    static Object[] array(Object a, Object b, Object c, Object d) {
	return new Object[] { a, b, c, d };
    }

    /** Identity-based (==) containment check, unlike Collection.contains. */
    static boolean contains(Collection c, Object o) {
	for (Iterator iter = c.iterator(); iter.hasNext(); ) {
	    if (iter.next() == o) {
		return true;
	    }
	}
	return false;
    }

    /**
     * Base for all table tests: builds a table from key/value pairs, can
     * simulate GC of selected keys/values, and wraps get/add/remove so tests
     * deal in plain objects rather than Reference wrappers.
     */
    static abstract class LocalTest extends BasicTest {
	WeakSoftTable table;
	Object[] keysAndValues;
	Collection clear;

	LocalTest(String name, Object[] keysAndValues, Object[] clear,
		  Object result)
	{
	    super(name + "\n keysAndValues = " + toString(keysAndValues) +
		  (clear == null ? "" : "\n clear = " + toString(clear)),
		  result);
	    /* Hold reference to the keys and values so they don't get GC'ed */
	    this.keysAndValues =
		(keysAndValues == null) ? new Object[0] : keysAndValues;
	    this.clear =
		(clear == null) ? Collections.EMPTY_LIST : Arrays.asList(clear);
	}

	/** Looks up the index'th value for key; null if absent or cleared. */
	Object get(Object key, int index) {
	    SoftValue softValue = (SoftValue) table.get(new WeakKey(key), index);
	    return softValue == null ? null : softValue.get();
	}

	/** Adds value under key; key and value wrappers share the same WeakKey. */
	void add(Object key, Object value) {
	    WeakKey weakKey = new WeakKey(key);
	    table.add(weakKey, new SoftValue(weakKey, value));
	}

	/** Removes and returns the index'th value for key; null if absent. */
	Object remove(Object key, int index) {
	    SoftValue softValue =
		(SoftValue) table.remove(new WeakKey(key), index);
	    return softValue == null ? null : softValue.get();
	}

	/**
	 * Simulates garbage collection: clears and enqueues every key or value
	 * Reference in the table whose referent is (identity-)listed in clear.
	 * If force is true, performs a remove of the never-present keyX, which
	 * causes the table to process its reference queue.
	 */
	void maybeClear(boolean force) {
	    if (clear == null) {
		return;
	    }
	    for (Iterator iter = getHash().entrySet().iterator();
		 iter.hasNext(); )
	    {
		Map.Entry entry = (Map.Entry) iter.next();
		Reference key = (Reference) entry.getKey();
		if (contains(clear, key.get())) {
		    key.clear();
		    key.enqueue();
		}
		List list = (List) entry.getValue();
		for (int i = list.size(); --i >= 0; ) {
		    Reference value = (Reference) list.get(i);
		    if (contains(clear, value.get())) {
			value.clear();
			value.enqueue();
		    }
		}
	    }
	    /* Force processing queue */
	    if (force) {
		remove(keyX, 0);
	    }
	}

	/** Builds a fresh table from the (key, value) pairs held by this test. */
	void initTable() {
	    table = new WeakSoftTable();
	    for (int i = 0; i < keysAndValues.length; i += 2) {
		add(keysAndValues[i], keysAndValues[i+1]);
	    }
	}

	/** The table's internal hash map, obtained reflectively via LazyField. */
	Map getHash() {
	    return (Map) hash.get(table);
	}
    }

    static {
	tests.add(TestGet.localtests);
    }

    /** Tests WeakSoftTable.get across absent keys, indices, and cleared refs. */
    public static class TestGet extends LocalTest {
	static Test[] localtests = {
	    new TestGet(keyA, 0, null, null, null),
	    new TestGet(keyA, 33, null, null, null),
	    new TestGet(keyA, 0, array(keyA, valA), null, valA),
	    new TestGet(keyA, 1, array(keyA, valA), null, null),
	    new TestGet(keyA, 0, array(keyA2, valA), null, null),
	    new TestGet(keyA, 0, array(keyB, valA), null, null),
	    new TestGet(keyA, 0, array(keyA, valA), array(keyA), null),
	    new TestGet(keyA, 0, array(keyA, valA), array(valA), null),
	    new TestGet(keyA, 0, array(keyA, valA, keyA, valA2), null, valA),
	    new TestGet(keyA, 1, array(keyA, valA, keyA, valA2), null, valA2),
	    new TestGet(keyA, 0, array(keyA, valA, keyA, valA2), array(keyA),
			null),
	    new TestGet(keyA, 0, array(keyA, valA, keyA, valA2), array(valA),
			valA2),
	    new TestGet(keyA, 0, array(keyA, valA, keyA, valA2), array(valA2),
			valA),
	    new TestGet(keyA, 0, array(keyA, valA, keyA, valA2),
			array(valA, valA2), null),
	    new TestGet(keyA, 1, array(keyA, valA, keyA, valA2), array(valA),
			null),
	    new TestGet(keyA, 0, array(keyA, valA, keyB, valB), array(keyB),
			valA)
	};

	final Object key;
	final int index;

	TestGet(Object key, int index, Object[] keysAndValues, Object[] clear,
		Object result)
	{
	    super("get(" + key + ", " + index + ")", keysAndValues, clear,
		  result);
	    this.key = key;
	    this.index = index;
	}

	public Object run() {
	    initTable();
	    maybeClear(true);
	    return get(key, index);
	}

	// Also verifies that get is repeatable (second call yields same result).
	public void check(Object result) throws Exception {
	    super.check(result);
	    super.check(get(key, index));
	}
    }

    static {
	tests.add(TestAdd.localtests);
    }

    /** Tests WeakSoftTable.add; the expected value doubles as the value added. */
    public static class TestAdd extends LocalTest {
	static Test[] localtests = {
	    new TestAdd(keyA, valA, null, null, 0),
	    new TestAdd(keyA, valA2, array(keyA, valA), null, 1),
	    new TestAdd(keyA, valA2, array(keyA, valA), array(keyA), 0),
	    new TestAdd(keyA, valA2, array(keyA, valA), array(valA), 0)
	};

	final Object key;
	// Index at which the newly added value is expected to land.
	final int index;

	TestAdd(Object key, Object value, Object[] keysAndValues,
		Object[] clear, int index)
	{
	    super("add(" + key + ", " + value + ")", keysAndValues, clear,
		  value);
	    this.key = key;
	    this.index = index;
	}

	public Object run() {
	    initTable();
	    maybeClear(false);
	    // getCompareTo() (from BasicTest) is the expected value passed to super.
	    add(key, getCompareTo());
	    return get(key, index);
	}
    }

    static {
	tests.add(TestRemove.localtests);
    }

    /** Tests WeakSoftTable.remove, including the value left behind afterwards. */
    public static class TestRemove extends LocalTest {
	static Test[] localtests = {
	    new TestRemove(keyA, 0, null, null, null, null),
	    new TestRemove(keyA, 0, array(keyA, valA), null, valA, null),
	    new TestRemove(keyA, 0, array(keyA, valA, keyA, valA2), null, valA,
			   valA2)
	};

	final Object key;
	final int index;
	// Value expected at (key, index) after the removal completes.
	final Object nextResult;

	TestRemove(Object key, int index, Object[] keysAndValues,
		   Object[] clear, Object result, Object nextResult)
	{
	    super("remove(" + key + ", " + index + ")", keysAndValues, clear,
		  result);
	    this.key = key;
	    this.index = index;
	    this.nextResult = nextResult;
	}

	public Object run() {
	    initTable();
	    maybeClear(false);
	    return remove(key, index);
	}

	// Identity comparison (==) is intentional: fixtures are interned constants.
	public void check(Object result) throws Exception {
	    super.check(result);
	    Object get = get(key, index);
	    if (get != nextResult) {
		throw new FailedException("Wrong next value, found " + get +
					  ", expected " + nextResult);
	    }
	}
    }
}
package spacesettlers.simulator; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.UUID; import spacesettlers.actions.DoNothingAction; import spacesettlers.actions.AbstractAction; import spacesettlers.clients.ImmutableTeamInfo; import spacesettlers.clients.Team; import spacesettlers.configs.SpaceSettlersConfig; import spacesettlers.objects.Asteroid; import spacesettlers.objects.Base; import spacesettlers.objects.Beacon; import spacesettlers.objects.Drone; import spacesettlers.objects.Flag; import spacesettlers.objects.Ship; import spacesettlers.objects.AbstractActionableObject; import spacesettlers.objects.AbstractObject; import spacesettlers.objects.AiCore; import spacesettlers.objects.powerups.PowerupDoubleHealingBaseEnergy; import spacesettlers.objects.powerups.PowerupDoubleMaxEnergy; import spacesettlers.objects.powerups.PowerupDoubleWeapon; import spacesettlers.objects.powerups.PowerupToggleShield; import spacesettlers.objects.powerups.SpaceSettlersPowerupEnum; import spacesettlers.objects.resources.ResourcePile; import spacesettlers.objects.weapons.AbstractWeapon; import spacesettlers.utilities.Movement; import spacesettlers.utilities.Position; import spacesettlers.utilities.Vector2D; /** * Physics engine for the spacewar simulator. The Toroidal part refers to * wrapping around the edges of the simulation. 
* * @author amy */ public class Toroidal2DPhysics { /** * Height and width of the simulation */ int height, width; float halfHeight, halfWidth; /** * All objects in the space */ Set<AbstractObject> allObjects; /** * The list of beacons */ Set<Beacon> beacons; /** * The list of asteroids */ Set<Asteroid> asteroids; /** * The list of current AI Cores */ Set<AiCore> cores; /** * The list of bases */ Set<Base> bases; /** * The list of ships */ Set<Ship> ships; /** * The list of drones */ Set<Drone> drones; /** * The ets of flags */ Set<Flag> flags; /** * List of all weapons currently in play */ Set<AbstractWeapon> weapons; /** * A hashmap of objects by their ID */ HashMap<UUID, AbstractObject> objectsById; /** * The timestep used for simulation of physics */ double timeStep; /** * The current timestep iteration */ int currentTimeStep; /** * Maximum velocities (to keep things from going nuts) */ public static final double MAX_TRANSLATIONAL_VELOCITY = 200; public static final double MAX_ANGULAR_VELOCITY = Math.PI; public static final double ENERGY_PENALTY = 0.0005; /** * Handles collisions between spacewar objects */ CollisionHandler collisionHandler; /** * Maximum time step */ int maxTime; /** * Information on all of the teams for sharing (set each time step) */ Set<ImmutableTeamInfo> teamInfo; /** * Constructor for the regular game * * @param simConfig */ public Toroidal2DPhysics(SpaceSettlersConfig simConfig) { height = simConfig.getHeight(); width = simConfig.getWidth(); halfHeight = height / 2.0f; halfWidth = width / 2.0f; allObjects = new LinkedHashSet<AbstractObject>(); timeStep = simConfig.getSimulationTimeStep(); collisionHandler = new CollisionHandler(); beacons = new LinkedHashSet<Beacon>(); asteroids = new LinkedHashSet<Asteroid>(); bases = new LinkedHashSet<Base>(); ships = new LinkedHashSet<Ship>(); drones = new LinkedHashSet<Drone>(); // herr0861 edit cores = new LinkedHashSet<AiCore>(); flags = new LinkedHashSet<Flag>(); weapons = new 
LinkedHashSet<AbstractWeapon>(); objectsById = new HashMap<UUID, AbstractObject>(); maxTime = simConfig.getSimulationSteps(); teamInfo = new LinkedHashSet<ImmutableTeamInfo>(); } /** * Constructor for unit tests * * @param height * @param width * @param timeStep */ public Toroidal2DPhysics(int height, int width, double timeStep) { super(); this.height = height; this.width = width; this.timeStep = timeStep; halfHeight = height / 2.0f; halfWidth = width / 2.0f; allObjects = new LinkedHashSet<AbstractObject>(); collisionHandler = new CollisionHandler(); beacons = new LinkedHashSet<Beacon>(); asteroids = new LinkedHashSet<Asteroid>(); bases = new LinkedHashSet<Base>(); ships = new LinkedHashSet<Ship>(); drones = new LinkedHashSet<Drone>(); // herr0861 edit flags = new LinkedHashSet<Flag>(); cores = new LinkedHashSet<AiCore>(); weapons = new LinkedHashSet<AbstractWeapon>(); objectsById = new HashMap<UUID, AbstractObject>(); teamInfo = new LinkedHashSet<ImmutableTeamInfo>(); } /** * Make a shallow copy of the space with just the settings copied over and new * array lists created. 
* * This is used by the cloning (and should not be called otherwise) * * @param other */ private Toroidal2DPhysics(Toroidal2DPhysics other) { super(); this.height = other.height; this.width = other.width; this.timeStep = other.timeStep; this.currentTimeStep = other.currentTimeStep; halfHeight = height / 2.0f; halfWidth = width / 2.0f; allObjects = new LinkedHashSet<AbstractObject>(); collisionHandler = new CollisionHandler(); beacons = new LinkedHashSet<Beacon>(); asteroids = new LinkedHashSet<Asteroid>(); bases = new LinkedHashSet<Base>(); ships = new LinkedHashSet<Ship>(); drones = new LinkedHashSet<Drone>(); // herr0861 edit flags = new LinkedHashSet<Flag>(); cores = new LinkedHashSet<AiCore>(); weapons = new LinkedHashSet<AbstractWeapon>(); objectsById = new HashMap<UUID, AbstractObject>(); maxTime = other.maxTime; teamInfo = new LinkedHashSet<ImmutableTeamInfo>(other.teamInfo); } /** * Add an object to the physics simulation * * @param obj */ public void addObject(AbstractObject obj) { allObjects.add(obj); if (obj instanceof Beacon) { beacons.add((Beacon) obj); } if (obj instanceof AiCore) { cores.add((AiCore) obj); } if (obj instanceof Asteroid) { asteroids.add((Asteroid) obj); } if (obj instanceof Base) { bases.add((Base) obj); } if (obj instanceof Ship) { ships.add((Ship) obj); } if (obj instanceof AbstractWeapon) { weapons.add((AbstractWeapon) obj); } if (obj instanceof Drone) { // herr0861 edit drones.add((Drone) obj); } if (obj instanceof Flag) { flags.add((Flag) obj); } objectsById.put(obj.getId(), obj); } /** * Delete an object from the physics simulation * * @param obj */ public void removeObject(AbstractObject obj) { allObjects.remove(obj); if (obj.getClass() == Beacon.class) { beacons.remove((Beacon) obj); } if (obj.getClass() == Asteroid.class) { asteroids.remove((Asteroid) obj); } if (obj.getClass() == AiCore.class) { cores.remove((AiCore) obj); } if (obj.getClass() == Base.class) { bases.remove((Base) obj); } if (obj.getClass() == Ship.class) { 
ships.remove((Ship) obj); } if (obj instanceof AbstractWeapon) { weapons.remove((AbstractWeapon) obj); } if (obj instanceof Drone) {// herr0861 edit drones.remove((Drone) obj); } if (obj instanceof Flag) { flags.remove((Flag) obj); } objectsById.remove(obj.getId()); } /** * return object by its ID * * @param id * @return */ public AbstractObject getObjectById(UUID id) { return objectsById.get(id); } /** * Return the list of asteroids * * @return */ public Set<Asteroid> getAsteroids() { return asteroids; } /** * Return the list of beacons * * @return */ public Set<Beacon> getBeacons() { return beacons; } /** * Return the list of bases * * @return */ public Set<Base> getBases() { return bases; } /** * Return the list of ships * * @return */ public Set<Ship> getShips() { return ships; } /** * Return the list of cores currently in play * * @return Set of AiCores */ public Set<AiCore> getCores() { return cores; } /** * Return a list of weapons currently in play * * @return */ public Set<AbstractWeapon> getWeapons() { return weapons; } /** * Return a list of drones currently in play * * @return */ public Set<Drone> getDrones() { // herr0861 edit return drones; } /** * Return the list of flags currently in play * * @return */ public Set<Flag> getFlags() { return flags; } /** * Return the Environment height * * @return */ public int getHeight() { return height; } /** * Return the Environment width * * @return */ public int getWidth() { return width; } /** * Return the timestep duration * * @return */ public double getTimestepDuration() { return timeStep; } /** * Return the timestep * * @return */ public int getCurrentTimestep() { return currentTimeStep; } /** * Returns a new random free location in space * * @param rand Random number generator * @param radius the radius around the new location that must be free * @return */ public Position getRandomFreeLocation(Random rand, int radius) { Position randLocation = new Position(rand.nextFloat() * width, rand.nextFloat() * 
height);
		while (!isLocationFree(randLocation, radius)) {
			randLocation = new Position(rand.nextFloat() * width, rand.nextFloat() * height);
		}
		return randLocation;
	}

	/**
	 * Returns a new random free location whose surrounding freeRadius is clear of
	 * other objects and which lies within maxDistance (toroidal distance) of the
	 * given center point.
	 *
	 * @param rand        random number generator
	 * @param freeRadius  the radius around the object that must be free
	 * @param centerX     x coordinate of the center of the sampling region
	 * @param centerY     y coordinate of the center of the sampling region
	 * @param maxDistance maximum allowed toroidal distance from the center
	 * @return a free location within the region
	 */
	public Position getRandomFreeLocationInRegion(Random rand, int freeRadius, int centerX, int centerY,
			double maxDistance) {
		Position centerPosition = new Position(centerX, centerY);
		// sample uniformly in the square [-maxDistance, maxDistance] around the center
		double newX = ((2 * rand.nextDouble()) - 1) * maxDistance + centerX;
		double newY = ((2 * rand.nextDouble()) - 1) * maxDistance + centerY;
		Position randLocation = new Position(newX, newY);
		toroidalWrap(randLocation);
		// rejection-sample until the spot is free AND within maxDistance of the center
		while (!isLocationFree(randLocation, freeRadius)
				|| findShortestDistance(centerPosition, randLocation) > maxDistance) {
			newX = ((2 * rand.nextDouble()) - 1) * maxDistance + centerX;
			newY = ((2 * rand.nextDouble()) - 1) * maxDistance + centerY;
			randLocation = new Position(newX, newY);
			toroidalWrap(randLocation);
		}
		return randLocation;
	}

	/**
	 * Returns a new random free location inside the specified box of space.
	 *
	 * @param rand       random number generator
	 * @param freeRadius the radius around the object that must be free
	 * @param ULX        upper-left x of the box
	 * @param ULY        upper-left y of the box
	 * @param LRX        lower-right x of the box
	 * @param LRY        lower-right y of the box
	 * @return a free location inside the box (after toroidal wrapping)
	 */
	public Position getRandomFreeLocationInRegion(Random rand, int freeRadius, int ULX, int ULY, int LRX, int LRY) {
		int boxWidth = LRX - ULX;
		int boxHeight = LRY - ULY;

		Position centerPosition = new Position(boxWidth / 2 + ULX, boxHeight / 2 + ULY);

		// sample uniformly within the half-width/half-height of the box center
		double newX = ((2 * rand.nextDouble()) - 1) * (boxWidth / 2.0) + centerPosition.getX();
		double newY = ((2 * rand.nextDouble()) - 1) * (boxHeight / 2.0) + centerPosition.getY();

		Position randLocation = new Position(newX, newY);
		toroidalWrap(randLocation);

		// rejection-sample until the location is free
		while (!isLocationFree(randLocation, freeRadius)) {
			newX = ((2 * rand.nextDouble()) - 1) * (boxWidth / 2.0) + centerPosition.getX();
			newY = ((2 * rand.nextDouble()) - 1) * (boxHeight / 2.0) + centerPosition.getY();
			randLocation = new Position(newX, newY);
			toroidalWrap(randLocation);
		}
		return randLocation;
	}

	/**
	 * Is the specified location free (within the specified radius)?
	 *
	 * @param location the location to test
	 * @param radius   the clearance radius required around the location
	 * @return true if the location is free and false otherwise
	 */
	public boolean isLocationFree(Position location, int radius) {
		for (AbstractObject object : allObjects) {
			// fixed bug where it only checked radius and not diameter
			if (findShortestDistanceVector(object.getPosition(), location)
					.getMagnitude() <= (radius + (2 * object.getRadius()))) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Public interface to find the shortest toroidal distance from one location to
	 * another. Returns a vector pointing from location1 to location2. Use a
	 * getMagnitude() call on the vector to get the distance.
	 *
	 * @param location1 start location
	 * @param location2 target location
	 * @return shortest distance vector pointing from location1 to location2
	 */
	public Vector2D findShortestDistanceVector(Position location1, Position location2) {
		return findShortestDistanceVector(location1, location2, width, height, halfWidth, halfHeight);
	}

	/**
	 * Public interface to find the shortest toroidal distance from one location to
	 * another, as a scalar. Use findShortestDistanceVector when you also need the
	 * direction.
	 *
	 * @param location1 start location
	 * @param location2 target location
	 * @return magnitude of the shortest vector pointing from location1 to location2
	 */
	public double findShortestDistance(Position location1, Position location2) {
		Vector2D shortDist = findShortestDistanceVector(location1, location2, width, height, halfWidth, halfHeight);
		return shortDist.getMagnitude();
	}

	/**
	 * Finds the shortest distance in toroidal space. Returns a vector pointing from
	 * location1 to location2; when the raw separation exceeds half the world size
	 * on an axis, the wrapped-around direction is shorter, so the world dimension
	 * is subtracted/added on that axis.
	 *
	 * @param location1  start location
	 * @param location2  target location
	 * @param width      world width
	 * @param height     world height
	 * @param halfWidth  width / 2 (precomputed)
	 * @param halfHeight height / 2 (precomputed)
	 * @return shortest toroidal displacement from location1 to location2
	 */
	private Vector2D findShortestDistanceVector(Position location1, Position location2, float width, float height,
			float halfWidth, float halfHeight) {
		double x = location2.getX() - location1.getX();
		double y = location2.getY() - location1.getY();
		if (x > halfWidth) {
			if (y > halfHeight) {
				return new Vector2D(x - width, y - height);
			} else if (y < -halfHeight) {
				return new Vector2D(x - width, y + height);
			} else {
				return new Vector2D(x - width, y);
			}
		} else if (x < -halfWidth) {
			if (y > halfHeight) {
				return new Vector2D(x + width, y - height);
			} else if (y < -halfHeight) {
				return new Vector2D(x + width, y + height);
			} else {
				return new Vector2D(x + width, y);
			}
		} else if (y > halfHeight) {
			return new Vector2D(x, y - height);
		} else if (y < -halfHeight) {
			return new Vector2D(x, y + height);
		} else {
			return new Vector2D(x, y);
		}
	}

	/**
	 * Move all moveable objects and handle power ups.
	 * Bases heal, power ups fire, every moveable object integrates one time step
	 * (paying an energy cost proportional to its acceleration and mass), and dead
	 * bases/drones/ships are detected and removed.
	 *
	 * @param rand            random number generator (used for death drops)
	 * @param currentTimeStep the simulator's current time step
	 * @param powerups        map from object id to the power up used this step
	 */
	public void advanceTime(Random rand, int currentTimeStep, Map<UUID, SpaceSettlersPowerupEnum> powerups) {
		this.currentTimeStep = currentTimeStep;

		// heal any base injuries
		for (Base base : bases) {
			base.updateEnergy(base.getHealingIncrement());
		}

		// detect collisions across all objects
		detectCollisions();

		// get the power ups and create any objects (weapons) as necessary
		for (UUID key : powerups.keySet()) {
			AbstractObject swobject = getObjectById(key);
			// if the object is not alive or it is not actionable or is a drone, then ignore
			// this
			if (!swobject.isAlive() || (!(swobject instanceof AbstractActionableObject))
					|| (swobject instanceof Drone)) {// herr0861 edit
				continue;
			}
			// otherwise, handle the power up
			handlePowerup((AbstractActionableObject) swobject, powerups.get(key));
		}

		// now move all objects that are moveable (which may include weapons)
		for (AbstractObject object : allObjects) {
			// skip non-moveable objects or dead object
			if (!object.isMoveable() || !object.isAlive()) {
				continue;
			}

			Position currentPosition = object.getPosition();

			if (object instanceof Drone) { // herr0861 edit
				Drone drone = (Drone) object;
				// NOTE(review): setCurrentAction is handed a clone of this space when the
				// drone has no action -- presumably the drone derives its own action from
				// the cloned state; confirm against Drone.setCurrentAction
				if (drone.getCurrentAction() == null) {
					drone.setCurrentAction(this.deepClone());
				}

				AbstractAction action = drone.getCurrentAction();
				if (action == null) {
					action = new DoNothingAction();
					// This should never happen, but I'll keep it just in case.
					System.out.println("Drone doing nothing!");
				}
				/*
				 * Don't actually need to clone this since the user can't modify it, but might
				 * as well to be consistent because maybe in the future people will be allowed
				 * to specify behavior for the drone.
				 */
				Movement actionMovement = action.getMovement(this.deepClone(), drone.deepClone());

				Position newPosition = applyMovement(currentPosition, actionMovement, timeStep);
				if (newPosition.isValid()) {
					drone.setPosition(newPosition);
				} else {
					newPosition = currentPosition;
				}

				// spend energy proportional to the drone's angular and linear acceleration
				// and mass (resources cost mass), based on the realized position update
				double angularAccel = Math.abs(currentPosition.getAngularVelocity() - newPosition.getAngularVelocity())
						/ timeStep;
				double angularInertia = (3.0 * drone.getMass() * drone.getRadius() * angularAccel) / 2.0;
				double linearAccel = currentPosition.getTranslationalVelocity()
						.subtract(newPosition.getTranslationalVelocity()).getMagnitude() / timeStep;
				double linearInertia = drone.getMass() * linearAccel;
				// drones pay a discounted (0.7x) energy penalty relative to ships
				int penalty = (int) Math.floor(0.7 * ENERGY_PENALTY * (angularInertia + linearInertia));
				drone.updateEnergy(-penalty);

			} else if (object.isControllable() && !(object instanceof Drone)) {
				Ship ship = (Ship) object;
				AbstractAction action = ship.getCurrentAction();

				// handle a null action
				if (action == null) {
					action = new DoNothingAction();
				}

				// need to clone the ship and space because otherwise the ship can affect
				// itself inside AbstractAction
				Movement actionMovement = action.getMovement(this.deepClone(), ship.deepClone());

				// NOTE(review): currentVelocity and currentAngularVelocity are unused locals
				Vector2D currentVelocity = currentPosition.getTranslationalVelocity();
				double currentAngularVelocity = currentPosition.getAngularVelocity();

				Position newPosition = applyMovement(currentPosition, actionMovement, timeStep);
				if (newPosition.isValid()) {
					ship.setPosition(newPosition);
				} else {
					newPosition = currentPosition;
				}

				// spend ship energy proportional to its acceleration (old formula used
				// velocity) and mass (new for space settlers since resources cost mass)
				// based on the realized position update
				double angularAccel = Math.abs(currentPosition.getAngularVelocity() - newPosition.getAngularVelocity())
						/ timeStep;
				double angularInertia = (3.0 * ship.getMass() * ship.getRadius() * angularAccel) / 2.0;
				double linearAccel = currentPosition.getTranslationalVelocity()
						.subtract(newPosition.getTranslationalVelocity()).getMagnitude() / timeStep;
				double linearInertia = ship.getMass() * linearAccel;
				int penalty = (int) Math.floor(ENERGY_PENALTY * (angularInertia + linearInertia));
				ship.updateEnergy(-penalty);

				// this isn't the most general fix but it will work for now (also has to be done
				// for bases)
				if (ship.isShielded()) {
					ship.updateEnergy(-PowerupToggleShield.SHIELD_STEP_COST);
				}

			} else {
				// move all other types of objects
				Position newPosition = moveOneTimestep(currentPosition);
				object.setPosition(newPosition);
				// debugging trap: a flag whose position became NaN aborts the simulator
				if (object instanceof Flag && Double.isNaN(newPosition.getX())) {
					System.out.println("alive: " + object.isAlive() + " draw: " + object.isDrawable() + " old pos is "
							+ currentPosition + " new position is " + newPosition);
					System.exit(-1);
				}
			}

			// if any ships or bases are frozen, decrement their frozen count
			if (object instanceof AbstractActionableObject && !object.isControllable()) {
				AbstractActionableObject actionable = (AbstractActionableObject) object;
				actionable.decrementFreezeCount();
			}
		}

		// go through and see if any bases have died (iterate over a copy since
		// removeObject mutates the underlying collections)
		Set<Base> basesClone = new LinkedHashSet<Base>(bases);
		for (Base base : basesClone) {
			// this isn't the most general fix but it will work for now (also has to be done
			// for bases)
			if (base.isShielded()) {
				base.updateEnergy(-PowerupToggleShield.SHIELD_STEP_COST);
			}

			if (!base.isAlive()) {
				// if the base died, increment kill and assist count
				Ship killShip = base.getKillTagTeam();
				Ship assistShip = base.getAssistTagTeam();
				if (killShip != null)
					killShip.incrementKillsInflicted();
				if (assistShip != null)
					assistShip.incrementAssistsInflicted();
				base.incrementKillsReceived();

				base.setAlive(false);
				removeObject(base);
				base.getTeam().removeBase(base);
			}
		}

		Set<Drone> dronesClone = new LinkedHashSet<Drone>(drones);
		for (Drone drone : dronesClone) {
			if (drone.getEnergy() <= 0 && drone.isAlive() == true) {// drone has died
				// if the drone died, increment kill and assist count
				Ship killShip = drone.getKillTagTeam();
				Ship assistShip = drone.getAssistTagTeam();
				if (killShip != null)
					killShip.incrementKillsInflicted();
				if (assistShip != null)
					assistShip.incrementAssistsInflicted();

				// kill the drone dropping the flag and all resources, but no
				// core. Should we make it have a chance to drop an AiCore?
				// Probably not.
				drone.setDeadAndDropObjects(rand, this);
				removeObject(drone);
				drone.getTeam().removeDrone(drone);
			}
		}

		// and see if any ships have died. Doing this here removes unintentional side
		// effects from when it was called inside updateEnergy
		for (Ship ship : ships) {
			if (ship.getEnergy() <= 0 && ship.isAlive() == true) {
				// if the ship died, increment kill and assist count
				Ship killShip = ship.getKillTagTeam();
				Ship assistShip = ship.getAssistTagTeam();
				if (killShip != null)
					killShip.incrementKillsInflicted();
				if (assistShip != null)
					assistShip.incrementAssistsInflicted();

				// mark the ship as having been killed
				ship.incrementKillsReceived();

				// Spawn a new AiCore with the same velocity magnitude and direction as its
				// parent ship. Handle dropping the core if the ship died.
				Position corePosition = this.getRandomFreeLocationInRegion(rand, AiCore.CORE_RADIUS,
						(int) ship.getPosition().getX(), (int) ship.getPosition().getY(), 200);
				corePosition.setTranslationalVelocity(ship.getPosition().getTranslationalVelocity());
				corePosition.setAngularVelocity(ship.getPosition().getAngularVelocity());
				AiCore shipCore = new AiCore(corePosition, ship.getTeamName(), ship.getTeamColor());
				this.addObject(shipCore);

				// drop any resources that the ship was carrying - the actual dropping was
				// disabled because it created giant piles of resources
				ResourcePile resources = ship.getResources();
				if (resources.getTotal() > 0) {
					// intentionally empty: see distributeResourcesToNearbyAsteroids for the
					// disabled redistribution logic
				}

				// set the ship to dead last (so we can grab its resources first)
				// this drops the flag
				ship.setDeadAndDropObjects(rand, this);
			}
		}

		// verify all tags are still accurate with the new energies
		for (Base base : bases) {
			base.updateTags();
		}
		for (Ship ship : ships) {
			ship.updateTags();
		}
	}

	/**
	 * Distribute the specified resources to nearby mineable asteroids (this happens
	 * when a ship dies). Right now it drops it all on the single nearest asteroid,
	 * but that may change if this ends up making massive asteroids.
	 *
	 * @param position  the position where the resources were dropped
	 * @param resources the resources to redistribute
	 */
	private void distributeResourcesToNearbyAsteroids(Position position, ResourcePile resources) {
		double nearestDistance = Double.MAX_VALUE;
		Asteroid nearestAsteroid = null;
// first find the nearest asteroid for (Asteroid asteroid : asteroids) { double dist = findShortestDistance(position, asteroid.getPosition()); if (dist < nearestDistance) { nearestDistance = dist; nearestAsteroid = asteroid; } } // if it is mineable, just add the resources nearestAsteroid.addResources(resources); if (!nearestAsteroid.isMineable()) { // transform it to mineable nearestAsteroid.setMineable(true); } } /** * Handle power ups for the specified object * * @param swobject * @param spacewarPowerup */ private void handlePowerup(AbstractActionableObject swobject, SpaceSettlersPowerupEnum spacewarPowerup) { switch (spacewarPowerup) { case FIRE_MISSILE: Ship ship = (Ship) swobject; AbstractWeapon weapon = ship.getNewWeapon(SpaceSettlersPowerupEnum.FIRE_MISSILE); if (weapon != null && weapon.isValidWeapon(ship)) { addObject(weapon); weapon.setFiringShip(ship); weapon.applyPowerup(ship); } break; case FIRE_EMP: ship = (Ship) swobject; weapon = ship.getNewWeapon(SpaceSettlersPowerupEnum.FIRE_EMP); if (weapon != null && weapon.isValidWeapon(ship)) { addObject(weapon); weapon.setFiringShip(ship); weapon.applyPowerup(ship); } break; case TOGGLE_SHIELD: PowerupToggleShield toggle = new PowerupToggleShield(); toggle.applyPowerup(swobject); break; case DOUBLE_WEAPON_CAPACITY: PowerupDoubleWeapon weaponDoubler = new PowerupDoubleWeapon(); weaponDoubler.applyPowerup(swobject); break; case DOUBLE_BASE_HEALING_SPEED: PowerupDoubleHealingBaseEnergy baseDoubler = new PowerupDoubleHealingBaseEnergy(); baseDoubler.applyPowerup(swobject); break; case DOUBLE_MAX_ENERGY: PowerupDoubleMaxEnergy maxEnergyDoubler = new PowerupDoubleMaxEnergy(); maxEnergyDoubler.applyPowerup(swobject); break; case FIRE_HEAT_SEEKING_MISSILE: break; case FIRE_TURRET: break; case LAY_MINE: break; default: break; } } /** * Advances one time step using the set velocities * * @param currentPosition * @return */ private Position moveOneTimestep(Position position) { double angularVelocity = 
position.getAngularVelocity(); double orientation = position.getOrientation() + (angularVelocity * timeStep); if (Double.isNaN(angularVelocity) || Double.isNaN(position.getTotalTranslationalVelocity())) { System.out.println("Help: velocity got set to Nan " + position); } // make sure orientation wraps correctly (-pi to pi) if (orientation > Math.PI) { orientation -= (2 * Math.PI); } else if (orientation < -Math.PI) { orientation += (2 * Math.PI); } // new x,y coordinates double newX = position.getX() + (position.getTranslationalVelocityX() * timeStep); double newY = position.getY() + (position.getTranslationalVelocityY() * timeStep); Position newPosition = new Position(newX, newY, orientation); newPosition.setAngularVelocity(angularVelocity); newPosition.setTranslationalVelocity(position.getTranslationalVelocity()); toroidalWrap(newPosition); return newPosition; } /** * Step through all the objects and ensure they are not colliding. If they are, * call the collision handler for those objects. Sometimes you bounce * (asteroids) and sometimes you pick the object up (beacons), etc. */ private void detectCollisions() { // would prefer to iterate over the set (as this is inefficient) but // the set iterator collides a with b and then b with a, allowing them to // pass through one another! 
AbstractObject[] allObjectsArray = (AbstractObject[]) allObjects.toArray(new AbstractObject[allObjects.size()]); // loop through all pairs of objects and see if they are colliding for (int i = 0; i < allObjectsArray.length; i++) { AbstractObject object1 = allObjectsArray[i]; if (!object1.isAlive()) { continue; } for (int j = i + 1; j < allObjectsArray.length; j++) { AbstractObject object2 = allObjectsArray[j]; if (!object2.isAlive()) { continue; } // skip them if they are the same object if (object1.equals(object2)) { continue; } double distance = findShortestDistance(object1.getPosition(), object2.getPosition()); if (distance < (object1.getRadius() + object2.getRadius())) { collisionHandler.collide(object1, object2, this); } } } } /** * Takes an acceleration and a simulation time step and moves the object * * @param actionMovement * @param timeStep * @return */ public Position applyMovement(Position position, Movement movement, double timeStep) { Vector2D translationalAcceleration = movement.getTranslationalAcceleration(); double angularAccel = movement.getAngularAccleration(); // velocity is acceleration times time Vector2D translationalVelocity = position.getTranslationalVelocity().add(new Vector2D( translationalAcceleration.getXValue() * timeStep, translationalAcceleration.getYValue() * timeStep)); double angularVelocity = position.getAngularVelocity() + (angularAccel * timeStep); // ensure the max/mins are respected if (translationalVelocity.getMagnitude() > MAX_TRANSLATIONAL_VELOCITY) { double ratio = translationalVelocity.getMagnitude() / MAX_TRANSLATIONAL_VELOCITY; translationalVelocity = translationalVelocity.multiply(1 / ratio); } angularVelocity = checkAngularVelocity(angularVelocity); Position newPosition = new Position(position.getX(), position.getY(), position.getOrientation()); newPosition.setTranslationalVelocity(translationalVelocity); newPosition.setAngularVelocity(angularVelocity); return moveOneTimestep(newPosition); } /** * Ensure the angular 
velocity doesn't exceed the max * * @param angularVelocity * @return */ private double checkAngularVelocity(double angularVelocity) { if (angularVelocity > MAX_ANGULAR_VELOCITY) { return MAX_ANGULAR_VELOCITY; } else if (angularVelocity < -MAX_ANGULAR_VELOCITY) { return -MAX_ANGULAR_VELOCITY; } else { return angularVelocity; } } /** * Torridial wrap based on the height/width of the enviroment * * @param position */ public void toroidalWrap(Position position) { while (position.getX() < 0) { position.setX(position.getX() + width); } while (position.getY() < 0) { position.setY(position.getY() + height); } position.setX(position.getX() % width); position.setY(position.getY() % height); } /** * Respawns any dead objects in new random locations. Ships have a delay before * they can respawn. Asteroids do not respawn (they are re-created later randomly) */ public void respawnDeadObjects(Random random) { for (AbstractObject object : allObjects) { if (!object.isAlive() && object.canRespawn()) { Position newPosition = null; // flags should re-spawn at a randomly chosen starting location if (object instanceof Flag) { Flag flag = (Flag) object; newPosition = flag.getNewStartingPosition(random); // ensure their starting location is free (to handle the thought bug the class // introduced of putting a ship or a base where the flag should spawn) newPosition = getRandomFreeLocationInRegion(random, flag.getRadius() * 2, (int) newPosition.getX(), (int) newPosition.getY(), flag.getRadius() * 15); } else { // note this is times 4 in order to ensure objects don't spawn touching (and just // to get them a bit farther apart newPosition = getRandomFreeLocation(random, object.getRadius() * 4); } object.setPosition(newPosition); object.setAlive(true); object.setDrawable(true); // reset the UUID if it is a beacon if (object instanceof Beacon) { object.resetId(); } } } } /** * Clones all the objects in space (used for security so the teams can't * manipulate other ships) + * * @return */ public 
Toroidal2DPhysics deepClone() { Toroidal2DPhysics newSpace = new Toroidal2DPhysics(this); for (AbstractObject swObject : allObjects) { AbstractObject newObject = swObject.deepClone(); newSpace.addObject(newObject); } return newSpace; } /** * Loop through all weapons and remove any dead ones */ public void cleanupDeadWeapons() { ArrayList<AbstractObject> deadObjects = new ArrayList<AbstractObject>(); for (AbstractObject object : allObjects) { if (object instanceof AbstractWeapon && !object.isAlive()) { deadObjects.add(object); } } for (AbstractObject deadObject : deadObjects) { removeObject(deadObject); } } /** * Loop through all cores and remove any dead ones */ public void cleanupDeadCores() { ArrayList<AbstractObject> deadObjects = new ArrayList<AbstractObject>(); for (AiCore core : cores) { if (!core.isAlive()) { deadObjects.add(core); } } for (AbstractObject deadObject : deadObjects) { removeObject(deadObject); } } /** * Loop through all drones and remove any dead ones. herr0861 edit */ public void cleanupDeadDrones() { ArrayList<AbstractObject> deadObjects = new ArrayList<AbstractObject>(); for (Drone drone : drones) { if (!drone.isAlive()) { deadObjects.add(drone); } } for (AbstractObject deadObject : deadObjects) { removeObject(deadObject); } } /** * Loop through all asteroids and remove any dead ones */ public int cleanupAllAndCountMineableDeadAsteroids() { ArrayList<AbstractObject> deadObjects = new ArrayList<AbstractObject>(); int numMineable = 0; for (Asteroid asteroid : asteroids) { if (!asteroid.isAlive()) { deadObjects.add(asteroid); if (asteroid.isMineable()) { numMineable++; } } } for (AbstractObject deadObject : deadObjects) { removeObject(deadObject); } // return the number of mineable ones that were removed return numMineable; } /** * Return the maximum number of time steps for the simulation * * @return */ public int getMaxTime() { return maxTime; } /** * Return all objects * * @return */ public Set<AbstractObject> getAllObjects() { return 
allObjects;
	}

	/**
	 * Check to see if following a straight line path between two given locations
	 * would result in a collision with a provided set of obstructions.
	 *
	 * @author Andrew and Thibault
	 *
	 * @param startPosition the starting location of the straight line path
	 * @param goalPosition  the ending location of the straight line path
	 * @param obstructions  a Set of AbstractObject obstructions (i.e., if you
	 *                      don't wish to consider mineable asteroids or beacons
	 *                      obstructions)
	 * @param freeRadius    used to determine free space buffer size
	 * @return whether or not a straight line path between two positions contains
	 *         obstructions from a given set
	 */
	public boolean isPathClearOfObstructions(Position startPosition, Position goalPosition,
			Set<AbstractObject> obstructions, int freeRadius) {
		// Shortest straight line path from startPosition to goalPosition
		Vector2D pathToGoal = findShortestDistanceVector(startPosition, goalPosition);
		// Distance of straight line path
		double distanceToGoal = pathToGoal.getMagnitude();
		// Boolean showing whether or not the path is clear
		boolean pathIsClear = true;

		// Calculate distance between obstruction center and path (including buffer for
		// ship movement). Uses hypotenuse * sin(theta) = opposite (on a right hand
		// triangle)
		Vector2D pathToObstruction; // Vector from start position to obstruction
		double angleBetween; // Angle between vector from start position to obstruction

		// Loop through obstructions
		for (AbstractObject obstruction : obstructions) {
			// If the distance to the obstruction is greater than the distance to the end
			// goal, ignore the obstruction
			// bug fix for radius of obstruction provided by Yo
			pathToObstruction = findShortestDistanceVector(startPosition, obstruction.getPosition());
			if (pathToObstruction.getMagnitude() > distanceToGoal + obstruction.getRadius()) {
				continue;
			}

			// Ignore angles > 90 degrees (obstruction is behind the direction of travel)
			angleBetween = Math.abs(pathToObstruction.angleBetween(pathToGoal));
			if (angleBetween > Math.PI / 2) {
				continue;
			}

			// Compare distance between obstruction and path with buffer distance
			if (pathToObstruction.getMagnitude() * Math.sin(angleBetween) < obstruction.getRadius()
					+ freeRadius * 1.5) {
				pathIsClear = false;
				break;
			}
		}

		return pathIsClear;
	}

	/**
	 * Set the team information for this time step.
	 *
	 * @param teamInfo the immutable team info snapshot
	 */
	public void setTeamInfo(Set<ImmutableTeamInfo> teamInfo) {
		this.teamInfo = teamInfo;
	}

	/**
	 * Get the team's information.
	 *
	 * @return the team info set
	 */
	public Set<ImmutableTeamInfo> getTeamInfo() {
		return teamInfo;
	}
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.xml.config;

import com.intellij.ide.presentation.VirtualFilePresentation;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.roots.ui.configuration.ModulesAlphaComparator;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.ui.ColoredTreeCellRenderer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.TreeSpeedSearch;
import com.intellij.util.containers.Convertor;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import java.util.*;

/**
 * Builds and renders a tree of configuration files grouped by module (and
 * within a module, by file type when several types are present), by containing
 * jar, and by loose virtual files.
 */
public class ConfigFilesTreeBuilder {

  private final JTree myTree;

  public ConfigFilesTreeBuilder(JTree tree) {
    myTree = tree;
    installSearch(tree);
  }

  /**
   * Fills {@code root} with nodes for every file found by the given searchers.
   *
   * @return all PSI files that were added to the tree
   */
  public Set<PsiFile> buildTree(DefaultMutableTreeNode root, ConfigFileSearcher... searchers) {
    // was com.intellij.util.containers.HashSet; use java.util.HashSet for
    // consistency with buildModuleNodes()
    final Set<PsiFile> psiFiles = new HashSet<PsiFile>();

    final MultiMap<Module, PsiFile> files = new MultiMap<Module, PsiFile>();
    final MultiMap<VirtualFile, PsiFile> jars = new MultiMap<VirtualFile, PsiFile>();
    final MultiMap<VirtualFile, PsiFile> virtualFiles = new MultiMap<VirtualFile, PsiFile>();

    for (ConfigFileSearcher searcher : searchers) {
      files.putAllValues(searcher.getFilesByModules());
      jars.putAllValues(searcher.getJars());
      virtualFiles.putAllValues(searcher.getVirtualFiles());
    }

    psiFiles.addAll(buildModuleNodes(files, jars, root));

    // loose virtual files hang directly under the root, one node per virtual file
    for (Map.Entry<VirtualFile, Collection<PsiFile>> entry : virtualFiles.entrySet()) {
      DefaultMutableTreeNode node = createFileNode(entry.getKey());
      List<PsiFile> list = new ArrayList<PsiFile>(entry.getValue());
      Collections.sort(list, FILE_COMPARATOR);
      for (PsiFile file : list) {
        node.add(createFileNode(file));
      }
      root.add(node);
    }

    return psiFiles;
  }

  /**
   * Adds a node for {@code file} under the tree's root and notifies the model.
   *
   * @return the newly created node
   */
  public DefaultMutableTreeNode addFile(VirtualFile file) {
    final DefaultMutableTreeNode root = (DefaultMutableTreeNode)myTree.getModel().getRoot();
    final DefaultMutableTreeNode treeNode = createFileNode(file);
    root.add(treeNode);
    DefaultTreeModel model = (DefaultTreeModel)myTree.getModel();
    model.nodeStructureChanged(root);
    return treeNode;
  }

  /**
   * Builds module-grouped and jar-grouped nodes under {@code root}. Files in a
   * module are additionally grouped under file-type nodes when more than one
   * non-empty file type group exists.
   *
   * @return all PSI files that were added to the tree
   */
  public Set<PsiFile> buildModuleNodes(final MultiMap<Module, PsiFile> files,
                                       final MultiMap<VirtualFile, PsiFile> jars,
                                       DefaultMutableTreeNode root) {
    final HashSet<PsiFile> psiFiles = new HashSet<PsiFile>();
    final List<Module> modules = new ArrayList<Module>(files.keySet());
    Collections.sort(modules, ModulesAlphaComparator.INSTANCE);
    for (Module module : modules) {
      DefaultMutableTreeNode moduleNode = createFileNode(module);
      root.add(moduleNode);
      if (files.containsKey(module)) {
        List<PsiFile> moduleFiles = new ArrayList<PsiFile>(files.get(module));

        MultiMap<FileType, PsiFile> filesByType = new MultiMap<FileType, PsiFile>();
        for (PsiFile file : moduleFiles) {
          filesByType.putValue(file.getFileType(), file);
        }
        if (hasNonEmptyGroups(filesByType)) {
          // several file types present: insert an intermediate node per file type
          for (Map.Entry<FileType, Collection<PsiFile>> entry : filesByType.entrySet()) {
            DefaultMutableTreeNode fileTypeNode = createFileNode(entry.getKey());
            moduleNode.add(fileTypeNode);
            addChildrenFiles(psiFiles, fileTypeNode, new ArrayList<PsiFile>(entry.getValue()));
          }
        }
        else {
          addChildrenFiles(psiFiles, moduleNode, moduleFiles);
        }
      }
    }
    for (VirtualFile file : jars.keySet()) {
      final List<PsiFile> list = new ArrayList<PsiFile>(jars.get(file));
      // the PSI counterpart of the jar becomes the parent node
      final PsiFile jar = list.get(0).getManager().findFile(file);
      if (jar != null) {
        final DefaultMutableTreeNode jarNode = createFileNode(jar);
        root.add(jarNode);
        Collections.sort(list, FILE_COMPARATOR);
        for (PsiFile psiFile : list) {
          jarNode.add(createFileNode(psiFile));
          psiFiles.add(psiFile);
        }
      }
    }
    return psiFiles;
  }

  /** Display name for a file-type grouping node. */
  private static String getFileTypeNodeName(FileType fileType) {
    return fileType.getName() + " context files";
  }

  /** Returns true when more than one file-type group actually contains files. */
  private boolean hasNonEmptyGroups(MultiMap<FileType, PsiFile> filesByType) {
    byte nonEmptyGroups = 0;
    for (Map.Entry<FileType, Collection<PsiFile>> entry : filesByType.entrySet()) {
      Collection<PsiFile> files = entry.getValue();
      if (files != null && !files.isEmpty()) nonEmptyGroups++;
    }
    return nonEmptyGroups > 1;
  }

  /** Sorts the files by natural name order and adds one child node per file. */
  private void addChildrenFiles(@NotNull Set<PsiFile> psiFiles,
                                DefaultMutableTreeNode parentNode,
                                @NotNull List<PsiFile> moduleFiles) {
    Collections.sort(moduleFiles, FILE_COMPARATOR);
    for (PsiFile file : moduleFiles) {
      final DefaultMutableTreeNode fileNode = createFileNode(file);
      parentNode.add(fileNode);
      psiFiles.add(file);
    }
  }

  /** Subclasses may override to supply custom node implementations. */
  protected DefaultMutableTreeNode createFileNode(Object file) {
    return new DefaultMutableTreeNode(file);
  }

  private static final Comparator<PsiFile> FILE_COMPARATOR = new Comparator<PsiFile>() {
    @Override
    public int compare(final PsiFile o1, final PsiFile o2) {
      return StringUtil.naturalCompare(o1.getName(), o2.getName());
    }
  };

  /**
   * Renders a tree node: icon plus name, and for files a grayed path suffix
   * (with any jar prefix stripped).
   */
  public static void renderNode(Object value, boolean expanded, ColoredTreeCellRenderer renderer) {
    if (!(value instanceof DefaultMutableTreeNode)) return;
    final Object object = ((DefaultMutableTreeNode)value).getUserObject();
    if (object instanceof FileType) {
      final FileType fileType = (FileType)object;
      renderer.setIcon(fileType.getIcon());
      renderer.append(getFileTypeNodeName(fileType), SimpleTextAttributes.REGULAR_ATTRIBUTES);
    }
    else if (object instanceof Module) {
      final Module module = (Module)object;
      renderer.setIcon(ModuleType.get(module).getIcon());
      renderer.append(module.getName(), SimpleTextAttributes.REGULAR_ATTRIBUTES);
    }
    else if (object instanceof PsiFile) {
      final PsiFile psiFile = (PsiFile)object;
      renderer.setIcon(psiFile.getIcon(0));
      renderer.append(psiFile.getName(), SimpleTextAttributes.REGULAR_ATTRIBUTES);
      final VirtualFile virtualFile = psiFile.getVirtualFile();
      if (virtualFile != null) {
        appendGrayedPath(renderer, virtualFile);
      }
    }
    else if (object instanceof VirtualFile) {
      VirtualFile file = (VirtualFile)object;
      renderer.setIcon(VirtualFilePresentation.getIcon(file));
      renderer.append(file.getName(), SimpleTextAttributes.REGULAR_ATTRIBUTES);
      appendGrayedPath(renderer, file);
    }
  }

  /**
   * Appends the file's path in gray, stripping everything up to and including a
   * jar separator. Shared by the PsiFile and VirtualFile branches of
   * {@link #renderNode} (previously duplicated inline).
   */
  private static void appendGrayedPath(ColoredTreeCellRenderer renderer, VirtualFile file) {
    String path = file.getPath();
    final int i = path.indexOf(JarFileSystem.JAR_SEPARATOR);
    if (i >= 0) {
      path = path.substring(i + JarFileSystem.JAR_SEPARATOR.length());
    }
    renderer.append(" (" + path + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
  }

  /** Installs speed search over node display names. */
  public static void installSearch(JTree tree) {
    new TreeSpeedSearch(tree, new Convertor<TreePath, String>() {
      @Override
      public String convert(final TreePath treePath) {
        final Object object = ((DefaultMutableTreeNode)treePath.getLastPathComponent()).getUserObject();
        if (object instanceof Module) {
          return ((Module)object).getName();
        }
        else if (object instanceof PsiFile) {
          return ((PsiFile)object).getName();
        }
        else if (object instanceof VirtualFile) {
          return ((VirtualFile)object).getName();
        }
        else {
          return "";
        }
      }
    });
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.server.coordinator;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Inject;
import com.google.inject.Provider;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntMaps;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2LongMap;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.utils.ZKPaths;
import org.apache.druid.client.DataSourcesSnapshot;
import org.apache.druid.client.DruidDataSource;
import org.apache.druid.client.DruidServer;
import org.apache.druid.client.ImmutableDruidDataSource;
import org.apache.druid.client.ImmutableDruidServer;
import org.apache.druid.client.ServerInventoryView;
import org.apache.druid.client.coordinator.Coordinator;
import org.apache.druid.client.indexing.IndexingServiceClient;
import org.apache.druid.common.config.JacksonConfigManager;
import org.apache.druid.curator.ZkEnablementConfig;
import org.apache.druid.curator.discovery.ServiceAnnouncer;
import org.apache.druid.discovery.DruidLeaderSelector;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.guice.annotations.CoordinatorIndexingServiceDuty;
import org.apache.druid.guice.annotations.Self;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.concurrent.ScheduledExecutorFactory;
import org.apache.druid.java.util.common.concurrent.ScheduledExecutors;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.metadata.MetadataRuleManager;
import org.apache.druid.metadata.SegmentsMetadataManager;
import org.apache.druid.query.DruidMetrics;
import org.apache.druid.server.DruidNode;
import org.apache.druid.server.coordinator.duty.BalanceSegments;
import org.apache.druid.server.coordinator.duty.CompactSegments;
import org.apache.druid.server.coordinator.duty.CoordinatorDuty;
import org.apache.druid.server.coordinator.duty.EmitClusterStatsAndMetrics;
import org.apache.druid.server.coordinator.duty.LogUsedSegments;
import org.apache.druid.server.coordinator.duty.MarkAsUnusedOvershadowedSegments;
import org.apache.druid.server.coordinator.duty.RunRules;
import org.apache.druid.server.coordinator.duty.UnloadUnusedSegments;
import org.apache.druid.server.coordinator.rules.LoadRule;
import org.apache.druid.server.coordinator.rules.Rule;
import org.apache.druid.server.initialization.ZkPathsConfig;
import org.apache.druid.server.lookup.cache.LookupCoordinatorManager;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.joda.time.DateTime;
import org.joda.time.Duration;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * Druid's Coordinator process. While running (and leader, as decided by {@link DruidLeaderSelector}),
 * it periodically executes groups of {@link CoordinatorDuty}s on a single-threaded scheduled
 * executor: historical-management duties (rule evaluation, balancing, unloading) and
 * indexing-service duties (including compaction). It also owns the per-server
 * {@link LoadQueuePeon}s used to load/drop segments on historicals.
 *
 * NOTE(review): this documentation describes only what is visible in this class; duty semantics
 * live in the individual duty classes.
 */
@ManageLifecycle
public class DruidCoordinator
{
  /**
   * This comparator orders "freshest" segments first, i. e. segments with most recent intervals.
   *
   * It is used in historical nodes' {@link LoadQueuePeon}s to make historicals load more recent segments first.
   *
   * It is also used in {@link DruidCoordinatorRuntimeParams} for {@link
   * DruidCoordinatorRuntimeParams#getUsedSegments()} - a collection of segments to be considered during some
   * coordinator run for different {@link CoordinatorDuty}s. The order matters only for {@link
   * RunRules}, which tries to apply the rules while iterating the segments in the order imposed by
   * this comparator. In {@link LoadRule} the throttling limit may be hit (via {@link ReplicationThrottler}; see
   * {@link CoordinatorDynamicConfig#getReplicationThrottleLimit()}). So before we potentially hit this limit, we want
   * to schedule loading the more recent segments (among all of those that need to be loaded).
   *
   * In both {@link LoadQueuePeon}s and {@link RunRules}, we want to load more recent segments first
   * because presumably they are queried more often and contain more important data for users, so if the Druid
   * cluster has availability problems and is struggling to make all segments available immediately, at least we try to
   * make more "important" (more recent) segments available as soon as possible.
   */
  static final Comparator<DataSegment> SEGMENT_COMPARATOR_RECENT_FIRST = Ordering
      .from(Comparators.intervalsByEndThenStart())
      .onResultOf(DataSegment::getInterval)
      .compound(Ordering.<DataSegment>natural())
      .reverse();

  private static final EmittingLogger log = new EmittingLogger(DruidCoordinator.class);

  // Guards started/stop transitions and leadership changes.
  private final Object lock = new Object();
  private final DruidCoordinatorConfig config;
  private final ZkPathsConfig zkPaths;
  private final JacksonConfigManager configManager;
  private final SegmentsMetadataManager segmentsMetadataManager;
  private final ServerInventoryView serverInventoryView;
  private final MetadataRuleManager metadataRuleManager;

  @Nullable // Null if zk is disabled
  private final CuratorFramework curator;
  private final ServiceEmitter emitter;
  private final IndexingServiceClient indexingServiceClient;
  // Single-threaded executor on which all duty groups are scheduled.
  private final ScheduledExecutorService exec;
  private final LoadQueueTaskMaster taskMaster;
  // server name -> peon; entries are created/removed as servers appear/disappear.
  private final Map<String, LoadQueuePeon> loadManagementPeons;
  private final ServiceAnnouncer serviceAnnouncer;
  private final DruidNode self;
  private final Set<CoordinatorDuty> indexingServiceDuties;
  private final BalancerStrategyFactory factory;
  private final LookupCoordinatorManager lookupCoordinatorManager;
  private final DruidLeaderSelector coordLeaderSelector;
  private final CompactSegments compactSegments;

  private volatile boolean started = false;
  // Updated once per coordinator run by UpdateCoordinatorStateAndPrepareCluster; may be stale between runs.
  private volatile SegmentReplicantLookup segmentReplicantLookup = null;

  // Thread count the current balancerExec was built with; compared against dynamic config each run.
  private int cachedBalancerThreadNumber;
  private ListeningExecutorService balancerExec;

  private static final String HISTORICAL_MANAGEMENT_DUTIES_DUTY_GROUP = "HistoricalManagementDuties";
  private static final String INDEXING_SERVICE_DUTIES_DUTY_GROUP = "IndexingServiceDuties";
  private static final String COMPACT_SEGMENTS_DUTIES_DUTY_GROUP = "CompactSegmentsDuties";

  @Inject
  public DruidCoordinator(
      DruidCoordinatorConfig config,
      ZkPathsConfig zkPaths,
      JacksonConfigManager configManager,
      SegmentsMetadataManager segmentsMetadataManager,
      ServerInventoryView serverInventoryView,
      MetadataRuleManager metadataRuleManager,
      Provider<CuratorFramework> curatorProvider,
      ServiceEmitter emitter,
      ScheduledExecutorFactory scheduledExecutorFactory,
      IndexingServiceClient indexingServiceClient,
      LoadQueueTaskMaster taskMaster,
      ServiceAnnouncer serviceAnnouncer,
      @Self DruidNode self,
      @CoordinatorIndexingServiceDuty Set<CoordinatorDuty> indexingServiceDuties,
      BalancerStrategyFactory factory,
      LookupCoordinatorManager lookupCoordinatorManager,
      @Coordinator DruidLeaderSelector coordLeaderSelector,
      CompactSegments compactSegments,
      ZkEnablementConfig zkEnablementConfig
  )
  {
    this(
        config,
        zkPaths,
        configManager,
        segmentsMetadataManager,
        serverInventoryView,
        metadataRuleManager,
        curatorProvider,
        emitter,
        scheduledExecutorFactory,
        indexingServiceClient,
        taskMaster,
        serviceAnnouncer,
        self,
        new ConcurrentHashMap<>(),
        indexingServiceDuties,
        factory,
        lookupCoordinatorManager,
        coordLeaderSelector,
        compactSegments,
        zkEnablementConfig
    );
  }

  // Testing constructor: allows injecting the peon map directly.
  DruidCoordinator(
      DruidCoordinatorConfig config,
      ZkPathsConfig zkPaths,
      JacksonConfigManager configManager,
      SegmentsMetadataManager segmentsMetadataManager,
      ServerInventoryView serverInventoryView,
      MetadataRuleManager metadataRuleManager,
      Provider<CuratorFramework> curatorProvider,
      ServiceEmitter emitter,
      ScheduledExecutorFactory scheduledExecutorFactory,
      IndexingServiceClient indexingServiceClient,
      LoadQueueTaskMaster taskMaster,
      ServiceAnnouncer serviceAnnouncer,
      DruidNode self,
      ConcurrentMap<String, LoadQueuePeon> loadQueuePeonMap,
      Set<CoordinatorDuty> indexingServiceDuties,
      BalancerStrategyFactory factory,
      LookupCoordinatorManager lookupCoordinatorManager,
      DruidLeaderSelector coordLeaderSelector,
      CompactSegments compactSegments,
      ZkEnablementConfig zkEnablementConfig
  )
  {
    this.config = config;
    this.zkPaths = zkPaths;
    this.configManager = configManager;

    this.segmentsMetadataManager = segmentsMetadataManager;
    this.serverInventoryView = serverInventoryView;
    this.metadataRuleManager = metadataRuleManager;
    // Only touch the curator provider when zk is enabled; otherwise curator stays null.
    if (zkEnablementConfig.isEnabled()) {
      this.curator = curatorProvider.get();
    } else {
      this.curator = null;
    }
    this.emitter = emitter;
    this.indexingServiceClient = indexingServiceClient;
    this.taskMaster = taskMaster;
    this.serviceAnnouncer = serviceAnnouncer;
    this.self = self;
    this.indexingServiceDuties = indexingServiceDuties;

    this.exec = scheduledExecutorFactory.create(1, "Coordinator-Exec--%d");

    this.loadManagementPeons = loadQueuePeonMap;
    this.factory = factory;
    this.lookupCoordinatorManager = lookupCoordinatorManager;
    this.coordLeaderSelector = coordLeaderSelector;
    this.compactSegments = compactSegments;
  }

  public boolean isLeader()
  {
    return coordLeaderSelector.isLeader();
  }

  public Map<String, LoadQueuePeon> getLoadManagementPeons()
  {
    return loadManagementPeons;
  }

  /**
   * @return tier -> { dataSource -> underReplicationCount } map
   */
  public Map<String, Object2LongMap<String>> computeUnderReplicationCountsPerDataSourcePerTier()
  {
    final Iterable<DataSegment> dataSegments = segmentsMetadataManager.iterateAllUsedSegments();
    return computeUnderReplicationCountsPerDataSourcePerTierForSegments(dataSegments);
  }

  /**
   * segmentReplicantLookup use in this method could potentially be stale since it is only updated on coordinator runs.
   * However, this is ok as long as the {@param dataSegments} is refreshed/latest as this would at least still ensure
   * that the stale data in segmentReplicantLookup would be under counting replication levels,
   * rather than potentially falsely reporting that everything is available.
   *
   * @return tier -> { dataSource -> underReplicationCount } map
   */
  public Map<String, Object2LongMap<String>> computeUnderReplicationCountsPerDataSourcePerTierForSegments(
      Iterable<DataSegment> dataSegments
  )
  {
    final Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier = new HashMap<>();

    // Before the first coordinator run there is no lookup yet; report nothing rather than guess.
    if (segmentReplicantLookup == null) {
      return underReplicationCountsPerDataSourcePerTier;
    }

    final DateTime now = DateTimes.nowUtc();

    for (final DataSegment segment : dataSegments) {
      final List<Rule> rules = metadataRuleManager.getRulesWithDefault(segment.getDataSource());

      for (final Rule rule : rules) {
        if (!rule.appliesTo(segment, now)) {
          // Rule did not match. Continue to the next Rule.
          continue;
        }
        if (!rule.canLoadSegments()) {
          // Rule matched but rule does not and cannot load segments.
          // Hence, there is no need to update underReplicationCountsPerDataSourcePerTier map
          break;
        }

        rule.updateUnderReplicated(underReplicationCountsPerDataSourcePerTier, segmentReplicantLookup, segment);

        // Only the first matching rule applies. This is because the Coordinator cycles through all used segments
        // and matches each segment with the first rule that applies. Each segment may only match a single rule.
        break;
      }
    }

    return underReplicationCountsPerDataSourcePerTier;
  }

  /**
   * @return dataSource -> number of used segments with zero loaded replicants. Empty before the
   * first coordinator run (no {@link SegmentReplicantLookup} yet).
   */
  public Object2IntMap<String> computeNumsUnavailableUsedSegmentsPerDataSource()
  {
    if (segmentReplicantLookup == null) {
      return Object2IntMaps.emptyMap();
    }

    final Object2IntOpenHashMap<String> numsUnavailableUsedSegmentsPerDataSource = new Object2IntOpenHashMap<>();

    final Iterable<DataSegment> dataSegments = segmentsMetadataManager.iterateAllUsedSegments();

    for (DataSegment segment : dataSegments) {
      if (segmentReplicantLookup.getLoadedReplicants(segment.getId()) == 0) {
        numsUnavailableUsedSegmentsPerDataSource.addTo(segment.getDataSource(), 1);
      } else {
        // addTo(..., 0) still creates the entry, so fully-available datasources report 0 rather
        // than being absent from the map.
        numsUnavailableUsedSegmentsPerDataSource.addTo(segment.getDataSource(), 0);
      }
    }

    return numsUnavailableUsedSegmentsPerDataSource;
  }

  /**
   * @return dataSource name -> percentage (0-100) of its published used segments that are loaded
   * on at least one segment-serving process in the inventory.
   */
  public Map<String, Double> getLoadStatus()
  {
    final Map<String, Double> loadStatus = new HashMap<>();
    final Collection<ImmutableDruidDataSource> dataSources =
        segmentsMetadataManager.getImmutableDataSourcesWithAllUsedSegments();

    for (ImmutableDruidDataSource dataSource : dataSources) {
      final Set<DataSegment> segments = Sets.newHashSet(dataSource.getSegments());
      final int numPublishedSegments = segments.size();

      // remove loaded segments
      for (DruidServer druidServer : serverInventoryView.getInventory()) {
        final DruidDataSource loadedView = druidServer.getDataSource(dataSource.getName());
        if (loadedView != null) {
          // This does not use segments.removeAll(loadedView.getSegments()) for performance reasons.
          // Please see https://github.com/apache/druid/pull/5632 and LoadStatusBenchmark for more info.
          for (DataSegment serverSegment : loadedView.getSegments()) {
            segments.remove(serverSegment);
          }
        }
      }
      final int numUnavailableSegments = segments.size();
      loadStatus.put(
          dataSource.getName(),
          100 * ((double) (numPublishedSegments - numUnavailableSegments) / (double) numPublishedSegments)
      );
    }

    return loadStatus;
  }

  @Nullable
  public Long getTotalSizeOfSegmentsAwaitingCompaction(String dataSource)
  {
    return compactSegments.getTotalSizeOfSegmentsAwaitingCompaction(dataSource);
  }

  @Nullable
  public AutoCompactionSnapshot getAutoCompactionSnapshotForDataSource(String dataSource)
  {
    return compactSegments.getAutoCompactionSnapshot(dataSource);
  }

  public Map<String, AutoCompactionSnapshot> getAutoCompactionSnapshot()
  {
    return compactSegments.getAutoCompactionSnapshot();
  }

  public CoordinatorDynamicConfig getDynamicConfigs()
  {
    return CoordinatorDynamicConfig.current(configManager);
  }

  public CoordinatorCompactionConfig getCompactionConfig()
  {
    return CoordinatorCompactionConfig.current(configManager);
  }

  public void markSegmentAsUnused(DataSegment segment)
  {
    log.debug("Marking segment[%s] as unused", segment.getId());
    segmentsMetadataManager.markSegmentAsUnused(segment.getId().toString());
  }

  public String getCurrentLeader()
  {
    return coordLeaderSelector.getCurrentLeader();
  }

  /**
   * Moves a segment from one server to another: queues the load on {@code toServer}'s peon and,
   * once the segment is confirmed loaded there, drops it from {@code fromServer}.
   *
   * The segment is marked "to drop" on the drop peon up front so the balancer strategy can account
   * for the pending move immediately; the mark is removed via {@code loadPeonCallback} (or on
   * failure). {@code callback}, if non-null, is executed when the move completes or fails.
   */
  public void moveSegment(
      DruidCoordinatorRuntimeParams params,
      ImmutableDruidServer fromServer,
      ImmutableDruidServer toServer,
      DataSegment segment,
      final LoadPeonCallback callback
  )
  {
    if (segment == null) {
      log.makeAlert(new IAE("Can not move null DataSegment"), "Exception moving null segment").emit();
      if (callback != null) {
        callback.execute();
      }
      throw new ISE("Cannot move null DataSegment");
    }
    SegmentId segmentId = segment.getId();
    try {
      if (fromServer.getMetadata().equals(toServer.getMetadata())) {
        throw new IAE("Cannot move [%s] to and from the same server [%s]", segmentId, fromServer.getName());
      }

      ImmutableDruidDataSource dataSource = params.getDataSourcesSnapshot().getDataSource(segment.getDataSource());
      if (dataSource == null) {
        throw new IAE("Unable to find dataSource for segment [%s] in metadata", segmentId);
      }

      // get segment information from SegmentsMetadataManager instead of getting it from fromServer's.
      // This is useful when SegmentsMetadataManager and fromServer DataSegment's are different for same
      // identifier (say loadSpec differs because of deep storage migration).
      final DataSegment segmentToLoad = dataSource.getSegment(segment.getId());
      if (segmentToLoad == null) {
        throw new IAE("No segment metadata found for segment Id [%s]", segment.getId());
      }
      final LoadQueuePeon loadPeon = loadManagementPeons.get(toServer.getName());
      if (loadPeon == null) {
        throw new IAE("LoadQueuePeon hasn't been created yet for path [%s]", toServer.getName());
      }

      final LoadQueuePeon dropPeon = loadManagementPeons.get(fromServer.getName());
      if (dropPeon == null) {
        throw new IAE("LoadQueuePeon hasn't been created yet for path [%s]", fromServer.getName());
      }

      final ServerHolder toHolder = new ServerHolder(toServer, loadPeon);
      if (toHolder.getAvailableSize() < segmentToLoad.getSize()) {
        throw new IAE(
            "Not enough capacity on server [%s] for segment [%s]. Required: %,d, available: %,d.",
            toServer.getName(),
            segmentToLoad,
            segmentToLoad.getSize(),
            toHolder.getAvailableSize()
        );
      }

      final String toLoadQueueSegPath =
          ZKPaths.makePath(zkPaths.getLoadQueuePath(), toServer.getName(), segmentId.toString());

      final LoadPeonCallback loadPeonCallback = () -> {
        dropPeon.unmarkSegmentToDrop(segmentToLoad);
        if (callback != null) {
          callback.execute();
        }
      };

      // mark segment to drop before it is actually loaded on server
      // to be able to account this information in DruidBalancerStrategy immediately
      dropPeon.markSegmentToDrop(segmentToLoad);
      try {
        loadPeon.loadSegment(
            segmentToLoad,
            () -> {
              try {
                // Only drop from the source once the destination actually serves the segment, the
                // zk load-queue entry (if zk is enabled) is gone, and the drop isn't already queued.
                if (serverInventoryView.isSegmentLoadedByServer(toServer.getName(), segment) &&
                    (curator == null || curator.checkExists().forPath(toLoadQueueSegPath) == null) &&
                    !dropPeon.getSegmentsToDrop().contains(segment)) {
                  dropPeon.dropSegment(segment, loadPeonCallback);
                } else {
                  loadPeonCallback.execute();
                }
              }
              catch (Exception e) {
                throw new RuntimeException(e);
              }
            }
        );
      }
      catch (Exception e) {
        dropPeon.unmarkSegmentToDrop(segmentToLoad);
        throw new RuntimeException(e);
      }
    }
    catch (Exception e) {
      log.makeAlert(e, "Exception moving segment %s", segmentId).emit();
      if (callback != null) {
        callback.execute();
      }
    }
  }

  @VisibleForTesting
  public int getCachedBalancerThreadNumber()
  {
    return cachedBalancerThreadNumber;
  }

  @VisibleForTesting
  public ListeningExecutorService getBalancerExec()
  {
    return balancerExec;
  }

  @LifecycleStart
  public void start()
  {
    synchronized (lock) {
      if (started) {
        return;
      }
      started = true;

      coordLeaderSelector.registerListener(
          new DruidLeaderSelector.Listener()
          {
            @Override
            public void becomeLeader()
            {
              DruidCoordinator.this.becomeLeader();
            }

            @Override
            public void stopBeingLeader()
            {
              DruidCoordinator.this.stopBeingLeader();
            }
          }
      );
    }
  }

  @LifecycleStop
  public void stop()
  {
    synchronized (lock) {
      if (!started) {
        return;
      }

      coordLeaderSelector.unregisterListener();

      started = false;

      exec.shutdownNow();

      if (balancerExec != null) {
        balancerExec.shutdownNow();
      }
    }
  }

  /**
   * Runs the compact-segments duty group once, synchronously, on the caller's thread
   * (used for on-demand compaction rather than the periodic schedule).
   */
  public void runCompactSegmentsDuty()
  {
    final int startingLeaderCounter = coordLeaderSelector.localTerm();
    DutiesRunnable compactSegmentsDuty =
        new DutiesRunnable(makeCompactSegmentsDuty(), startingLeaderCounter, COMPACT_SEGMENTS_DUTIES_DUTY_GROUP);
    compactSegmentsDuty.run();
  }

  private void becomeLeader()
  {
    synchronized (lock) {
      if (!started) {
        return;
      }

      log.info(
          "I am the leader of the coordinators, all must bow! Starting coordination in [%s].",
          config.getCoordinatorStartDelay()
      );

      segmentsMetadataManager.startPollingDatabasePeriodically();
      metadataRuleManager.start();
      lookupCoordinatorManager.start();
      serviceAnnouncer.announce(self);
      final int startingLeaderCounter = coordLeaderSelector.localTerm();

      final List<Pair<? extends DutiesRunnable, Duration>> dutiesRunnables = new ArrayList<>();
      dutiesRunnables.add(
          Pair.of(
              new DutiesRunnable(makeHistoricalManagementDuties(), startingLeaderCounter, HISTORICAL_MANAGEMENT_DUTIES_DUTY_GROUP),
              config.getCoordinatorPeriod()
          )
      );
      if (indexingServiceClient != null) {
        dutiesRunnables.add(
            Pair.of(
                new DutiesRunnable(makeIndexingServiceDuties(), startingLeaderCounter, INDEXING_SERVICE_DUTIES_DUTY_GROUP),
                config.getCoordinatorIndexingPeriod()
            )
        );
      }

      for (final Pair<? extends DutiesRunnable, Duration> dutiesRunnable : dutiesRunnables) {
        // CompactSegmentsDuty can take a non-trivial amount of time to complete.
        // Hence, we schedule at fixed rate to make sure the other tasks still run at approximately every
        // config.getCoordinatorIndexingPeriod() period. Note that caution should be taken
        // if setting config.getCoordinatorIndexingPeriod() lower than the default value.
        ScheduledExecutors.scheduleAtFixedRate(
            exec,
            config.getCoordinatorStartDelay(),
            dutiesRunnable.rhs,
            new Callable<ScheduledExecutors.Signal>()
            {
              private final DutiesRunnable theRunnable = dutiesRunnable.lhs;

              @Override
              public ScheduledExecutors.Signal call()
              {
                if (coordLeaderSelector.isLeader() && startingLeaderCounter == coordLeaderSelector.localTerm()) {
                  theRunnable.run();
                }
                // Re-check after the run: the duty may have taken long enough for leadership to change.
                if (coordLeaderSelector.isLeader() && startingLeaderCounter == coordLeaderSelector.localTerm()) {
                  // (We might no longer be leader)
                  return ScheduledExecutors.Signal.REPEAT;
                } else {
                  return ScheduledExecutors.Signal.STOP;
                }
              }
            }
        );
      }
    }
  }

  private void stopBeingLeader()
  {
    synchronized (lock) {
      log.info("I am no longer the leader...");

      for (String server : loadManagementPeons.keySet()) {
        LoadQueuePeon peon = loadManagementPeons.remove(server);
        peon.stop();
      }
      loadManagementPeons.clear();

      serviceAnnouncer.unannounce(self);
      lookupCoordinatorManager.stop();
      metadataRuleManager.stop();
      segmentsMetadataManager.stopPollingDatabasePeriodically();

      if (balancerExec != null) {
        balancerExec.shutdownNow();
        balancerExec = null;
      }
    }
  }

  private List<CoordinatorDuty> makeHistoricalManagementDuties()
  {
    return ImmutableList.of(
        new LogUsedSegments(),
        new UpdateCoordinatorStateAndPrepareCluster(),
        new RunRules(DruidCoordinator.this),
        new UnloadUnusedSegments(),
        new MarkAsUnusedOvershadowedSegments(DruidCoordinator.this),
        new BalanceSegments(DruidCoordinator.this),
        new EmitClusterStatsAndMetrics(DruidCoordinator.this)
    );
  }

  private List<CoordinatorDuty> makeIndexingServiceDuties()
  {
    List<CoordinatorDuty> duties = new ArrayList<>();
    duties.add(new LogUsedSegments());
    duties.addAll(indexingServiceDuties);
    // CompactSegmentsDuty should be the last duty as it can take a long time to complete
    duties.addAll(makeCompactSegmentsDuty());
    log.debug(
        "Done making indexing service duties %s",
        duties.stream().map(duty -> duty.getClass().getName()).collect(Collectors.toList())
    );
    return ImmutableList.copyOf(duties);
  }

  private List<CoordinatorDuty> makeCompactSegmentsDuty()
  {
    return ImmutableList.of(compactSegments);
  }

  /**
   * Executes one group of {@link CoordinatorDuty}s. A fresh instance is created per duty group
   * with the leader term captured at creation; the run becomes a no-op once leadership is lost
   * or the term changes.
   */
  @VisibleForTesting
  protected class DutiesRunnable implements Runnable
  {
    private final long startTimeNanos = System.nanoTime();
    private final List<CoordinatorDuty> duties;
    private final int startingLeaderCounter;
    private final String dutiesRunnableAlias;

    protected DutiesRunnable(List<CoordinatorDuty> duties, final int startingLeaderCounter, String alias)
    {
      this.duties = duties;
      this.startingLeaderCounter = startingLeaderCounter;
      this.dutiesRunnableAlias = alias;
    }

    /**
     * (Re)creates {@link #balancerExec} sized by the dynamic {@code balancerComputeThreads}
     * config, rebuilding the pool only when the configured count has changed.
     */
    @VisibleForTesting
    protected void initBalancerExecutor()
    {
      final int currentNumber = getDynamicConfigs().getBalancerComputeThreads();
      final String threadNameFormat = "coordinator-cost-balancer-%s";
      // first time initialization
      if (balancerExec == null) {
        balancerExec = MoreExecutors.listeningDecorator(Execs.multiThreaded(
            currentNumber,
            threadNameFormat
        ));
        cachedBalancerThreadNumber = currentNumber;
        return;
      }

      if (cachedBalancerThreadNumber != currentNumber) {
        log.info(
            "balancerComputeThreads has been changed from [%s] to [%s], recreating the thread pool.",
            cachedBalancerThreadNumber,
            currentNumber
        );
        balancerExec.shutdownNow();
        balancerExec = MoreExecutors.listeningDecorator(Execs.multiThreaded(
            currentNumber,
            threadNameFormat
        ));
        cachedBalancerThreadNumber = currentNumber;
      }
    }

    @Override
    public void run()
    {
      try {
        final long globalStart = System.nanoTime();
        synchronized (lock) {
          if (!coordLeaderSelector.isLeader()) {
            log.info("LEGGO MY EGGO. [%s] is leader.", coordLeaderSelector.getCurrentLeader());
            stopBeingLeader();
            return;
          }
        }

        List<Boolean> allStarted = Arrays.asList(
            segmentsMetadataManager.isPollingDatabasePeriodically(),
            serverInventoryView.isStarted()
        );
        for (Boolean aBoolean : allStarted) {
          if (!aBoolean) {
            log.error("InventoryManagers not started[%s]", allStarted);
            stopBeingLeader();
            return;
          }
        }

        initBalancerExecutor();
        BalancerStrategy balancerStrategy = factory.createBalancerStrategy(balancerExec);

        // Do coordinator stuff.
        DataSourcesSnapshot dataSourcesSnapshot = segmentsMetadataManager.getSnapshotOfDataSourcesWithAllUsedSegments();

        DruidCoordinatorRuntimeParams params =
            DruidCoordinatorRuntimeParams
                .newBuilder()
                .withDatabaseRuleManager(metadataRuleManager)
                .withStartTimeNanos(startTimeNanos)
                .withSnapshotOfDataSourcesWithAllUsedSegments(dataSourcesSnapshot)
                .withDynamicConfigs(getDynamicConfigs())
                .withCompactionConfig(getCompactionConfig())
                .withEmitter(emitter)
                .withBalancerStrategy(balancerStrategy)
                .build();

        boolean coordinationPaused = getDynamicConfigs().getPauseCoordination();
        if (coordinationPaused
            && coordLeaderSelector.isLeader()
            && startingLeaderCounter == coordLeaderSelector.localTerm()) {
          log.debug(
              "Coordination is paused via dynamic configs! I will not be running Coordination Duties at this time"
          );
        }

        for (CoordinatorDuty duty : duties) {
          // Don't read state and run state in the same duty otherwise racy conditions may exist
          if (!coordinationPaused
              && coordLeaderSelector.isLeader()
              && startingLeaderCounter == coordLeaderSelector.localTerm()) {
            final long start = System.nanoTime();
            params = duty.run(params);
            final long end = System.nanoTime();
            if (params == null) {
              // This duty wanted to cancel the run. No log message, since the duty should have logged a reason.
              return;
            } else {
              params.getCoordinatorStats().addToDutyStat("runtime", duty.getClass().getName(), TimeUnit.NANOSECONDS.toMillis(end - start));
            }
          }
        }

        // Emit the runtime of the full DutiesRunnable
        params.getEmitter().emit(
            new ServiceMetricEvent.Builder()
                .setDimension(DruidMetrics.DUTY_GROUP, dutiesRunnableAlias)
                .build("coordinator/global/time", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - globalStart))
        );
      }
      catch (Exception e) {
        log.makeAlert(e, "Caught exception, ignoring so that schedule keeps going.").emit();
      }
    }
  }

  /**
   * Updates the enclosing {@link DruidCoordinator}'s state and prepares an immutable view of the cluster state (which
   * consists of {@link DruidCluster} and {@link SegmentReplicantLookup}) and feeds it into {@link
   * DruidCoordinatorRuntimeParams} for use in subsequent {@link CoordinatorDuty}s (see the order in {@link
   * #makeHistoricalManagementDuties()}).
   */
  private class UpdateCoordinatorStateAndPrepareCluster implements CoordinatorDuty
  {
    @Nullable
    @Override
    public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params)
    {
      List<ImmutableDruidServer> currentServers = prepareCurrentServers();

      startPeonsForNewServers(currentServers);

      final DruidCluster cluster = prepareCluster(params, currentServers);
      segmentReplicantLookup = SegmentReplicantLookup.make(cluster);

      stopPeonsForDisappearedServers(currentServers);

      return params.buildFromExisting()
                   .withDruidCluster(cluster)
                   .withLoadManagementPeons(loadManagementPeons)
                   .withSegmentReplicantLookup(segmentReplicantLookup)
                   .build();
    }

    // Snapshot of all inventory servers that can receive replicated or broadcast segments.
    List<ImmutableDruidServer> prepareCurrentServers()
    {
      List<ImmutableDruidServer> currentServers = serverInventoryView
          .getInventory()
          .stream()
          .filter(DruidServer::isSegmentReplicationOrBroadcastTarget)
          .map(DruidServer::toImmutableDruidServer)
          .collect(Collectors.toList());

      if (log.isDebugEnabled()) {
        // Display info about all segment-replicatable (historical and bridge) servers
        log.debug("Servers");
        for (ImmutableDruidServer druidServer : currentServers) {
          log.debug("  %s", druidServer);
          log.debug("    -- DataSources");
          for (ImmutableDruidDataSource druidDataSource : druidServer.getDataSources()) {
            log.debug("    %s", druidDataSource);
          }
        }
      }
      return currentServers;
    }

    // Ensures every current server has a started LoadQueuePeon.
    void startPeonsForNewServers(List<ImmutableDruidServer> currentServers)
    {
      for (ImmutableDruidServer server : currentServers) {
        loadManagementPeons.computeIfAbsent(server.getName(), serverName -> {
          LoadQueuePeon loadQueuePeon = taskMaster.giveMePeon(server);
          loadQueuePeon.start();
          log.debug("Created LoadQueuePeon for server[%s].", server.getName());

          return loadQueuePeon;
        });
      }
    }

    DruidCluster prepareCluster(DruidCoordinatorRuntimeParams params, List<ImmutableDruidServer> currentServers)
    {
      Set<String> decommissioningServers = params.getCoordinatorDynamicConfig().getDecommissioningNodes();
      final DruidCluster cluster = new DruidCluster();
      for (ImmutableDruidServer server : currentServers) {
        cluster.add(
            new ServerHolder(
                server,
                loadManagementPeons.get(server.getName()),
                decommissioningServers.contains(server.getHost())
            )
        );
      }
      return cluster;
    }

    // Stops and removes peons for servers that are no longer in the current server list.
    void stopPeonsForDisappearedServers(List<ImmutableDruidServer> servers)
    {
      final Set<String> disappeared = Sets.newHashSet(loadManagementPeons.keySet());
      for (ImmutableDruidServer server : servers) {
        disappeared.remove(server.getName());
      }
      for (String name : disappeared) {
        log.debug("Removing listener for server[%s] which is no longer there.", name);
        LoadQueuePeon peon = loadManagementPeons.remove(name);
        peon.stop();
      }
    }
  }
}
package edu.cs4730.floatingcube;

/**
 * Created by Seker on 7/2/2015.
 *
 * This code actually will draw a cube.
 *
 * Some of the code is used from https://github.com/christopherperry/cube-rotation
 * and changed up to opengl 3.0
 */

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import android.opengl.GLES30;
import android.util.Log;

/**
 * Draws a solid-colored cube with GLES 3.0: six faces, two triangles each
 * (36 vertices total, no index buffer). Each face is drawn with its own
 * uniform color. The shader program is compiled and linked once in the
 * constructor; {@link #draw(float[])} only binds state and issues draw calls.
 */
public class Cube {

    private int mProgramObject;   // linked GL program; 0 if setup failed
    private int mMVPMatrixHandle; // uniform location of uMVPMatrix
    private int mColorHandle;     // uniform location of vColor
    private FloatBuffer mVertices;

    // Initial size (half-extent) of the cube. Set here so it is easier to change later.
    float size = 0.4f;

    // Raw vertex data; copied into the direct buffer mVertices in the constructor.
    // Order matters: draw() assumes faces appear in the sequence
    // FRONT, BACK, LEFT, RIGHT, TOP, BOTTOM, 6 vertices per face.
    float[] mVerticesData = new float[]{
            ////////////////////////////////////////////////////////////////////
            // FRONT
            ////////////////////////////////////////////////////////////////////
            // Triangle 1
            -size, size, size, // top-left
            -size, -size, size, // bottom-left
            size, -size, size, // bottom-right
            // Triangle 2
            size, -size, size, // bottom-right
            size, size, size, // top-right
            -size, size, size, // top-left

            ////////////////////////////////////////////////////////////////////
            // BACK
            ////////////////////////////////////////////////////////////////////
            // Triangle 1
            -size, size, -size, // top-left
            -size, -size, -size, // bottom-left
            size, -size, -size, // bottom-right
            // Triangle 2
            size, -size, -size, // bottom-right
            size, size, -size, // top-right
            -size, size, -size, // top-left

            ////////////////////////////////////////////////////////////////////
            // LEFT
            ////////////////////////////////////////////////////////////////////
            // Triangle 1
            -size, size, -size, // top-left
            -size, -size, -size, // bottom-left
            -size, -size, size, // bottom-right
            // Triangle 2
            -size, -size, size, // bottom-right
            -size, size, size, // top-right
            -size, size, -size, // top-left

            ////////////////////////////////////////////////////////////////////
            // RIGHT
            ////////////////////////////////////////////////////////////////////
            // Triangle 1
            size, size, -size, // top-left
            size, -size, -size, // bottom-left
            size, -size, size, // bottom-right
            // Triangle 2
            size, -size, size, // bottom-right
            size, size, size, // top-right
            size, size, -size, // top-left

            ////////////////////////////////////////////////////////////////////
            // TOP
            ////////////////////////////////////////////////////////////////////
            // Triangle 1
            -size, size, -size, // top-left
            -size, size, size, // bottom-left
            size, size, size, // bottom-right
            // Triangle 2
            size, size, size, // bottom-right
            size, size, -size, // top-right
            -size, size, -size, // top-left

            ////////////////////////////////////////////////////////////////////
            // BOTTOM
            ////////////////////////////////////////////////////////////////////
            // Triangle 1
            -size, -size, -size, // top-left
            -size, -size, size, // bottom-left
            size, -size, size, // bottom-right
            // Triangle 2
            size, -size, size, // bottom-right
            size, -size, -size, // top-right
            -size, -size, -size // top-left
    };

    float colorcyan[] = myColor.cyan();
    float colorblue[] = myColor.blue();
    float colorred[] = myColor.red();
    float colorgray[] = myColor.gray();
    float colorgreen[] = myColor.green();
    float coloryellow[] = myColor.yellow();

    //vertex shader code
    String vShaderStr = "#version 300 es 			  \n"
            + "uniform mat4 uMVPMatrix;     \n"
            + "in vec4 vPosition;           \n"
            + "void main()                  \n"
            + "{                            \n"
            + "   gl_Position = uMVPMatrix * vPosition;  \n"
            + "}                            \n";

    //fragment shader code.
    String fShaderStr = "#version 300 es		 			          	\n"
            + "precision mediump float;					  	\n"
            + "uniform vec4 vColor;	 			 		  	\n"
            + "out vec4 fragColor;	 			 		  	\n"
            + "void main()                                  \n"
            + "{                                            \n"
            + "  fragColor = vColor;                    	\n"
            + "}                                            \n";

    String TAG = "Cube";

    /**
     * Builds the vertex buffer and compiles/links the shader program.
     * On any failure {@code mProgramObject} stays 0 and draw() will render nothing.
     */
    public Cube() {
        // First set up the mVertices buffer: native byte order, 4 bytes per float.
        mVertices = ByteBuffer
                .allocateDirect(mVerticesData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(mVerticesData);
        mVertices.position(0);

        //setup the shaders
        int vertexShader;
        int fragmentShader;
        int programObject;
        int[] linked = new int[1];

        // Load the vertex/fragment shaders
        vertexShader = myStereoRenderer.LoadShader(GLES30.GL_VERTEX_SHADER, vShaderStr);
        fragmentShader = myStereoRenderer.LoadShader(GLES30.GL_FRAGMENT_SHADER, fShaderStr);

        // Create the program object
        programObject = GLES30.glCreateProgram();
        if (programObject == 0) {
            Log.e(TAG, "So some kind of error, but what?");
            // FIX: release the compiled shaders instead of leaking them.
            GLES30.glDeleteShader(vertexShader);
            GLES30.glDeleteShader(fragmentShader);
            return;
        }

        GLES30.glAttachShader(programObject, vertexShader);
        GLES30.glAttachShader(programObject, fragmentShader);

        // Bind vPosition to attribute 0
        GLES30.glBindAttribLocation(programObject, 0, "vPosition");

        // Link the program
        GLES30.glLinkProgram(programObject);

        // Check the link status
        GLES30.glGetProgramiv(programObject, GLES30.GL_LINK_STATUS, linked, 0);

        if (linked[0] == 0) {
            Log.e(TAG, "Error linking program:");
            Log.e(TAG, GLES30.glGetProgramInfoLog(programObject));
            GLES30.glDeleteProgram(programObject);
            // FIX: the shader objects were leaked here in the original.
            GLES30.glDeleteShader(vertexShader);
            GLES30.glDeleteShader(fragmentShader);
            return;
        }

        // FIX: once the program is linked the shader objects are no longer needed;
        // detach and delete them so the driver can reclaim the memory
        // (previously they were leaked for the lifetime of the process).
        GLES30.glDetachShader(programObject, vertexShader);
        GLES30.glDetachShader(programObject, fragmentShader);
        GLES30.glDeleteShader(vertexShader);
        GLES30.glDeleteShader(fragmentShader);

        // Store the program object
        mProgramObject = programObject;

        //now everything is setup and ready to draw.
    }

    /**
     * Renders the cube with the given model-view-projection matrix.
     * Issues one glDrawArrays per face, each preceded by a color-uniform upload,
     * in the order FRONT, BACK, LEFT, RIGHT, TOP, BOTTOM (matching mVerticesData).
     *
     * @param mvpMatrix 4x4 column-major MVP matrix uploaded to uMVPMatrix
     */
    public void draw(float[] mvpMatrix) {
        // Use the program object
        GLES30.glUseProgram(mProgramObject);

        // get handle to shape's transformation matrix
        mMVPMatrixHandle = GLES30.glGetUniformLocation(mProgramObject, "uMVPMatrix");
        myStereoRenderer.checkGlError("glGetUniformLocation");

        // get handle to fragment shader's vColor member
        mColorHandle = GLES30.glGetUniformLocation(mProgramObject, "vColor");

        // Apply the projection and view transformation
        GLES30.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
        myStereoRenderer.checkGlError("glUniformMatrix4fv");

        int VERTEX_POS_INDX = 0;
        mVertices.position(VERTEX_POS_INDX);  //just in case. We did it already though.

        // Hand the vertex data to attribute 0; needed even with no transformations.
        GLES30.glVertexAttribPointer(VERTEX_POS_INDX, 3, GLES30.GL_FLOAT,
                false, 0, mVertices);
        GLES30.glEnableVertexAttribArray(VERTEX_POS_INDX);

        // One color per face, in the same order the faces appear in mVerticesData.
        // This loop issues the identical GL call sequence the original six
        // copy-pasted sections did.
        float[][] faceColors = {colorblue, colorcyan, colorred, colorgray, colorgreen, coloryellow};
        int verticesPerface = 6;
        int startPos = 0;
        for (float[] faceColor : faceColors) {
            GLES30.glUniform4fv(mColorHandle, 1, faceColor, 0);
            GLES30.glDrawArrays(GLES30.GL_TRIANGLES, startPos, verticesPerface);
            startPos += verticesPerface;
        }
    }
}
package tools;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Properties;

import server.Server;

/*
 * @author Ben Sixel
 * FileHandler class. Deals with system files:
 * writing to error logs, chat logs, creating directories and properties, etc.
 */
public class FileHandler {

    public static final String chatLogPath = "server_chat_log.log";
    public static final String errorLogPath = "error_log.log";
    public static final String configPath = "chat_server.properties";

    /**
     * Static method to print a message to both the console and the error/debug log.
     * @param msg The message being printed/saved.
     */
    public static void debugPrint(String msg) {
        String fnlMsg = SystemInfo.getFullDate() + ": " + msg;
        System.err.println(fnlMsg);
        System.err.print("> ");
        writeToErrorLog(fnlMsg);
    }

    /**
     * Static method to print a message to both the console and the chat log.
     * @param msg The message being printed/saved.
     */
    public static void chatPrint(String msg) {
        System.out.println(msg);
        System.out.print("> ");
        writeToChatLog(msg);
    }

    /**
     * Initiates a config file. Creating the (empty) file is the whole job;
     * properties are written later via setProperty/saveProperties.
     */
    public static void generateConfigFile() {
        try {
            // createNewFile is a no-op if the file already exists.
            new File(configPath).createNewFile();
        } catch (IOException e) {
            debugPrint("Error generating config file: " + configPath);
            debugPrint(e.getStackTrace()[0].toString());
        }
    }

    /**
     * Gets a property from the server properties file.
     * @param property The name of the desired property.
     * @return The value of the desired property from the properties file,
     *         or null if absent or unreadable.
     */
    public static String getProperty(String property) {
        String res = null;
        try {
            new File(configPath).createNewFile();
            Properties properties = new Properties();
            // FIX: try-with-resources replaces a finally block that NPE'd when
            // the FileInputStream constructor itself threw (stream was still null).
            try (InputStream fileStream = new FileInputStream(new File(configPath))) {
                properties.load(fileStream);
                res = properties.getProperty(property);
            }
        } catch (IOException e) {
            debugPrint(e.getStackTrace()[0].toString());
        }
        return res;
    }

    /**
     * Sets a property in the server properties file.
     * NOTE(review): like the original, this rewrites the file with ONLY the given
     * property — any other stored properties are discarded. Kept for compatibility.
     * @param property The property being set.
     * @param value The value (as a string) we are giving the property.
     */
    public static void setProperty(String property, String value) {
        try {
            File configFile = new File(configPath);
            configFile.createNewFile();
            Properties properties = new Properties();
            properties.setProperty(property, value);
            try (OutputStream writer = new FileOutputStream(configFile)) {
                properties.store(writer, "Saved user info");
            }
        } catch (IOException e) {
            debugPrint(e.getStackTrace()[0].toString());
        }
    }

    /**
     * @deprecated Use {@link #saveProperties(Server)} instead.
     */
    @Deprecated
    public static void initUserPrefs() {
        try {
            File configFile = new File(configPath);
            configFile.createNewFile();
            Properties defaultProperties = new Properties();
            defaultProperties.setProperty("last_port", "");
            defaultProperties.setProperty("last_password", "");
            Properties userProperties = new Properties(defaultProperties);
            try (InputStream configReader = new FileInputStream(configFile)) {
                userProperties.load(configReader);
            }
        } catch (IOException e) {
            debugPrint(e.getStackTrace()[0].toString());
        }
    }

    /**
     * Used for writing messages to the server's chat log.
     * @param message The message to be saved to the chat log.
     */
    public static void writeToChatLog(String message) {
        // FileWriter in append mode creates the file if needed, so no explicit
        // createNewFile() is required. PrintWriter.close() flushes and closes
        // the underlying writer (the original closed the FileWriter first).
        try (PrintWriter printer = new PrintWriter(new FileWriter(chatLogPath, true))) {
            printer.printf("%s%n", message.trim());
        } catch (IOException e) {
            debugPrint("Error writing to chat log: " + chatLogPath);
            debugPrint(e.getStackTrace()[0].toString());
        }
    }

    /**
     * Used for writing messages to the server's error/debug log.
     * @param message The message to be saved to the error log.
     */
    public static void writeToErrorLog(String message) {
        // FIX: the original touched chatLogPath here instead of errorLogPath,
        // and its error message also named the chat log.
        try (PrintWriter printer = new PrintWriter(new FileWriter(errorLogPath, true))) {
            printer.printf("%s%n", message.trim());
        } catch (IOException e) {
            // FIX: do NOT call debugPrint here — debugPrint calls writeToErrorLog,
            // so a persistent write failure caused unbounded recursion. Report
            // directly to stderr instead.
            System.err.println("Error writing to error log: " + errorLogPath);
            System.err.println(e.getStackTrace()[0].toString());
        }
    }

    /**
     * Stores the properties for a given server object in the properties file.
     * @param server The server whose properties we are saving to file.
     */
    public static void saveProperties(Server server) {
        try {
            File configFile = new File(configPath);
            configFile.createNewFile();
            Properties properties = new Properties();
            properties.setProperty("last_port", ((Integer) server.getPortStart()).toString());
            properties.setProperty("last_password", server.getPassword());
            // Preserve the previously stored admin list, since the rewrite below
            // replaces the whole file.
            String prevAdmins = getProperty("admins");
            properties.setProperty("admins", prevAdmins != null ? prevAdmins : "");
            try (OutputStream writer = new FileOutputStream(configFile)) {
                properties.store(writer, "Saved user info");
            }
        } catch (IOException e) {
            debugPrint("Error writing to config file: " + configPath);
            debugPrint(e.getStackTrace()[0].toString());
        }
    }
}
/*
 * Copyright 2015 Textocat
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.textocat.textokit.io.brat;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.textocat.textokit.commons.DocumentMetadata;
import com.textocat.textokit.commons.cas.FSUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.*;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.collection.CollectionException;
import org.apache.uima.fit.component.CasCollectionReader_ImplBase;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.factory.initializable.InitializableFactory;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.util.Progress;
import org.apache.uima.util.ProgressImpl;
import org.nlplab.brat.ann.*;
import org.nlplab.brat.configuration.*;

import java.io.*;
import java.util.*;

import static com.textocat.textokit.commons.util.AnnotatorUtils.annotationTypeExist;
import static com.textocat.textokit.commons.util.AnnotatorUtils.featureExist;
import static org.nlplab.brat.BratConstants.*;

/**
 * Collection reader that turns a directory of brat stand-off annotation files
 * (*.ann plus matching *.txt, with an annotation.conf describing the brat type
 * system) into UIMA CASes. Entities, relations and events are mapped to UIMA
 * annotations according to a {@link BratUimaMapping} produced by the configured
 * mapping factory. One CAS is produced per brat document.
 *
 * @author RGareev
 * @author pathfinder
 */
public class BratCollectionReader extends CasCollectionReader_ImplBase {

    public static final String PARAM_BRAT_COLLECTION_DIR = "BratCollectionDir";
    public static final String PARAM_MAPPING_FACTORY_CLASS = "mappingFactoryClass";

    // Directory holding paired *.ann / *.txt files and annotation.conf.
    @ConfigurationParameter(name = PARAM_BRAT_COLLECTION_DIR, mandatory = true)
    private File bratCollectionDir;
    // Fully-qualified name of a BratUimaMappingFactory implementation.
    @ConfigurationParameter(name = PARAM_MAPPING_FACTORY_CLASS, mandatory = true)
    private String mappingFactoryClassName;
    // config fields
    private BratTypesConfiguration bratTypesCfg;
    private BratUimaMappingFactory mappingFactory;
    // fields derived from config
    private BratUimaMapping mapping;
    private int totalDocsNum = -1;
    private Feature beginFeature;
    private Feature endFeature;
    private Type documentMetadataType;
    private Feature docMetaUriFeature;
    private Feature docMetaSizeFeature;
    // state fields
    private Iterator<BratDocument> bratDocIter;
    private int docsRead = 0;
    // per-CAS state fields (set at the top of getNext, nulled at its end)
    private String currentDocName;
    private FromBratMappingContext mappingCtx;
    private CAS cas;
    private BratAnnotationContainer bratContainer;

    /**
     * Creates the mapping factory and enumerates the brat documents in the
     * collection directory. Fails fast if any *.ann file lacks its *.txt twin.
     */
    @Override
    public void initialize(UimaContext ctx) throws ResourceInitializationException {
        super.initialize(ctx);
        // initialize mappingFactory
        mappingFactory = InitializableFactory.create(ctx, mappingFactoryClassName,
                BratUimaMappingFactory.class);
        // make bratDocIter
        File[] annFiles = bratCollectionDir.listFiles(
                (FileFilter) FileFilterUtils.suffixFileFilter(BratDocument.ANN_FILE_SUFFIX));
        List<BratDocument> bratDocs = Lists.newArrayListWithExpectedSize(annFiles.length);
        for (File annFile : annFiles) {
            String docBaseName = FilenameUtils.getBaseName(annFile.getPath());
            BratDocument bratDoc = new BratDocument(bratCollectionDir, docBaseName);
            if (bratDoc.exists()) {
                bratDocs.add(bratDoc);
            } else {
                throw new IllegalStateException(String.format(
                        "Missing txt file for %s", annFile));
            }
        }
        totalDocsNum = bratDocs.size();
        bratDocIter = bratDocs.iterator();
    }

    /**
     * Caches frequently used type-system handles (Annotation begin/end,
     * DocumentMetadata features), reads annotation.conf into the brat types
     * configuration, and asks the factory for the brat-to-UIMA mapping.
     */
    @Override
    public void typeSystemInit(TypeSystem ts) throws ResourceInitializationException {
        super.typeSystemInit(ts);
        // memorize Annotation begin and end features
        Type annotationType = ts.getType("uima.tcas.Annotation");
        beginFeature = annotationType.getFeatureByBaseName("begin");
        assert beginFeature != null;
        endFeature = annotationType.getFeatureByBaseName("end");
        assert endFeature != null;
        // memorize document metadata type and its features
        documentMetadataType = ts.getType(DocumentMetadata.class.getName());
        try {
            annotationTypeExist(DocumentMetadata.class.getName(), documentMetadataType);
            docMetaUriFeature = featureExist(documentMetadataType, "sourceUri");
            docMetaSizeFeature = featureExist(documentMetadataType, "documentSize");
        } catch (AnalysisEngineProcessException e) {
            throw new ResourceInitializationException(e);
        }
        // initialize BratTypesConfiguration
        File annotationConfFile = new File(bratCollectionDir, ANNOTATION_CONF_FILE);
        if (!annotationConfFile.isFile()) {
            throw new IllegalStateException(String.format(
                    "%s is missing", annotationConfFile));
        }
        try {
            bratTypesCfg = BratTypesConfiguration.readFrom(annotationConfFile);
        } catch (IOException e) {
            throw new ResourceInitializationException(e);
        }
        mappingFactory.setTypeSystem(ts);
        mappingFactory.setBratTypes(bratTypesCfg);
        mapping = mappingFactory.getMapping();
    }

    @Override
    public boolean hasNext() throws IOException, CollectionException {
        return bratDocIter.hasNext();
    }

    /**
     * Fills the given CAS from the next brat document: sets the document text
     * and metadata, parses the *.ann file, then maps entities first (so they
     * can be referenced as roles), relations second, and events last.
     * Relation spans are derived from their arguments; event spans from the
     * trigger.
     */
    @Override
    public void getNext(CAS cas) throws IOException, CollectionException {
        this.cas = cas;
        BratDocument bratDoc = bratDocIter.next();
        currentDocName = bratDoc.getDocumentName();
        // read and set text
        String txt = FileUtils.readFileToString(bratDoc.getTxtFile(), TXT_FILES_ENCODING);
        cas.setDocumentText(txt);
        // set DocumentMetadata
        String docName = FilenameUtils.getBaseName(bratDoc.getTxtFile().getPath());
        setDocumentMetadata(cas, docName, txt.length());
        bratContainer = new BratAnnotationContainer(bratTypesCfg);
        BufferedReader annReader = new BufferedReader(new InputStreamReader(
                new FileInputStream(bratDoc.getAnnFile()), ANN_FILES_ENCODING));
        try {
            bratContainer.readFrom(annReader);
        } catch (Exception e) {
            throw new IllegalStateException("Parsing " + currentDocName, e);
        } finally {
            IOUtils.closeQuietly(annReader);
        }
        // prepare Mapping context
        mappingCtx = new FromBratMappingContext();
        // map entity types
        for (BratEntityType bType : mapping.getEntityTypes()) {
            BratUimaEntityMapping entMapping = mapping.getEntityMapping(bType);
            Type uType = entMapping.uimaType;
            for (BratEntity bEntity : bratContainer.getEntities(bType)) {
                if (mappingCtx.isMapped(bEntity)) {
                    continue;
                }
                AnnotationFS uAnno = cas.createAnnotation(uType,
                        bEntity.getBegin(), bEntity.getEnd());
                mapAttributes(entMapping.getAttributeToFeatureMap(), bEntity, uAnno);
                mapNote(entMapping, bEntity, uAnno);
                cas.addFsToIndexes(uAnno);
                mappingCtx.mapped(bEntity, uAnno);
            }
        }
        // map relation types
        for (BratRelationType bType : mapping.getRelationTypes()) {
            BratUimaRelationMapping relMapping = mapping.getRelationMapping(bType);
            for (BratRelation bRelation : bratContainer.getRelations(bType)) {
                if (mappingCtx.isMapped(bRelation)) {
                    continue;
                }
                AnnotationFS uRelation = mapStructureRoles(bRelation, relMapping);
                List<AnnotationFS> uRelationArgs = getRelationArgs(uRelation,
                        relMapping.featureRoles.keySet());
                // set UIMA relation begin to minimal begin offset of arguments
                int uRelationBegin = FSUtils.intMinBy(uRelationArgs, beginFeature);
                uRelation.setIntValue(beginFeature, uRelationBegin);
                // set UIMA relation end to maximal end offset of arguments
                int uRelationEnd = FSUtils.intMaxBy(uRelationArgs, endFeature);
                uRelation.setIntValue(endFeature, uRelationEnd);
                // map note
                mapNote(relMapping, bRelation, uRelation);
                // memorize
                cas.addFsToIndexes(uRelation);
                mappingCtx.mapped(bRelation, uRelation);
            }
        }
        // map event types
        for (BratEventType bType : mapping.getEventTypes()) {
            BratUimaEventMapping evMapping = mapping.getEventMapping(bType);
            for (BratEvent bEvent : bratContainer.getEvents(bType)) {
                BratEventTrigger bTrigger = bEvent.getTrigger();
                AnnotationFS uEvent = mapStructureRoles(bEvent, evMapping);
                // set UIMA event begin to trigger begin
                uEvent.setIntValue(beginFeature, bTrigger.getBegin());
                // set UIMA event end to trigger end
                uEvent.setIntValue(endFeature, bTrigger.getEnd());
                // map note
                mapNote(evMapping, bEvent, uEvent);
                // memorize
                cas.addFsToIndexes(uEvent);
                mappingCtx.mapped(bEvent, uEvent);
            }
        }
        // increase progress counter
        docsRead++;
        // clean per-CAS state
        currentDocName = null;
        mappingCtx = null;
        this.cas = null;
        bratContainer = null;
    }

    /**
     * Copies brat attribute values (Boolean or String only) onto the UIMA
     * annotation features they are mapped to; missing attributes are skipped.
     */
    private void mapAttributes(Map<String, Feature> attr2FeatMap, HasAttributes attrHolder,
                               AnnotationFS uAnno) {
        for (String attrName : attr2FeatMap.keySet()) {
            Feature uFeat = attr2FeatMap.get(attrName);
            Object attrValue = attrHolder.getAttributesMap().get(attrName);
            if (attrValue == null) {
                continue;
            } else if (attrValue instanceof Boolean) {
                uAnno.setBooleanValue(uFeat, (Boolean) attrValue);
            } else if (attrValue instanceof String) {
                uAnno.setStringValue(uFeat, (String) attrValue);
            } else {
                throw new IllegalStateException();
            }
        }
    }

    /**
     * Parses any brat note annotations attached to {@code bAnno} via the type
     * mapping's note mapper (no-op when the mapping declares none).
     */
    private <BT extends BratType> void mapNote(BratUimaTypeMappingBase<BT> typeMapping,
                                               BratAnnotation<BT> bAnno, AnnotationFS uAnno) {
        BratNoteMapper noteMapper = typeMapping.noteMapper;
        if (noteMapper == null) {
            return;
        }
        Collection<BratNoteAnnotation> notes = bratContainer.getNotes(bAnno);
        for (BratNoteAnnotation note : notes) {
            try {
                noteMapper.parseNote(uAnno, note.getContent());
            } catch (Exception e) {
                // NOTE(review): the original cause 'e' is dropped here; consider
                // passing it as the second IllegalStateException argument.
                throw new IllegalStateException(String.format(
                        "Can't parse note %s in document %s",
                        note, currentDocName));
            }
        }
    }

    /**
     * Adds a zero-length DocumentMetadata annotation carrying the document's
     * base name (as sourceUri) and its character length (as documentSize).
     */
    private void setDocumentMetadata(CAS cas, String docName, int docSize) {
        AnnotationFS docMeta = cas.createAnnotation(documentMetadataType, 0, 0);
        docMeta.setLongValue(docMetaSizeFeature, docSize);
        docMeta.setStringValue(docMetaUriFeature, docName);
        cas.addFsToIndexes(docMeta);
    }

    @Override
    public Progress[] getProgress() {
        return new Progress[]{
                new ProgressImpl(docsRead, totalDocsNum, Progress.ENTITIES)
        };
    }

    /**
     * Returns the two argument annotations of a mapped binary relation; both
     * role features must already be filled (enforced with IllegalStateException).
     */
    private List<AnnotationFS> getRelationArgs(AnnotationFS anno, Set<Feature> argFeatures) {
        List<AnnotationFS> result = Lists.newLinkedList();
        assert argFeatures.size() == 2;
        for (Feature f : argFeatures) {
            AnnotationFS argAnno = (AnnotationFS) anno.getFeatureValue(f);
            if (argAnno == null) {
                throw new IllegalStateException();
            }
            result.add(argAnno);
        }
        return result;
    }

    /**
     * Creates the UIMA structure annotation for a brat relation/event and fills
     * its role features from already-mapped brat annotations. Collection-ranged
     * features receive all role values; single-valued features take the first
     * value (logging an error if more were present). Span is left at (0, 0)
     * for the caller to set.
     */
    private <BT extends BratType, BA extends BratStructureAnnotation<BT>> AnnotationFS mapStructureRoles(
            BA bAnno, BratUimaStructureMapping<BT> strMapping) {
        AnnotationFS result = cas.createAnnotation(strMapping.uimaType, 0, 0);
        for (Feature roleFeature : strMapping.featureRoles.keySet()) {
            String roleName = strMapping.featureRoles.get(roleFeature);
            Collection<BratAnnotation<?>> roleBratAnnos = bAnno.getRoleAnnotations().get(roleName);
            if (roleBratAnnos.isEmpty()) {
                continue;
            }
            List<AnnotationFS> roleUimaAnnos = Lists.newLinkedList();
            for (BratAnnotation<?> roleBratAnno : roleBratAnnos) {
                AnnotationFS roleUimaAnno = mappingCtx.getMapped(roleBratAnno);
                if (roleUimaAnno == null) {
                    throw new IllegalStateException(String.format(
                            "Brat annotation %s has not been mapped", roleBratAnno));
                }
                roleUimaAnnos.add(roleUimaAnno);
            }
            FeatureStructure featVal;
            if (PUtils.hasCollectionRange(roleFeature)) {
                featVal = PUtils.toCompatibleCollection(cas, roleFeature, roleUimaAnnos);
            } else {
                if (roleUimaAnnos.size() > 1) {
                    getLogger().error(String.format(
                            "Too much role '%s' values in anno %s in doc %s. " +
                                    "Only the first value will be mapped.",
                            roleName, bAnno.getId(), currentDocName));
                }
                featVal = roleUimaAnnos.get(0);
            }
            result.setFeatureValue(roleFeature, featVal);
        }
        return result;
    }

    /**
     * Per-document registry of brat-id -> UIMA annotation, so role references
     * can be resolved and double-mapping detected.
     */
    private class FromBratMappingContext {
        private Map<String, AnnotationFS> mappedAnnotations = Maps.newHashMap();

        private boolean isMapped(BratAnnotation<?> bAnno) {
            return mappedAnnotations.containsKey(bAnno.getId());
        }

        private AnnotationFS getMapped(BratAnnotation<?> bAnno) {
            return mappedAnnotations.get(bAnno.getId());
        }

        private void mapped(BratAnnotation<?> bAnno, AnnotationFS uAnno) {
            if (mappedAnnotations.put(bAnno.getId(), uAnno) != null) {
                // sanity check
                throw new IllegalStateException();
            }
        }
    }
}
package br.com.makadu.makaduevento.adapters;

import android.content.Context;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseExpandableListAdapter;
import android.widget.TextView;

import java.util.HashMap;
import java.util.List;

import br.com.makadu.makaduevento.R;
import br.com.makadu.makaduevento.Util.Util;
import br.com.makadu.makaduevento.model.Speaker;
import br.com.makadu.makaduevento.model.Talk;
import br.com.makadu.makaduevento.model.Question;

/**
 * Created by lucasschwalbeferreira on 08/04/15.
 *
 * Expandable adapter for the talk-detail screen with three fixed groups:
 * group 0 = talk description rows, group 1 = speaker rows, group 2 = question rows.
 */
public class TalkDetailExpandableAdapter extends BaseExpandableListAdapter {

    private Talk talk;
    private List<String> listGroup;                          // group headers, index = group position
    private HashMap<String, List<Talk>> listDataProg;        // group 0 children
    private HashMap<String, List<Speaker>> listDataPalestrante; // group 1 children
    private HashMap<String, List<Question>> listDataQuestion;   // group 2 children
    private LayoutInflater inflater;

    public TalkDetailExpandableAdapter(Context context, List<String> listGroup,
                                       HashMap<String, List<Talk>> listDataProg,
                                       HashMap<String, List<Speaker>> listDataPalestrante,
                                       HashMap<String, List<Question>> listDataQuestion,
                                       Talk talk) {
        this.talk = talk;
        this.listGroup = listGroup;
        this.listDataProg = listDataProg;
        this.listDataPalestrante = listDataPalestrante;
        this.listDataQuestion = listDataQuestion;
        inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    }

    @Override
    public int getGroupCount() {
        return listGroup.size();
    }

    /** Child count is taken from the map matching the fixed group index. */
    @Override
    public int getChildrenCount(int groupPosition) {
        if (groupPosition == 0) {
            return listDataProg.get(listGroup.get(groupPosition)).size();
        } else if (groupPosition == 1) {
            return listDataPalestrante.get(listGroup.get(groupPosition)).size();
        } else {
            return listDataQuestion.get(listGroup.get(groupPosition)).size();
        }
    }

    @Override
    public Object getGroup(int groupPosition) {
        return listGroup.get(groupPosition);
    }

    /** Child accessor for group 0 (talk description rows). */
    @Override
    public Talk getChild(int groupPosition, int childPosition) {
        return listDataProg.get(listGroup.get(groupPosition)).get(childPosition);
    }

    /** Child accessor for group 1 (speaker rows). */
    public Speaker getChildPalestrante(int groupPosition, int childPosition) {
        return listDataPalestrante.get(listGroup.get(groupPosition)).get(childPosition);
    }

    /** Child accessor for group 2 (question rows). */
    public Question getChildQuestion(int groupPosition, int childPosition) {
        return listDataQuestion.get(listGroup.get(groupPosition)).get(childPosition);
    }

    @Override
    public long getGroupId(int groupPosition) {
        return groupPosition;
    }

    @Override
    public long getChildId(int groupPosition, int childPosition) {
        return childPosition;
    }

    @Override
    public boolean hasStableIds() {
        return false;
    }

    @Override
    public View getGroupView(int groupPosition, boolean isExpanded, View convertView,
                             ViewGroup parent) {
        ViewHolderGroup holder;
        if (convertView == null) {
            convertView = inflater.inflate(R.layout.header_expandable_list_view_programacao_about, null);
            holder = new ViewHolderGroup();
            convertView.setTag(holder);
            holder.tvGroup = (TextView) convertView.findViewById(R.id.txt_header_programacao_about);
        } else {
            holder = (ViewHolderGroup) convertView.getTag();
        }
        holder.tvGroup.setText(listGroup.get(groupPosition));
        return convertView;
    }

    /**
     * Builds the row for a child, per group.
     *
     * FIX: the original duplicated ~40 identical lines in both branches of
     * {@code if (convertView == null)} — the else branch re-inflated the row
     * anyway, so recycling never happened. The duplicate branch is removed;
     * runtime behavior (always inflate) is unchanged. True recycling would
     * require per-group view types, which the original never implemented.
     */
    @Override
    public View getChildView(int groupPosition, final int childPosition, boolean isLastChild,
                             View convertView, ViewGroup parent) {
        Log.d("erro_detail", "childPosition: " + childPosition + " groupPosition: " + groupPosition);
        if (groupPosition == 0) {
            final Talk val = getChild(groupPosition, childPosition);
            convertView = inflater.inflate(R.layout.row_about_talk, null);
            ViewHolderItemAbout holderAbout = new ViewHolderItemAbout();
            convertView.setTag(holderAbout);
            holderAbout.about = (TextView) convertView.findViewById(R.id.txt_about_talk);
            holderAbout.about.setText(val.description);
        } else if (groupPosition == 1) {
            final Speaker val = getChildPalestrante(groupPosition, childPosition);
            convertView = inflater.inflate(R.layout.row_speaker, null);
            ViewHolderItemPalestrante holderPalestrante = new ViewHolderItemPalestrante();
            convertView.setTag(holderPalestrante);
            holderPalestrante.name = (TextView) convertView.findViewById(R.id.txtNome_palestrante);
            holderPalestrante.name.setText(val.getNome());
            holderPalestrante.about_speaker = (TextView) convertView.findViewById(R.id.txtObs);
            holderPalestrante.about_speaker.setText(val.getDescricao_palestrante());
        } else if (groupPosition == 2) {
            final Question val = getChildQuestion(groupPosition, childPosition);
            convertView = inflater.inflate(R.layout.row_question, null);
            ViewHolderItemQuestion holderQuestion = new ViewHolderItemQuestion();
            convertView.setTag(holderQuestion);
            holderQuestion.question = (TextView) convertView.findViewById(R.id.txt_question_talk);
            holderQuestion.question.setText(val.getQuestion());
            holderQuestion.speaker = (TextView) convertView.findViewById(R.id.txt_palestrante_talk);
            holderQuestion.speaker.setText(val.getSpeaker());
            holderQuestion.date = (TextView) convertView.findViewById(R.id.txt_date_hora_talk);
            holderQuestion.date.setText(val.getDate());
        }
        return convertView;
    }

    @Override
    public boolean isChildSelectable(int groupPosition, int childPosition) {
        return true;
    }

    /** Holder for group header rows. */
    class ViewHolderGroup {
        TextView tvGroup;
    }

    /** Holder for group-0 (description) rows. */
    class ViewHolderItemAbout {
        TextView about;
    }

    /** Holder for group-1 (speaker) rows. */
    class ViewHolderItemPalestrante {
        TextView name;
        TextView about_speaker;
    }

    /** Holder for group-2 (question) rows. */
    class ViewHolderItemQuestion {
        TextView question;
        TextView speaker;
        TextView date;
    }
}
package org.droidparts.http.worker;

import static com.neopixl.restpixl.NPRestPixlConf.FORMAT;
import static org.droidparts.util.io.IOUtils.silentlyClose;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.OutputStream;
import java.net.Authenticator;
import java.net.CookieHandler;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.PasswordAuthentication;
import java.net.Proxy;
import java.net.URL;
import java.net.UnknownHostException;

import org.apache.http.auth.AuthScope;
import org.droidparts.http.CookieJar;
import org.droidparts.http.HTTPException;
import org.droidparts.http.HTTPResponse;

import android.content.Context;
import android.util.Log;
import android.util.Pair;

/**
 * {@link HTTPWorker} implementation backed by {@link HttpURLConnection}.
 * <p>
 * Requests advertise gzip/deflate support and keep-alive. Cookies and basic
 * auth are installed through the JVM-wide defaults
 * ({@link CookieHandler#setDefault} / {@link Authenticator#setDefault}), so
 * they affect every {@code HttpURLConnection} in the process, not just this
 * worker.
 */
public class HttpURLConnectionWorker extends HTTPWorker {

	// HTTP method names used with setRequestMethod().
	public static final String GET = "GET";
	public static final String PUT = "PUT";
	public static final String POST = "POST";
	public static final String DELETE = "DELETE";

	// Optional proxy applied to every connection opened by this worker.
	private Proxy proxy;
	// One-shot basic-auth credentials; consumed by FixedAuthenticator below.
	private PasswordAuthentication passAuth;
	// Scope (host/port/realm/scheme) the credentials apply to; AuthScope.ANY
	// means "everywhere".
	private AuthScope authScope;

	// ICS+
	/**
	 * Enables or disables the system HTTP response cache (10 MiB, in the
	 * app's cache dir). android.net.http.HttpResponseCache only exists on
	 * API 13+, so it is accessed reflectively; on older platforms the
	 * ClassNotFoundException is logged and the call is a no-op.
	 */
	public static void setHttpResponseCacheEnabled(Context ctx, boolean enabled) {
		File cacheDir = new File(ctx.getCacheDir(), "http");
		long cacheSize = 10 * 1024 * 1024; // 10 MiB
		try {
			Class<?> cls = Class.forName("android.net.http.HttpResponseCache");
			if (enabled) {
				// HttpResponseCache.install(cacheDir, cacheSize)
				cls.getMethod("install", File.class, long.class).invoke(null, cacheDir, cacheSize);
			} else {
				// HttpResponseCache.getInstalled(); delete it if present.
				Object instance = cls.getMethod("getInstalled").invoke(null);
				if (instance != null) {
					cls.getMethod("delete").invoke(instance);
				}
			}
		} catch (Exception e) {
			// Best effort: cache support is optional.
			Log.i("RestPixl", e.toString());
		}
	}

	public HttpURLConnectionWorker(String userAgent) {
		super(userAgent);
	}

	/** Installs the jar as the process-wide default cookie handler. */
	@Override
	public void setCookieJar(CookieJar cookieJar) {
		CookieHandler.setDefault(cookieJar);
	}

	/**
	 * Stores basic-auth credentials to be registered (via the default
	 * {@link Authenticator}) the next time a connection is built.
	 */
	@Override
	public void authenticateBasic(String user, String password, AuthScope scope) {
		passAuth = new PasswordAuthentication(user, password.toCharArray());
		authScope = scope;
	}

	/** Sets the proxy used by subsequently opened connections (null = direct). */
	public void setProxy(Proxy proxy) {
		this.proxy = proxy;
	}

	/**
	 * Opens an unconnected {@link HttpURLConnection} for the given URL and
	 * method, copying the inherited default {@code headers} (from HTTPWorker)
	 * onto it and enabling output for PUT/POST.
	 *
	 * @throws HTTPException wrapping any failure (malformed URL, I/O, ...)
	 */
	public HttpURLConnection getConnection(String urlStr, String requestMethod) throws HTTPException {
		try {
			URL url = new URL(urlStr);
			HttpURLConnection conn;
			if (proxy != null) {
				conn = (HttpURLConnection) url.openConnection(proxy);
			} else {
				conn = (HttpURLConnection) url.openConnection();
			}
			// headers is inherited from HTTPWorker — presumably a name ->
			// list-of-values map of default request headers; TODO confirm.
			for (String key : headers.keySet()) {
				for (String val : headers.get(key)) {
					conn.addRequestProperty(key, val);
				}
			}
			conn.setRequestProperty("Accept-Encoding", "gzip,deflate");
			conn.setRequestProperty("Connection", "Keep-Alive");
			setupBasicAuth();
			conn.setRequestMethod(requestMethod);
			if (PUT.equals(requestMethod) || POST.equals(requestMethod)) {
				// Required before writing a request body.
				conn.setDoOutput(true);
			}
			return conn;
		} catch (Exception e) {
			throw new HTTPException(e);
		}
	}

	/**
	 * Writes {@code data} (encoded with the configured FORMAT charset) as the
	 * request body. A null body is allowed: only the headers are set.
	 * Note "Connection: Keep-Alive" is also set in getConnection(), so this
	 * repeats it for callers that built the connection elsewhere.
	 */
	public static void postOrPut(HttpURLConnection conn, String contentType, String data) throws HTTPException {
		conn.setRequestProperty("Accept-Charset", FORMAT);
		conn.setRequestProperty("Content-Type", contentType);
		conn.setRequestProperty("Connection", "Keep-Alive");
		if (data != null) {
			OutputStream os = null;
			try {
				os = conn.getOutputStream();
				os.write(data.getBytes(FORMAT));
			} catch (Exception e) {
				throw new HTTPException(e);
			} finally {
				silentlyClose(os);
			}
		}
	}

	/**
	 * Executes the request and fully reads the response body.
	 * (sic: method name typo "getReponse" preserved — it is public API.)
	 *
	 * @throws HTTPException on I/O failure or an HTTP error status
	 */
	public static HTTPResponse getReponse(HttpURLConnection conn) throws HTTPException {
		HTTPResponse response = new HTTPResponse();
		response.code = connectAndGetResponseCodeOrThrow(conn);
		response.headers = conn.getHeaderFields();
		// Second flag appears to select the error stream — confirm against
		// HTTPInputStream; here the normal stream is read and closed.
		response.body = HTTPInputStream.getInstance(conn, false).readAndClose();
		return response;
	}

	/**
	 * GETs {@code uri} and returns (Content-Length, open body stream).
	 * Caller owns and must close the stream; contentLength is -1 when the
	 * server did not send the header (HttpURLConnection contract).
	 */
	public Pair<Integer, BufferedInputStream> getInputStream(String uri) throws HTTPException {
		HttpURLConnection conn = getConnection(uri, GET);
		HttpURLConnectionWorker.connectAndGetResponseCodeOrThrow(conn);
		int contentLength = conn.getContentLength();
		HTTPInputStream is = HTTPInputStream.getInstance(conn, false);
		return new Pair<Integer, BufferedInputStream>(contentLength, is);
	}

	/**
	 * Connects and returns the HTTP status code; on an error status the
	 * (error or normal) body is read and wrapped into the thrown
	 * HTTPException so callers get the server's message.
	 */
	private static int connectAndGetResponseCodeOrThrow(HttpURLConnection conn) throws HTTPException {
		try {
			conn.connect();
			int respCode = conn.getResponseCode();
			if (isErrorResponseCode(respCode)) {
				HTTPInputStream is = HTTPInputStream.getInstance(conn, (conn.getErrorStream() != null));
				throw new HTTPException(respCode, is.readAndClose());
			}
			return respCode;
		} catch (HTTPException e) {
			// Re-throw our own exception unchanged rather than re-wrapping it.
			throw e;
		} catch (Exception e) {
			throw new HTTPException(e);
		}
	}

	/**
	 * Registers the stored credentials as the process-wide default
	 * authenticator, scoped to authScope's host when one is given.
	 */
	private void setupBasicAuth() {
		if (passAuth != null) {
			Authenticator.setDefault(new FixedAuthenticator(passAuth));
			if (!AuthScope.ANY.equals(authScope)) {
				InetAddress host = null;
				if (authScope.getHost() != null) {
					try {
						host = InetAddress.getByName(authScope.getHost());
					} catch (UnknownHostException e) {
						// Unresolvable host: disable auth entirely rather
						// than sending credentials to the wrong place.
						Log.e("RestPixl", "Failed to setup basic auth.");
						Log.d("RestPixl", e.toString());
						Authenticator.setDefault(null);
						return;
					}
				}
				int port = (authScope.getPort() == AuthScope.ANY_PORT) ? 0 : authScope.getPort();
				// NOTE(review): the return value of this call is discarded,
				// and it triggers the default authenticator — which is the
				// one-shot FixedAuthenticator just installed above, whose
				// credentials are cleared after first use. This looks like it
				// consumes the credentials before the real request can use
				// them; confirm intent.
				Authenticator.requestPasswordAuthentication(host, port, null, authScope.getRealm(), authScope.getScheme());
			}
		}
	}

	/**
	 * Authenticator that hands out its credentials exactly once, then
	 * returns null — prevents infinite retry loops on wrong credentials.
	 */
	private static class FixedAuthenticator extends Authenticator {

		private PasswordAuthentication passAuth;

		public FixedAuthenticator(PasswordAuthentication passAuth) {
			this.passAuth = passAuth;
		}

		@Override
		protected PasswordAuthentication getPasswordAuthentication() {
			try {
				return passAuth;
			} finally {
				// One-shot: subsequent challenges get null.
				passAuth = null;
			}
		}
	}
}
/*
 * Copyright 2003-2011 Dave Griffith, Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ipp.psiutils;

import com.intellij.openapi.project.Project;
import com.intellij.psi.*;

/**
 * Static helpers that conservatively approximate the JLS §14.21
 * "can complete normally" analysis over PSI statements, plus break-related
 * queries. Utility class: not instantiable.
 */
public class ControlFlowUtils {

    private ControlFlowUtils() {
    }

    /**
     * Returns whether {@code statement} may complete normally, i.e. whether
     * control can fall through past it (as opposed to always transferring
     * control via break/continue/return/throw or looping forever).
     * A null statement matches no instanceof branch and yields {@code false}.
     */
    public static boolean statementMayCompleteNormally(PsiStatement statement) {
        // Unconditional control transfer: never completes normally.
        if (statement instanceof PsiBreakStatement ||
            statement instanceof PsiContinueStatement ||
            statement instanceof PsiReturnStatement ||
            statement instanceof PsiThrowStatement) {
            return false;
        }
        // Simple statements always fall through.
        // (Assert/expression statements may throw, but per JLS they still
        // "may" complete normally.)
        else if (statement instanceof PsiExpressionListStatement ||
                 statement instanceof PsiExpressionStatement ||
                 statement instanceof PsiEmptyStatement ||
                 statement instanceof PsiAssertStatement ||
                 statement instanceof PsiDeclarationStatement) {
            return true;
        }
        else if (statement instanceof PsiForStatement) {
            final PsiForStatement loopStatement = (PsiForStatement)statement;
            final PsiExpression test = loopStatement.getCondition();
            // Falls through if the condition exists and is not the constant
            // true, or if some break targets this loop. A missing condition
            // (for(;;)) only exits via break.
            return test != null && !isBooleanConstant(test, true)
                   || statementIsBreakTarget(loopStatement);
        }
        else if (statement instanceof PsiForeachStatement) {
            // The iterated collection can always be empty.
            return true;
        }
        else if (statement instanceof PsiWhileStatement) {
            final PsiWhileStatement loopStatement =
                (PsiWhileStatement)statement;
            final PsiExpression test = loopStatement.getCondition();
            // while(true) completes normally only via a targeting break.
            return !isBooleanConstant(test, true)
                   || statementIsBreakTarget(loopStatement);
        }
        else if (statement instanceof PsiDoWhileStatement) {
            final PsiDoWhileStatement loopStatement =
                (PsiDoWhileStatement)statement;
            final PsiExpression test = loopStatement.getCondition();
            final PsiStatement body = loopStatement.getBody();
            // The body executes at least once, so it must itself be able to
            // complete normally (and the condition must not be constant
            // true), unless a break exits the loop.
            return statementMayCompleteNormally(body) &&
                   !isBooleanConstant(test, true)
                   || statementIsBreakTarget(loopStatement);
        }
        else if (statement instanceof PsiSynchronizedStatement) {
            final PsiCodeBlock body =
                ((PsiSynchronizedStatement)statement).getBody();
            return codeBlockMayCompleteNormally(body);
        }
        else if (statement instanceof PsiBlockStatement) {
            final PsiCodeBlock codeBlock =
                ((PsiBlockStatement)statement).getCodeBlock();
            return codeBlockMayCompleteNormally(codeBlock);
        }
        else if (statement instanceof PsiLabeledStatement) {
            final PsiLabeledStatement labeledStatement =
                (PsiLabeledStatement)statement;
            final PsiStatement body = labeledStatement.getStatement();
            // A labeled break targeting the body also falls through here.
            return statementMayCompleteNormally(body) ||
                   statementIsBreakTarget(body);
        }
        else if (statement instanceof PsiIfStatement) {
            final PsiIfStatement ifStatement = (PsiIfStatement)statement;
            final PsiStatement thenBranch = ifStatement.getThenBranch();
            if (statementMayCompleteNormally(thenBranch)) {
                return true;
            }
            final PsiStatement elseBranch = ifStatement.getElseBranch();
            // No else branch: the condition may be false, so fall-through.
            return elseBranch == null ||
                   statementMayCompleteNormally(elseBranch);
        }
        else if (statement instanceof PsiTryStatement) {
            final PsiTryStatement tryStatement = (PsiTryStatement)statement;
            final PsiCodeBlock finallyBlock = tryStatement.getFinallyBlock();
            // A finally that cannot complete normally dominates everything.
            if (finallyBlock != null) {
                if (!codeBlockMayCompleteNormally(finallyBlock)) {
                    return false;
                }
            }
            final PsiCodeBlock tryBlock = tryStatement.getTryBlock();
            if (codeBlockMayCompleteNormally(tryBlock)) {
                return true;
            }
            // Otherwise any catch block that falls through suffices.
            final PsiCodeBlock[] catchBlocks = tryStatement.getCatchBlocks();
            for (final PsiCodeBlock catchBlock : catchBlocks) {
                if (codeBlockMayCompleteNormally(catchBlock)) {
                    return true;
                }
            }
            return false;
        }
        else if (statement instanceof PsiSwitchStatement) {
            final PsiSwitchStatement switchStatement =
                (PsiSwitchStatement)statement;
            if (statementIsBreakTarget(switchStatement)) {
                return true;
            }
            final PsiCodeBlock body = switchStatement.getBody();
            if (body == null) {
                return true;
            }
            // With no targeting break, fall-through past the switch depends
            // only on whether the final (non-label) statement can complete
            // normally. Trailing labels mean an empty final group.
            final PsiStatement[] statements = body.getStatements();
            int lastNonLabelOffset = -1;
            final int lastStatementIndex = statements.length - 1;
            for (int i = lastStatementIndex; i >= 0; i--) {
                if (!(statements[i] instanceof PsiSwitchLabelStatement)) {
                    lastNonLabelOffset = i;
                    break;
                }
            }
            if (lastNonLabelOffset == -1) {
                return true; // it's all labels
            }
            else if (lastNonLabelOffset == lastStatementIndex) {
                return statementMayCompleteNormally(
                    statements[lastStatementIndex]);
            }
            else {
                return true; // the last statement is a label
            }
        }
        else {
            // Unknown/unsupported statement kinds (and null): conservatively
            // report "cannot complete normally".
            return false;
        }
    }

    /**
     * A code block completes normally iff every contained statement does;
     * a null or empty block trivially does.
     */
    private static boolean codeBlockMayCompleteNormally(PsiCodeBlock block) {
        if (block == null) {
            return true;
        }
        final PsiStatement[] statements = block.getStatements();
        for (final PsiStatement statement : statements) {
            if (!statementMayCompleteNormally(statement)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns true iff {@code expression} is a compile-time boolean constant
     * equal to {@code b} (per PSI constant evaluation; the second argument
     * to computeConstantExpression disables throwing on overflow).
     */
    private static boolean isBooleanConstant(PsiExpression expression, boolean b) {
        if (expression == null) {
            return false;
        }
        final Project project = expression.getProject();
        final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
        final PsiConstantEvaluationHelper constantEvaluationHelper =
            psiFacade.getConstantEvaluationHelper();
        final Object value =
            constantEvaluationHelper.computeConstantExpression(expression, false);
        if (!(value instanceof Boolean)) {
            return false;
        }
        final Boolean aBoolean = (Boolean)value;
        return aBoolean.booleanValue() == b;
    }

    /** True iff some break statement inside {@code statement} exits it. */
    private static boolean statementIsBreakTarget(PsiStatement statement) {
        if (statement == null) {
            return false;
        }
        final BreakTargetFinder breakFinder =
            new BreakTargetFinder(statement);
        statement.accept(breakFinder);
        return breakFinder.breakFound();
    }

    /**
     * True iff {@code statement} contains an unlabeled break not already
     * swallowed by a nested loop/switch (those are skipped — see
     * NakedBreakFinder's deliberately-empty visit overrides).
     */
    public static boolean statementContainsNakedBreak(PsiStatement statement) {
        if (statement == null) {
            return false;
        }
        final NakedBreakFinder breakFinder = new NakedBreakFinder();
        statement.accept(breakFinder);
        return breakFinder.breakFound();
    }

    /** Visitor that looks for a break whose exited statement is a given target. */
    private static class BreakTargetFinder
        extends JavaRecursiveElementWalkingVisitor {

        private boolean m_found = false;
        private final PsiStatement m_target;

        private BreakTargetFinder(PsiStatement target) {
            m_target = target;
        }

        public boolean breakFound() {
            return m_found;
        }

        @Override
        public void visitElement(PsiElement element) {
            // Short-circuit once a match is found.
            if (m_found) {
                return;
            }
            super.visitElement(element);
        }

        @Override
        public void visitReferenceExpression(
            PsiReferenceExpression expression) {
            // Deliberately empty: does not call super, so the walk never
            // descends into reference expressions (they cannot contain
            // break statements).
        }

        @Override
        public void visitBreakStatement(PsiBreakStatement statement) {
            super.visitBreakStatement(statement);
            final PsiStatement exitedStatement =
                statement.findExitedStatement();
            if (exitedStatement == null) {
                return;
            }
            if (exitedStatement.equals(m_target)) {
                m_found = true;
            }
        }
    }

    /**
     * Visitor that finds an unlabeled ("naked") break, without descending
     * into nested constructs that would consume such a break themselves.
     */
    private static class NakedBreakFinder
        extends JavaRecursiveElementWalkingVisitor {

        private boolean m_found = false;

        public boolean breakFound() {
            return m_found;
        }

        @Override
        public void visitElement(PsiElement element) {
            if (m_found) {
                return;
            }
            super.visitElement(element);
        }

        @Override
        public void visitReferenceExpression(
            PsiReferenceExpression expression) {
            // Deliberately empty — no break can appear inside a reference.
        }

        @Override
        public void visitBreakStatement(PsiBreakStatement statement) {
            // A labeled break targets an enclosing statement, not this one.
            if (statement.getLabelIdentifier() != null) {
                return;
            }
            m_found = true;
        }

        @Override
        public void visitDoWhileStatement(
            PsiDoWhileStatement statement) {
            // don't drill down
        }

        @Override
        public void visitForStatement(PsiForStatement statement) {
            // don't drill down
        }

        @Override
        public void visitForeachStatement(PsiForeachStatement statement) {
            // don't drill down
        }

        @Override
        public void visitWhileStatement(PsiWhileStatement statement) {
            // don't drill down
        }

        @Override
        public void visitSwitchStatement(
            PsiSwitchStatement statement) {
            // don't drill down
        }
    }
}
package org.colin.Hanoi; class Hanoi { public static final int NUM_TOWERS = 3; public static final int START_TOWER = 0; public static final int END_TOWER = 2; public static final int MAX_BLOCKS = 12; class Move { int from_tower; int to_tower; Move() { from_tower = 0; to_tower = 0; } }; class Tower { boolean blocks[]; Tower() { blocks = new boolean [MAX_BLOCKS]; for (int i = 0; i < MAX_BLOCKS; ++i) blocks[i] = false; } }; public Hanoi(HanoiView view) { init(); view_ = view; } public void setTowerHeight(int h) { towerHeight_ = h; } public int getTowerHeight() { return towerHeight_; } public void init() { tower_ = new Tower [NUM_TOWERS]; for (int i = 0; i < NUM_TOWERS; ++i) tower_[i] = new Tower(); current_move = new Move(); moving_ = false; setTowerHeight(7); newGame(); } public void newGame() { int tower_height = getTowerHeight(); /* Set up the Blocks so they are all on the start tower */ for (int t = 0; t < NUM_TOWERS; ++t) { Tower tower = tower_[t]; for (int i = 0; i < tower_height; ++i) tower.blocks[i] = false; } Tower tower = tower_[START_TOWER]; for (int i = 0; i < tower_height; ++i) tower.blocks[i] = true; //------ /* Reset Current Number of Moves */ number_of_moves = 0; //------ /* Reset the Target Number of Moves */ target_number_of_moves = 1; for (int i = 0; i < tower_height; ++i) target_number_of_moves *= 2; target_number_of_moves--; //------ /* Set up Drawing Constants */ total_block_width = 25; total_block_height = 50; block_spacing = 1; block_width = total_block_width/tower_height; block_height = (total_block_height - block_spacing*(tower_height - 1))/tower_height; border_width = (100 - NUM_TOWERS*total_block_width )/6; border_height = (100 - total_block_height)/2; } public void draw() { //canvas.setDataRange(0, 0, 100, 100); int current_position[] = new int [NUM_TOWERS]; for (int i = 0; i < NUM_TOWERS; ++i) current_position[i] = 0; //------ /* Draw Towers */ for (int i = 0; i < NUM_TOWERS; ++i) drawTower(i); //------ /* Draw Blocks */ int tower_height 
= getTowerHeight(); for (int t = 0; t < NUM_TOWERS; ++t) { Tower tower = tower_[t]; for (int i = 0; i < tower_height; ++i) { if (! tower.blocks[i]) continue; drawBlock(tower_height - i, t, i, current_position[t]++); } } //------ /* Display Number of Moves */ String msg = "Moves = " + number_of_moves + " : Target = " + target_number_of_moves; centerText(50.0, 0.0, -0.5, 1.0, msg); //------ /* Display if puzzled solved */ if (getTopOfTower(0) == -1 && getTopOfTower(1) == -1) screenMessage("Congratulations - Puzzle Solved"); } public void buttonPress(double x, double y) { moving_ = selectBlock(x, y); press_x_ = x; press_y_ = y; move_dx_ = 0.0; move_dy_ = 0.0; } public void buttonMotion(double x, double y) { move_dx_ = x - press_x_; move_dy_ = press_y_ - y; } public void buttonRelease(double x, double y) { releaseBlock(x, y); moving_ = false; } public void drawBlock(int width, int tower, int ind, int position) { //final double offsets[] = { 1, 2, 2.9, 3.7, 4.35, 4.9, 5.4, 6.0, 7, 8, 9, 10 }; double min_x = border_width + tower*(total_block_width + 2*border_width); double min_y = border_height; int tower_height = getTowerHeight(); double x_offset = (tower_height - width)*block_width/2; double y_offset = position*(block_height + block_spacing); double x1 = min_x + x_offset; double y1 = min_y + y_offset; double x2 = x1 + (width*block_width); double y2 = y1 + block_height; if (moving_ && current_move.from_tower == tower) { int from_top = getTopOfTower(current_move.from_tower); if (ind == from_top) { x1 += move_dx_; y1 += move_dy_; x2 += move_dx_; y2 += move_dy_; } } view_.drawBlock(x1, y1, x2, y2, ind, position); } public void drawTower(int tower) { int tower_height = getTowerHeight(); double min_x = border_width + tower*(total_block_width + 2*border_width); double min_y = border_height - 0.5; double max_x = min_x + total_block_width; double max_y = min_y + tower_height*(block_height + block_spacing); view_.drawLine(min_x, min_y, max_x, min_y); view_.drawLine((min_x + 
max_x)/2, min_y, (min_x + max_x)/2, max_y); String msg = "Tower " + (tower + 1); centerText((min_x + max_x)/2, max_y + 2, -0.5, 1.0, msg); } public boolean selectBlock(double x, double y) { double min_x[] = new double [NUM_TOWERS]; double max_x[] = new double [NUM_TOWERS]; for (int i = 0; i < NUM_TOWERS; ++i) { min_x[i] = border_width + i*(total_block_width + 2*border_width); max_x[i] = min_x[i] + total_block_width; } current_move.from_tower = -1; for (int i = 0; i < NUM_TOWERS; ++i) if (x >= min_x[i] && x <= max_x[i]) current_move.from_tower = i; if (current_move.from_tower == -1) { if (x <= max_x[0]) current_move.from_tower = 0; else current_move.from_tower = 2; } return true; } public void releaseBlock(double x, double y) { double min_x[] = new double [NUM_TOWERS]; double max_x[] = new double [NUM_TOWERS]; for (int i = 0; i < NUM_TOWERS; ++i) { min_x[i] = border_width + i*(total_block_width + 2*border_width); max_x[i] = min_x[i] + total_block_width; } current_move.to_tower = -1; for (int i = 0; i < NUM_TOWERS; ++i) if (x >= min_x[i] && x <= max_x[i]) current_move.to_tower = i; if (current_move.to_tower == -1) { if (x <= max_x[0]) current_move.to_tower = 0; else current_move.to_tower = 2; } if (validMove()) { doMove(); } else { //errMsg_ = "Invalid Move - Blocks must always be stacked by size"; } } public void solve(int block, int tower) { int tower_height = getTowerHeight(); if (block == tower_height - 1) { if (getTowerForBlock(block) != tower) { current_move.from_tower = getTowerForBlock(block); current_move.to_tower = tower; doMove(); } } else { if (getTowerForBlock(block) == tower) solve(block + 1, tower); else solve(block + 1, notTower(getTowerForBlock(block), tower)); current_move.from_tower = getTowerForBlock(block); current_move.to_tower = tower; doMove(); solve(block + 1, tower); } } public int notTower(int tower1, int tower2) { return (NUM_TOWERS - tower1 - tower2); } public int getTowerForBlock(int block) { for (int t = 0; t < NUM_TOWERS; ++t) { Tower 
tower = tower_[t]; if (tower.blocks[block]) return t; } return -1; } public boolean validMove() { return validMove(current_move.from_tower, current_move.to_tower); } public boolean validMove(int from_tower, int to_tower) { int from_top = getTopOfTower(from_tower); int to_top = getTopOfTower(to_tower); if (from_top > to_top) return true; else return false; } public int getTopOfTower(int tower_number) { int top = -1; int tower_height = getTowerHeight(); Tower tower = tower_[tower_number]; for (int i = 0; i < tower_height; ++i) { if (tower.blocks[i]) top = i; } return top; } public void doMove() { doMove(current_move.from_tower, current_move.to_tower); } public void doMove(int from_tower, int to_tower) { int from_top = getTopOfTower(from_tower); Tower tower1 = tower_[from_tower]; Tower tower2 = tower_[to_tower ]; tower1.blocks[from_top] = false; tower2.blocks[from_top] = true; ++number_of_moves; } public int countBlocksOnTower(int tower_number) { int count = 0; int tower_height = getTowerHeight(); Tower tower = tower_[tower_number]; for (int i = 0; i < tower_height; ++i) if (tower.blocks[i]) ++count; return count; } public boolean solved() { int tower_height = getTowerHeight(); Tower tower = tower_[END_TOWER]; for (int i = 0; i < tower_height; ++i) if (! 
tower.blocks[i]) return false; return true; } public boolean isBlock(int tower, int pos) { assert(tower < NUM_TOWERS); assert(pos < getTowerHeight()); return tower_[tower].blocks[pos]; } public void screenMessage(String message) { centerText(50.0, 100.0, -0.5, -1.0, message); } public void centerText(double x, double y, double xc, double yc, String text) { double text_width; double text_ascent; double text_descent; //canvas.getTextExtents(text, &text_width, &text_ascent, &text_descent); text_width = 10; text_ascent = 12; text_descent = 2; double xo = xc* text_width ; double yo = yc*(text_ascent + text_descent); view_.drawText(x + xo, y + yo, text); } HanoiView view_; Tower tower_[]; int towerHeight_; double total_block_width; double total_block_height; double block_width; double block_height; double border_width; double border_height; double block_spacing; Move current_move; int number_of_moves; int target_number_of_moves; boolean moving_; double press_x_; double press_y_; double move_dx_; double move_dy_; }
/**
 * Copyright 2017 Alex Yanchenko
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.droidparts.net.image;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.lang.ref.WeakReference;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadPoolExecutor;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;
import android.graphics.Point;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.TransitionDrawable;
import android.os.Handler;
import android.os.Looper;
import android.util.Pair;
import android.widget.ImageView;

import org.droidparts.concurrent.thread.BackgroundThreadExecutor;
import org.droidparts.contract.HTTP.Header;
import org.droidparts.inner.BitmapFactoryUtils;
import org.droidparts.inner.WeakWrapper;
import org.droidparts.net.http.HTTPResponse;
import org.droidparts.net.http.RESTClient;
import org.droidparts.net.image.cache.BitmapDiskCache;
import org.droidparts.net.image.cache.BitmapMemoryCache;
import org.droidparts.util.L;

import static android.graphics.Color.TRANSPARENT;
import static org.droidparts.contract.Constants.BUFFER_SIZE;
import static org.droidparts.util.IOUtils.silentlyClose;

/**
 * Asynchronous image loader: memory cache -> disk cache -> network, with
 * optional reshaping, cross-fade, and progress callbacks.
 * <p>
 * Each attach request is stamped with System.nanoTime() and recorded in
 * {@code wip}; only the most recent request per ImageViewSpec actually sets
 * the bitmap (see attachIfMostRecent), so recycled list views show the right
 * image. Cache reads run on a single-thread cacheExecutor; network fetches on
 * fetchExecutor.
 */
public class ImageFetcher {

	private final RESTClient restClient;
	private final BitmapMemoryCache memoryCache;
	private final BitmapDiskCache diskCache;

	protected final ThreadPoolExecutor cacheExecutor;
	protected final ThreadPoolExecutor fetchExecutor;

	// Used by getImage(String) as a size-hint stand-in for a real view.
	private final ImageView mockImageView;

	// Requests queued while paused, replayed by resume(true).
	// NOTE(review): accessed without synchronization; appears to assume all
	// attachImage/pause/resume calls happen on one (UI) thread — confirm.
	private final LinkedHashSet<ImageViewSpec> pending = new LinkedHashSet<ImageViewSpec>();
	// Spec -> nanoTime of the most recent request for it.
	private final Map<ImageViewSpec, Long> wip = new ConcurrentHashMap<ImageViewSpec, Long>();

	private Handler handler;

	private volatile boolean paused;

	public ImageFetcher(Context ctx) {
		this(ctx, BitmapMemoryCache.getDefaultInstance(ctx), BitmapDiskCache.getDefaultInstance(ctx));
	}

	public ImageFetcher(Context ctx, BitmapMemoryCache memoryCache, BitmapDiskCache diskCache) {
		this(ctx, new BackgroundThreadExecutor(2, "ImageFetcher-Fetch"), new RESTClient(ctx), memoryCache, diskCache);
	}

	protected ImageFetcher(Context ctx, ThreadPoolExecutor fetchExecutor, RESTClient restClient,
			BitmapMemoryCache memoryCache, BitmapDiskCache diskCache) {
		this.fetchExecutor = fetchExecutor;
		this.restClient = restClient;
		this.memoryCache = memoryCache;
		this.diskCache = diskCache;
		handler = new Handler(Looper.getMainLooper());
		// Single thread keeps cache access serialized.
		cacheExecutor = new BackgroundThreadExecutor(1, "ImageFetcher-Cache");
		mockImageView = new ImageView(ctx.getApplicationContext());
	}

	/** Defers subsequent attachImage() calls until resume(). */
	public void pause() {
		paused = true;
	}

	/**
	 * Resumes fetching; when {@code executePendingTasks} is true, replays
	 * every request deferred while paused (skipping GC'd ImageViews).
	 */
	public void resume(boolean executePendingTasks) {
		paused = false;
		if (executePendingTasks) {
			for (ImageViewSpec spec : pending) {
				ImageView imgView = spec.getObj();
				if (imgView != null) {
					attachImage(spec.imgUrl, imgView, spec.reshaper, spec.crossFadeMillis, spec.listener,
							spec.inBitmapRef.get());
				}
			}
		}
		pending.clear();
	}

	//

	public void attachImage(String imgUrl, ImageView imageView) {
		attachImage(imgUrl, imageView, 0);
	}

	public void attachImage(String imgUrl, ImageView imageView, int crossFadeMillis) {
		attachImage(imgUrl, imageView, null, crossFadeMillis);
	}

	public void attachImage(String imgUrl, ImageView imageView, ImageReshaper reshaper, int crossFadeMillis) {
		attachImage(imgUrl, imageView, reshaper, crossFadeMillis, null);
	}

	public void attachImage(String imgUrl, ImageView imageView, ImageReshaper reshaper, int crossFadeMillis,
			ImageFetchListener listener) {
		attachImage(imgUrl, imageView, reshaper, crossFadeMillis, listener, null);
	}

	/**
	 * Loads {@code imgUrl} into {@code imageView} asynchronously.
	 * While paused, the request is queued (re-added last to keep the most
	 * recent ordering); otherwise any previously queued task for the same
	 * spec is removed (runnable equality delegates to the spec) and a cache
	 * read is scheduled.
	 */
	public void attachImage(String imgUrl, ImageView imageView, ImageReshaper reshaper, int crossFadeMillis,
			ImageFetchListener listener, Bitmap inBitmap) {
		ImageViewSpec spec = new ImageViewSpec(imageView, imgUrl, inBitmap, crossFadeMillis, reshaper, listener);
		long submitted = System.nanoTime();
		wip.put(spec, submitted);
		if (paused) {
			pending.remove(spec);
			pending.add(spec);
		} else {
			if (listener != null) {
				listener.onFetchAdded(imageView, imgUrl);
			}
			Runnable r = new ReadFromCacheRunnable(spec, submitted);
			cacheExecutor.remove(r);
			fetchExecutor.remove(r);
			cacheExecutor.execute(r);
		}
	}

	/** Synchronous fetch using the internal mock view for size hints. */
	public Bitmap getImage(String imgUrl) throws Exception {
		return getImage(imgUrl, mockImageView, null);
	}

	/**
	 * Synchronous fetch: cache first, then network (caching raw and
	 * reshaped results on the way out). Blocks the calling thread.
	 */
	public Bitmap getImage(String imgUrl, ImageView hintImageView, ImageReshaper reshaper) throws Exception {
		ImageViewSpec spec = new ImageViewSpec(hintImageView, imgUrl, null, 0, reshaper, null);
		Bitmap bm = readCached(spec);
		if (bm == null) {
			Pair<byte[], Pair<Bitmap, BitmapFactory.Options>> bmData = fetchAndDecode(spec);
			cacheRawImage(imgUrl, bmData.first);
			bm = reshapeAndCache(spec, bmData.second);
		}
		return bm;
	}

	//

	/**
	 * Asynchronously purges disk-cache files not accessed in the last
	 * {@code hours} hours.
	 * NOTE(review): {@code hours * 60 * 60 * 1000} is int arithmetic and
	 * overflows for hours >= ~597 — confirm acceptable range.
	 */
	public void clearCacheOlderThan(int hours) {
		if (diskCache != null) {
			final long timestamp = System.currentTimeMillis() - hours * 60 * 60 * 1000;
			cacheExecutor.execute(new Runnable() {

				@Override
				public void run() {
					diskCache.purgeFilesAccessedBefore(timestamp);
				}
			});
		} else {
			L.w("Disk cache not set.");
		}
	}

	//

	/**
	 * Downloads the image bytes (reporting progress to the listener on the
	 * UI thread) and decodes them scaled to the spec's size/config hints.
	 * Returns (raw bytes, (bitmap, decode options)).
	 */
	Pair<byte[], Pair<Bitmap, BitmapFactory.Options>> fetchAndDecode(final ImageViewSpec spec) throws Exception {
		final ImageView imgView = spec.getObj();
		if (imgView == null) {
			throw new IllegalStateException("ImageView is null.");
		}
		int bytesReadTotal = 0;
		byte[] buffer = new byte[BUFFER_SIZE];
		BufferedInputStream bis = null;
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		try {
			HTTPResponse resp = restClient.getInputStream(spec.imgUrl);
			final int kBTotal = resp.getHeaderInt(Header.CONTENT_LENGTH) / 1024;
			bis = resp.inputStream;
			int bytesRead;
			while ((bytesRead = bis.read(buffer)) != -1) {
				baos.write(buffer, 0, bytesRead);
				bytesReadTotal += bytesRead;
				if (spec.listener != null) {
					final int kBReceived = bytesReadTotal / 1024;
					runOnUiThread(new Runnable() {

						@Override
						public void run() {
							spec.listener.onFetchProgressChanged(imgView, spec.imgUrl, kBTotal, kBReceived);
						}
					});
				}
			}
			byte[] data = baos.toByteArray();
			Pair<Bitmap, BitmapFactory.Options> bm = BitmapFactoryUtils.decodeScaled(data, spec.widthHint,
					spec.heightHint, spec.configHint, spec.inBitmapRef.get());
			return Pair.create(data, bm);
		} finally {
			silentlyClose(bis, baos);
		}
	}

	/**
	 * Cache lookup: memory by cacheKey, then disk by cacheKey (promoting to
	 * memory), then disk by raw URL (reshaping + caching the result).
	 * Returns null on a complete miss.
	 */
	Bitmap readCached(ImageViewSpec spec) {
		Bitmap bm = null;
		if (memoryCache != null) {
			bm = memoryCache.get(spec.cacheKey);
		}
		if (bm == null && diskCache != null) {
			Pair<Bitmap, BitmapFactory.Options> bmData = diskCache.get(spec.cacheKey, spec.widthHint, spec.heightHint,
					spec.configHint, spec.inBitmapRef.get());
			if (bmData != null) {
				bm = bmData.first;
				if (memoryCache != null) {
					memoryCache.put(spec.cacheKey, bm);
				}
			} else {
				// Fall back to the unreshaped original stored under the URL.
				bmData = diskCache.get(spec.imgUrl, spec.widthHint, spec.heightHint, spec.configHint,
						spec.inBitmapRef.get());
				if (bmData != null) {
					bm = reshapeAndCache(spec, bmData);
				}
			}
		}
		return bm;
	}

	/** Stores the raw downloaded bytes on disk, keyed by URL. */
	void cacheRawImage(String imgUrl, byte[] data) {
		if (diskCache != null) {
			diskCache.put(imgUrl, data);
		}
	}

	/**
	 * Applies the spec's reshaper (recycling the source bitmap when a new
	 * one is produced) and caches the result: memory always, disk only for
	 * reshaped bitmaps (the unreshaped original is already on disk by URL).
	 */
	Bitmap reshapeAndCache(ImageViewSpec spec, Pair<Bitmap, BitmapFactory.Options> bmData) {
		Bitmap bm = bmData.first;
		if (spec.reshaper != null) {
			Bitmap reshapedBm = spec.reshaper.reshape(bm);
			if (bm != reshapedBm) {
				bm.recycle();
			}
			bm = reshapedBm;
		}
		if (memoryCache != null) {
			memoryCache.put(spec.cacheKey, bm);
		}
		if (diskCache != null && spec.reshaper != null) {
			Pair<CompressFormat, Integer> cacheFormat = spec.reshaper.getCacheFormat(bmData.second.outMimeType);
			diskCache.put(spec.cacheKey, bm, cacheFormat);
		}
		return bm;
	}

	/**
	 * Sets the bitmap only if this result belongs to the newest request for
	 * the spec (stale results from recycled views are dropped silently).
	 */
	void attachIfMostRecent(ImageViewSpec spec, long submitted, Bitmap bitmap) {
		Long mostRecent = wip.get(spec);
		if (mostRecent != null && submitted == mostRecent) {
			wip.remove(spec);
			if (!paused || !pending.contains(spec)) {
				SetBitmapRunnable r = new SetBitmapRunnable(spec, bitmap);
				runOnUiThread(r);
			}
		}
	}

	/**
	 * Posts to the main looper; if posting fails (handler's looper quit?),
	 * rebuilds the handler and retries.
	 */
	void runOnUiThread(Runnable r) {
		boolean success = handler.post(r);
		// a hack
		while (!success) {
			handler = new Handler(Looper.getMainLooper());
			runOnUiThread(r);
		}
	}

	//

	/**
	 * Everything needed to service one attach request. Holds the ImageView
	 * weakly (via WeakWrapper) so list rows can be GC'd. Identity —
	 * inherited equals/hashCode from WeakWrapper, presumably based on the
	 * wrapped view (TODO confirm) — is what makes wip/pending/executor
	 * dedup work per-view.
	 */
	static final class ImageViewSpec extends WeakWrapper<ImageView> {

		final String imgUrl;
		final WeakReference<Bitmap> inBitmapRef;
		final int crossFadeMillis;
		final ImageReshaper reshaper;
		final ImageFetchListener listener;

		// Derived once at construction so they survive the view being GC'd.
		final String cacheKey;
		final Bitmap.Config configHint;
		final int widthHint;
		final int heightHint;

		public ImageViewSpec(ImageView imgView, String imgUrl, Bitmap inBitmap, int crossFadeMillis,
				ImageReshaper reshaper, ImageFetchListener listener) {
			super(imgView);
			// String.valueOf turns a null url into the string "null".
			this.imgUrl = String.valueOf(imgUrl);
			inBitmapRef = new WeakReference<Bitmap>(inBitmap);
			this.crossFadeMillis = crossFadeMillis;
			this.reshaper = reshaper;
			this.listener = listener;
			cacheKey = getCacheKey();
			configHint = getConfigHint();
			Point p = getSizeHint();
			widthHint = p.x;
			heightHint = p.y;
		}

		/** url[-reshaperId][-WxH] — distinguishes reshaped/scaled variants. */
		private String getCacheKey() {
			StringBuilder sb = new StringBuilder();
			sb.append(imgUrl);
			if (reshaper != null) {
				sb.append("-");
				sb.append(reshaper.getCacheId());
			}
			Point p = getSizeHint();
			if (p.x > 0 || p.y > 0) {
				sb.append("-");
				sb.append(p.x);
				sb.append("x");
				sb.append(p.y);
			}
			return sb.toString();
		}

		private Bitmap.Config getConfigHint() {
			return (reshaper != null) ? reshaper.getBitmapConfig() : null;
		}

		/** Reshaper hints win; otherwise derive a decode size from the view. */
		private Point getSizeHint() {
			Point p = new Point();
			if (reshaper != null) {
				p.x = reshaper.getImageWidthHint();
				p.y = reshaper.getImageHeightHint();
			}
			if (p.x <= 0 && p.y <= 0) {
				p = BitmapFactoryUtils.calcDecodeSizeHint(getObj());
			}
			return p;
		}
	}

	/**
	 * Base runnable tied to a spec. equals/hashCode delegate to the spec so
	 * ThreadPoolExecutor.remove() can cancel queued work for the same view.
	 */
	abstract class ImageViewSpecRunnable implements Runnable {

		final ImageViewSpec spec;
		final long submitted;

		public ImageViewSpecRunnable(ImageViewSpec spec, long submitted) {
			this.spec = spec;
			this.submitted = submitted;
		}

		@Override
		public boolean equals(Object o) {
			boolean eq = false;
			if (this == o) {
				eq = true;
			} else if (o instanceof ImageViewSpecRunnable) {
				eq = spec.equals(((ImageViewSpecRunnable) o).spec);
			}
			return eq;
		}

		@Override
		public int hashCode() {
			return spec.hashCode();
		}
	}

	/** Cache probe; on a miss, hands off to a FetchAndCacheRunnable. */
	class ReadFromCacheRunnable extends ImageViewSpecRunnable {

		public ReadFromCacheRunnable(ImageViewSpec spec, long submitted) {
			super(spec, submitted);
		}

		@Override
		public void run() {
			Bitmap bm = readCached(spec);
			if (bm == null) {
				FetchAndCacheRunnable r = new FetchAndCacheRunnable(spec, submitted);
				fetchExecutor.execute(r);
			} else {
				attachIfMostRecent(spec, submitted, bm);
			}
		}
	}

	/** Network fetch + cache; failures go to the listener on the UI thread. */
	class FetchAndCacheRunnable extends ImageViewSpecRunnable {

		public FetchAndCacheRunnable(ImageViewSpec spec, long submitted) {
			super(spec, submitted);
		}

		@Override
		public void run() {
			try {
				Pair<byte[], Pair<Bitmap, BitmapFactory.Options>> bmData = fetchAndDecode(spec);
				cacheRawImage(spec.imgUrl, bmData.first);
				Bitmap bm = reshapeAndCache(spec, bmData.second);
				attachIfMostRecent(spec, submitted, bm);
			} catch (final Exception e) {
				L.w("Failed to fetch '%s'.", spec.imgUrl);
				L.d(e);
				final ImageView imgView = spec.getObj();
				if (spec.listener != null && imgView != null) {
					runOnUiThread(new Runnable() {

						@Override
						public void run() {
							spec.listener.onFetchFailed(imgView, spec.imgUrl, e);
						}
					});
				}
			}
		}
	}

	/**
	 * UI-thread runnable that installs the bitmap, optionally cross-fading
	 * from the current drawable. submitted is unused here (-1 sentinel).
	 */
	class SetBitmapRunnable extends ImageViewSpecRunnable {

		final Bitmap bitmap;

		public SetBitmapRunnable(ImageViewSpec spec, Bitmap bitmap) {
			super(spec, -1);
			this.bitmap = bitmap;
		}

		@Override
		public void run() {
			ImageView imgView = spec.getObj();
			if (imgView == null) {
				L.i("ImageView became null (no strong references => GCed).");
			} else {
				if (spec.crossFadeMillis > 0) {
					Drawable prevDrawable = imgView.getDrawable();
					if (prevDrawable == null) {
						// Fade in from transparent when nothing was set.
						prevDrawable = new ColorDrawable(TRANSPARENT);
					}
					Drawable nextDrawable = new BitmapDrawable(imgView.getResources(), bitmap);
					TransitionDrawable transitionDrawable = new TransitionDrawable(
							new Drawable[] { prevDrawable, nextDrawable });
					imgView.setImageDrawable(transitionDrawable);
					transitionDrawable.startTransition(spec.crossFadeMillis);
				} else {
					imgView.setImageBitmap(bitmap);
				}
				if (spec.listener != null) {
					spec.listener.onFetchCompleted(imgView, spec.imgUrl, bitmap);
				}
			}
		}
	}
}
package fr.coding.yourandroidwebapp.settings;

import android.content.Context;
import android.content.Intent;
import android.content.pm.ShortcutInfo;
import android.content.pm.ShortcutManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Icon;
import android.net.Uri;
import android.webkit.WebSettings;
import android.widget.Toast;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.UUID;
import java.util.concurrent.ExecutionException;

import fr.coding.tools.DiskCacheRetrieveHttpFile;
import fr.coding.yourandroidwebapp.R;
import fr.coding.yourandroidwebapp.WebMainActivity;

import static android.os.Build.VERSION.SDK_INT;

/**
 * Created by Matthieu on 13/09/2015.
 *
 * Configuration record for a single web application: identity, URLs
 * (primary/alternate/offline), SSL and auth options, cache and pinch-zoom
 * modes. Provides JSON (de)serialization, duplication, launcher-shortcut
 * pinning and activity launching.
 */
public class WebApp {

    // constantes — pinch-zoom behaviour of the hosting WebView
    public static final int PinchZoomMode_None = 0;
    public static final int PinchZoomMode_NoControls = 1;
    public static final int PinchZoomMode_WithControls = 2;

    // members
    public String id;          // unique identifier (UUID string)
    public String name;        // display name
    public String url;         // primary URL
    public String iconUrl;     // icon source: file/content URI or http(s) URL

    // alternate url
    public String alternateUrl;             // URL used when on one of alternateSSIDs
    public String alternateSSIDs;           // SSIDs that trigger the alternate URL
    public String alternateUrlNotConnected; // URL used when offline

    // ssl
    public boolean allCertsByPass;      // accept any certificate (insecure)
    public boolean allowedSSlActivated; // honor the user-allowed-certs list

    // auth
    public boolean autoAuth;

    // connection
    public boolean reloadOnConnectionChange;

    // cache mode — one of the WebSettings.LOAD_* constants
    public int cacheMode;

    // pinchzoom mode — one of the PinchZoomMode_* constants above
    public int pinchZoomMode;

    /**
     * Creates a WebApp with a fresh random id and default option values.
     */
    public WebApp() {
        id = UUID.randomUUID().toString();
        allowedSSlActivated = true;
        autoAuth = true;
        reloadOnConnectionChange = false;
    }

    public String toString() {
        return name;
    }

    /**
     * Returns a deep copy of this WebApp with a new random id and the name
     * suffixed with " Copy". All other settings are carried over verbatim.
     */
    public WebApp Duplicate() {
        // The constructor already assigns a fresh random UUID to dup.id.
        WebApp dup = new WebApp();
        dup.name = name + " Copy";
        dup.url = url;
        dup.iconUrl = iconUrl;
        dup.alternateUrl = alternateUrl;
        dup.alternateSSIDs = alternateSSIDs;
        dup.alternateUrlNotConnected = alternateUrlNotConnected;
        dup.allCertsByPass = allCertsByPass;
        dup.allowedSSlActivated = allowedSSlActivated;
        dup.autoAuth = autoAuth;
        dup.reloadOnConnectionChange = reloadOnConnectionChange;
        dup.cacheMode = cacheMode;
        dup.pinchZoomMode = pinchZoomMode;
        return dup;
    }

    /**
     * Deserializes a WebApp from its JSON representation.
     * Required keys: "id", "name", "url". All other keys are optional;
     * missing ones keep constructor defaults, except cacheMode
     * (WebSettings.LOAD_DEFAULT) and pinchZoomMode (PinchZoomMode_None).
     *
     * @param jsonobj the JSON object to read from
     * @return the reconstructed WebApp
     * @throws JSONException if a required key is missing or of the wrong type
     */
    public static WebApp JSONobjToWebApp(JSONObject jsonobj) throws JSONException {
        WebApp webapp = new WebApp();
        webapp.id = jsonobj.getString("id");
        webapp.name = jsonobj.getString("name");
        webapp.url = jsonobj.getString("url");
        if (jsonobj.has("iconUrl"))
            webapp.iconUrl = jsonobj.getString("iconUrl");
        if (jsonobj.has("alternateUrl"))
            webapp.alternateUrl = jsonobj.getString("alternateUrl");
        if (jsonobj.has("alternateUrlNotConnected"))
            webapp.alternateUrlNotConnected = jsonobj.getString("alternateUrlNotConnected");
        if (jsonobj.has("alternateSSIDs"))
            webapp.alternateSSIDs = jsonobj.getString("alternateSSIDs");
        if (jsonobj.has("allCertsByPass"))
            webapp.allCertsByPass = jsonobj.getBoolean("allCertsByPass");
        if (jsonobj.has("allowedSSlActivated"))
            webapp.allowedSSlActivated = jsonobj.getBoolean("allowedSSlActivated");
        if (jsonobj.has("autoAuth"))
            webapp.autoAuth = jsonobj.getBoolean("autoAuth");
        if (jsonobj.has("reloadOnConnectionChange"))
            webapp.reloadOnConnectionChange = jsonobj.getBoolean("reloadOnConnectionChange");
        webapp.cacheMode = WebSettings.LOAD_DEFAULT;
        if (jsonobj.has("cacheMode"))
            webapp.cacheMode = jsonobj.getInt("cacheMode");
        webapp.pinchZoomMode = PinchZoomMode_None;
        if (jsonobj.has("pinchZoomMode"))
            webapp.pinchZoomMode = jsonobj.getInt("pinchZoomMode");
        return webapp;
    }

    /**
     * Serializes this WebApp to JSON. Note: JSONObject.put silently drops
     * entries whose value is null, so optional null fields are simply absent
     * from the output (mirroring the has()-guards in JSONobjToWebApp).
     *
     * @return a JSONObject holding every persisted field
     * @throws JSONException if a value cannot be stored
     */
    public JSONObject toJSONobj() throws JSONException {
        JSONObject jsonobj = new JSONObject();
        jsonobj.put("id", id);
        jsonobj.put("name", name);
        jsonobj.put("url", url);
        jsonobj.put("iconUrl", iconUrl);
        jsonobj.put("alternateUrl", alternateUrl);
        jsonobj.put("alternateUrlNotConnected", alternateUrlNotConnected);
        jsonobj.put("alternateSSIDs", alternateSSIDs);
        jsonobj.put("allCertsByPass", allCertsByPass);
        jsonobj.put("allowedSSlActivated", allowedSSlActivated);
        jsonobj.put("autoAuth", autoAuth);
        jsonobj.put("reloadOnConnectionChange", reloadOnConnectionChange);
        jsonobj.put("cacheMode", cacheMode);
        jsonobj.put("pinchZoomMode", pinchZoomMode);
        return jsonobj;
    }

    /**
     * Requests a pinned launcher shortcut for this WebApp.
     * The icon is loaded from iconUrl (local file/content URI decoded
     * directly; anything else fetched via DiskCacheRetrieveHttpFile) and
     * scaled to 256x256; the app launcher icon is used as a fallback.
     *
     * NOTE(review): the .get() below blocks the calling thread until the
     * fetch completes — if this is ever called from the UI thread it will
     * jank/ANR on slow networks; consider a callback instead.
     *
     * @param appContext context used for the system service, resources and toast
     */
    public void LauncherShortcut(Context appContext) {
        WebApp app = this;
        Bitmap theBitmap = null;
        if ((app.iconUrl != null) && (!app.iconUrl.isEmpty())) {
            Uri icon = Uri.parse(app.iconUrl);
            try {
                if ((icon.getScheme() != null)
                        && (icon.getScheme().equalsIgnoreCase("file") || icon.getScheme().equalsIgnoreCase("content"))) {
                    theBitmap = BitmapFactory.decodeFile(icon.getPath());
                } else {
                    byte[] img = new DiskCacheRetrieveHttpFile(appContext).execute(app.iconUrl).get();
                    if (img != null)
                        theBitmap = BitmapFactory.decodeByteArray(img, 0, img.length);
                }
            } catch (ExecutionException ee) {
                ee.printStackTrace();
            } catch (InterruptedException ee) {
                // Restore the interrupt flag so callers/executors can observe it;
                // swallowing it would silently discard the interruption request.
                Thread.currentThread().interrupt();
                ee.printStackTrace();
            }
            if (theBitmap == null) {
                Toast.makeText(appContext, "Error loading icon, Url : " + app.iconUrl, Toast.LENGTH_LONG).show();
            } else {
                theBitmap = Bitmap.createScaledBitmap(theBitmap, 256, 256, true);
            }
        }

        Intent shortcutIntent = new Intent(appContext, WebMainActivity.class);
        shortcutIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        shortcutIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        shortcutIntent.setAction("android.intent.action.WEBMAIN");
        shortcutIntent.putExtra("webappid", app.id);

        String shortcutname = app.name;
        if ((shortcutname == null) || (shortcutname.isEmpty())) {
            shortcutname = "NoName WebApp";
        }

        ShortcutManager scm = (ShortcutManager) appContext.getSystemService(Context.SHORTCUT_SERVICE);
        ShortcutInfo.Builder scib = new ShortcutInfo.Builder(appContext, app.id)
                .setShortLabel(shortcutname)
                .setIntent(shortcutIntent);
        if (theBitmap != null) {
            scib.setIcon(Icon.createWithBitmap(theBitmap));
        } else {
            // Fallback: use the application's own launcher icon.
            scib.setIcon(Icon.createWithBitmap(BitmapFactory.decodeResource(appContext.getResources(), R.mipmap.ic_launcher)));
        }
        scm.requestPinShortcut(scib.build(), null);
    }

    /**
     * Launches WebMainActivity for this WebApp, passing its id as an extra.
     *
     * @param ctx context used to start the activity
     */
    public void StartWebApp(Context ctx) {
        Intent detailIntent = new Intent(ctx, WebMainActivity.class);
        detailIntent.putExtra("webappid", id);
        ctx.startActivity(detailIntent);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.io.compress.zlib; import java.io.IOException; import java.util.zip.Checksum; import java.util.zip.DataFormatException; import java.util.zip.Inflater; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DoNotPool; import org.apache.hadoop.util.DataChecksum; /** * A {@link Decompressor} based on the popular gzip compressed file format. 
 * http://www.gzip.org/
 *
 */
@DoNotPool
public class BuiltInGzipDecompressor implements Decompressor {
  private static final int GZIP_MAGIC_ID = 0x8b1f;  // if read as LE short int
  private static final int GZIP_DEFLATE_METHOD = 8;
  private static final int GZIP_FLAGBIT_HEADER_CRC  = 0x02;
  private static final int GZIP_FLAGBIT_EXTRA_FIELD = 0x04;
  private static final int GZIP_FLAGBIT_FILENAME    = 0x08;
  private static final int GZIP_FLAGBIT_COMMENT     = 0x10;
  private static final int GZIP_FLAGBITS_RESERVED   = 0xe0;

  // 'true' (nowrap) => Inflater will handle raw deflate stream only
  private Inflater inflater = new Inflater(true);

  // Caller's input buffer; consumed cooperatively by the header/trailer
  // parsers (user
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ioteventsdata.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Information needed to snooze the alarm.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotevents-data-2018-10-23/SnoozeAlarmActionRequest"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SnoozeAlarmActionRequest implements Serializable, Cloneable, StructuredPojo {

    /** The request ID. Each ID must be unique within each batch. */
    private String requestId;

    /** The name of the alarm model. */
    private String alarmModelName;

    /**
     * The value of the key used as a filter to select only the alarms associated with the <a href=
     * "https://docs.aws.amazon.com/iotevents/latest/apireference/API_CreateAlarmModel.html#iotevents-CreateAlarmModel-request-key"
     * >key</a>.
     */
    private String keyValue;

    /** The note that you can leave when you snooze the alarm. */
    private String note;

    /**
     * The snooze time in seconds. The alarm automatically changes to the <code>NORMAL</code> state after this
     * duration.
     */
    private Integer snoozeDuration;

    /**
     * Sets the request ID. Each ID must be unique within each batch.
     *
     * @param requestId
     *        The request ID.
     */
    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    /**
     * Returns the request ID. Each ID must be unique within each batch.
     *
     * @return the request ID
     */
    public String getRequestId() {
        return this.requestId;
    }

    /**
     * Fluent variant of {@link #setRequestId(String)}.
     *
     * @param requestId
     *        The request ID.
     * @return this object, for call chaining
     */
    public SnoozeAlarmActionRequest withRequestId(String requestId) {
        setRequestId(requestId);
        return this;
    }

    /**
     * Sets the name of the alarm model.
     *
     * @param alarmModelName
     *        The name of the alarm model.
     */
    public void setAlarmModelName(String alarmModelName) {
        this.alarmModelName = alarmModelName;
    }

    /**
     * Returns the name of the alarm model.
     *
     * @return the alarm model name
     */
    public String getAlarmModelName() {
        return this.alarmModelName;
    }

    /**
     * Fluent variant of {@link #setAlarmModelName(String)}.
     *
     * @param alarmModelName
     *        The name of the alarm model.
     * @return this object, for call chaining
     */
    public SnoozeAlarmActionRequest withAlarmModelName(String alarmModelName) {
        setAlarmModelName(alarmModelName);
        return this;
    }

    /**
     * Sets the value of the key used as a filter to select only the alarms associated with the <a href=
     * "https://docs.aws.amazon.com/iotevents/latest/apireference/API_CreateAlarmModel.html#iotevents-CreateAlarmModel-request-key"
     * >key</a>.
     *
     * @param keyValue
     *        The value of the filter key.
     */
    public void setKeyValue(String keyValue) {
        this.keyValue = keyValue;
    }

    /**
     * Returns the value of the key used as a filter to select only the alarms associated with the key.
     *
     * @return the filter key value
     */
    public String getKeyValue() {
        return this.keyValue;
    }

    /**
     * Fluent variant of {@link #setKeyValue(String)}.
     *
     * @param keyValue
     *        The value of the filter key.
     * @return this object, for call chaining
     */
    public SnoozeAlarmActionRequest withKeyValue(String keyValue) {
        setKeyValue(keyValue);
        return this;
    }

    /**
     * Sets the note that you can leave when you snooze the alarm.
     *
     * @param note
     *        The note text.
     */
    public void setNote(String note) {
        this.note = note;
    }

    /**
     * Returns the note that you can leave when you snooze the alarm.
     *
     * @return the note text
     */
    public String getNote() {
        return this.note;
    }

    /**
     * Fluent variant of {@link #setNote(String)}.
     *
     * @param note
     *        The note text.
     * @return this object, for call chaining
     */
    public SnoozeAlarmActionRequest withNote(String note) {
        setNote(note);
        return this;
    }

    /**
     * Sets the snooze time in seconds. The alarm automatically changes to the <code>NORMAL</code> state after this
     * duration.
     *
     * @param snoozeDuration
     *        The snooze time in seconds.
     */
    public void setSnoozeDuration(Integer snoozeDuration) {
        this.snoozeDuration = snoozeDuration;
    }

    /**
     * Returns the snooze time in seconds.
     *
     * @return the snooze duration
     */
    public Integer getSnoozeDuration() {
        return this.snoozeDuration;
    }

    /**
     * Fluent variant of {@link #setSnoozeDuration(Integer)}.
     *
     * @param snoozeDuration
     *        The snooze time in seconds.
     * @return this object, for call chaining
     */
    public SnoozeAlarmActionRequest withSnoozeDuration(Integer snoozeDuration) {
        setSnoozeDuration(snoozeDuration);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Matches the generated format exactly: every non-null field except the
        // last is followed by a trailing comma.
        StringBuilder text = new StringBuilder("{");
        if (getRequestId() != null) {
            text.append("RequestId: ").append(getRequestId()).append(",");
        }
        if (getAlarmModelName() != null) {
            text.append("AlarmModelName: ").append(getAlarmModelName()).append(",");
        }
        if (getKeyValue() != null) {
            text.append("KeyValue: ").append(getKeyValue()).append(",");
        }
        if (getNote() != null) {
            text.append("Note: ").append(getNote()).append(",");
        }
        if (getSnoozeDuration() != null) {
            text.append("SnoozeDuration: ").append(getSnoozeDuration());
        }
        return text.append("}").toString();
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean fieldsEqual(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SnoozeAlarmActionRequest)) {
            return false;
        }
        SnoozeAlarmActionRequest that = (SnoozeAlarmActionRequest) obj;
        return fieldsEqual(getRequestId(), that.getRequestId())
                && fieldsEqual(getAlarmModelName(), that.getAlarmModelName())
                && fieldsEqual(getKeyValue(), that.getKeyValue())
                && fieldsEqual(getNote(), that.getNote())
                && fieldsEqual(getSnoozeDuration(), that.getSnoozeDuration());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (seed 1, field order preserved) as the
        // generated code, so hash values are unchanged.
        final int prime = 31;
        int hash = 1;
        Object[] fields = {getRequestId(), getAlarmModelName(), getKeyValue(), getNote(), getSnoozeDuration()};
        for (Object field : fields) {
            hash = prime * hash + ((field == null) ? 0 : field.hashCode());
        }
        return hash;
    }

    @Override
    public SnoozeAlarmActionRequest clone() {
        try {
            return (SnoozeAlarmActionRequest) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.ioteventsdata.model.transform.SnoozeAlarmActionRequestMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
package com.Da_Technomancer.crossroads.tileentities.rotary;

import java.util.ArrayList;
import java.util.Map.Entry;

import com.Da_Technomancer.crossroads.API.Capabilities;
import com.Da_Technomancer.crossroads.API.rotary.IAxisHandler;
import com.Da_Technomancer.crossroads.API.rotary.IAxleHandler;
import com.Da_Technomancer.crossroads.items.crafting.RecipePredicate;
import com.Da_Technomancer.crossroads.items.crafting.RecipeHolder;

import net.minecraft.inventory.InventoryHelper;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ITickable;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
import net.minecraftforge.items.IItemHandlerModifiable;

/**
 * Tile entity for a rotary-powered grindstone. Slot 0 is the input; slots 1-3
 * hold outputs. Each tick it drains rotary energy from the axle above into
 * {@code progress}, and once progress reaches {@link #REQUIRED} it consumes one
 * input item and produces the recipe outputs (if they fit).
 */
public class GrindstoneTileEntity extends TileEntity implements ITickable{

	// Slot 0: input; slots 1-3: outputs.
	private ItemStack[] inventory = {ItemStack.EMPTY, ItemStack.EMPTY, ItemStack.EMPTY, ItemStack.EMPTY};
	// Accumulated grinding work; compared against REQUIRED. Only ever incremented
	// by min(..., REQUIRED - progress), so it hits REQUIRED exactly.
	private double progress = 0;
	public static final double REQUIRED = 100;
	// Shared rotary-physics state; index 1 is the energy channel used here.
	// NOTE(review): indices 0/2/3 are persisted but not read locally — presumably
	// consumed by the axis network via getMotionData(); confirm against IAxleHandler users.
	private final double[] motionData = new double[4];

	/**
	 * Converts available rotary energy (motionData[1]) into grinding progress,
	 * draining the axle by the amount consumed. No-op once progress is complete.
	 */
	private void runMachine(){
		if(progress == REQUIRED){
			return;
		}
		double used = Math.min(Math.abs(motionData[1]), REQUIRED - progress);
		progress += used;
		axleHandler.addEnergy(-used, false, false);
	}

	/**
	 * Consumes one input item and distributes {@code outputs} into slots 1-3.
	 * Caller must pass the outputs for the current input's recipe; nothing
	 * happens unless {@link #canFit(ItemStack[])} approves.
	 */
	private void createOutput(ItemStack[] outputs){
		if(canFit(outputs)){
			progress = 0;
			inventory[0].shrink(1);
			for(ItemStack stack : outputs){
				int remain = stack.getCount();
				// First pass: top up existing matching stacks.
				for(int slot = 1; slot < 4; slot++){
					if(remain > 0 && ItemStack.areItemsEqual(inventory[slot], stack)){
						int stored = stack.getMaxStackSize() - inventory[slot].getCount();
						inventory[slot] = new ItemStack(stack.getItem(), inventory[slot].getCount() + Math.min(stored, remain), stack.getMetadata());
						// May drive remain negative when free space exceeds remain;
						// harmless, as both loops guard on remain > 0 / <= 0.
						remain -= stored;
					}
				}
				// Second pass: fill empty slots with what is left.
				for(int slot = 1; slot < 4; slot++){
					if(remain <= 0){
						break;
					}
					if(inventory[slot].isEmpty()){
						inventory[slot] = new ItemStack(stack.getItem(), Math.min(stack.getMaxStackSize(), remain), stack.getMetadata());
						remain -= Math.min(stack.getMaxStackSize(), remain);
					}
				}
			}
			markDirty();
		}
	}

	/**
	 * Dry-run of {@link #createOutput}: returns true if every output stack can
	 * be placed into slots 1-3. Slots claimed as "empty" for one output are
	 * locked so a later output cannot double-count them.
	 */
	private boolean canFit(ItemStack[] outputs){
		boolean viable = true;
		ArrayList<Integer> locked = new ArrayList<Integer>();
		for(ItemStack stack : outputs){
			int remain = stack.getCount();
			for(int slot : new int[] {1, 2, 3}){
				if(!locked.contains(slot) && ItemStack.areItemsEqual(inventory[slot], stack)){
					remain -= stack.getMaxStackSize() - inventory[slot].getCount();
				}
			}
			for(int slot : new int[] {1, 2, 3}){
				if(!locked.contains(slot) && remain > 0 && inventory[slot].isEmpty()){
					remain -= stack.getMaxStackSize();
					locked.add(slot);
				}
			}
			if(remain > 0){
				viable = false;
				break;
			}
		}
		return viable;
	}

	/** Server-side tick: advance grinding and emit outputs when complete. */
	@Override
	public void update(){
		if(!world.isRemote){
			if(!inventory[0].isEmpty()){
				ItemStack[] output = getOutput();
				if(output == null){
					// Input has no grind recipe: discard any partial progress.
					progress = 0;
					return;
				}
				runMachine();
				if(progress == REQUIRED){
					createOutput(output);
				}
			}else{
				progress = 0;
			}
		}
	}

	/** @return the registered grind outputs for the current input, or null if none match. */
	private ItemStack[] getOutput(){
		for(Entry<RecipePredicate<ItemStack>, ItemStack[]> recipe: RecipeHolder.grindRecipes.entrySet()){
			if(recipe.getKey().test(inventory[0])){
				return recipe.getValue();
			}
		}
		return null;
	}

	/** Dumps all four slots into the world (e.g. on block break) and clears them. */
	public void dropItems(){
		for (int i = 0; i < 4; i++){
			InventoryHelper.spawnItemStack(world, pos.getX(), pos.getY(), pos.getZ(), inventory[i]);
			inventory[i] = ItemStack.EMPTY;
		}
		markDirty();
	}

	private final IItemHandler itemOutHandler = new ItemOutHandler();
	private final IItemHandler itemInHandler = new ItemInHandler();
	private final AllItemHandler itemAllHandler = new AllItemHandler();
	private final IAxleHandler axleHandler = new AxleHandler();

	@Override
	public boolean hasCapability(Capability<?> cap, EnumFacing side){
		if(cap == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY){
			// Items from any side but the top; the top is reserved for the axle.
			return side != EnumFacing.UP;
		}
		if(cap == Capabilities.AXLE_HANDLER_CAPABILITY && side == EnumFacing.UP){
			return true;
		}
		return super.hasCapability(cap, side);
	}

	@SuppressWarnings("unchecked")
	@Override
	public <T> T getCapability(Capability<T> cap, EnumFacing side){
		if(cap == CapabilityItemHandler.ITEM_HANDLER_CAPABILITY){
			if(side == EnumFacing.DOWN){
				// Bottom: extraction-only view of the output slots.
				return (T) itemOutHandler;
			}
			if(side == null){
				// Internal access (no side): full 4-slot view.
				return (T) itemAllHandler;
			}
			if(side != EnumFacing.UP){
				// Horizontal sides: insertion-only view of the input slot.
				return (T) itemInHandler;
			}
		}
		if(cap == Capabilities.AXLE_HANDLER_CAPABILITY && side == EnumFacing.UP){
			return (T) axleHandler;
		}
		return super.getCapability(cap, side);
	}

	/** Unrestricted 4-slot view; insertion is still limited to the input slot. */
	private class AllItemHandler implements IItemHandlerModifiable{

		@Override
		public int getSlots(){
			return 4;
		}

		@Override
		public ItemStack getStackInSlot(int slot){
			return inventory[slot];
		}

		@Override
		public ItemStack insertItem(int slot, ItemStack stack, boolean simulate){
			if(slot == 0){
				// Reject mismatched item/NBT when the slot already holds something.
				if(!inventory[slot].isEmpty() && !stack.isEmpty() && (!ItemStack.areItemsEqual(stack, inventory[slot]) || !ItemStack.areItemStackTagsEqual(stack, inventory[slot]))){
					return stack;
				}
				int oldCount = inventory[slot].getCount();
				int cap = Math.min(stack.getCount(), stack.getMaxStackSize() - oldCount);
				ItemStack out = stack.copy();
				out.setCount(stack.getCount() - cap);
				if(!simulate){
					markDirty();
					inventory[slot] = stack.copy();
					inventory[slot].setCount(cap + oldCount);
				}
				return out;
			}else{
				return stack;
			}
		}

		@Override
		public ItemStack extractItem(int slot, int amount, boolean simulate){
			int cap = Math.min(amount, inventory[slot].getCount());
			if(simulate){
				return new ItemStack(inventory[slot].getItem(), cap, inventory[slot].getMetadata());
			}
			markDirty();
			return inventory[slot].splitStack(cap);
		}

		@Override
		public int getSlotLimit(int slot){
			return 64;
		}

		@Override
		public void setStackInSlot(int slot, ItemStack stack){
			inventory[slot] = stack;
			markDirty();
		}
	}

	/** Extraction-only view of the three output slots (handler slot i -> inventory i+1). */
	private class ItemOutHandler implements IItemHandler{

		@Override
		public int getSlots(){
			return 3;
		}

		@Override
		public ItemStack getStackInSlot(int slot){
			if(slot > 2 || slot < 0){
				return ItemStack.EMPTY;
			}
			return inventory[slot + 1];
		}

		@Override
		public ItemStack insertItem(int slot, ItemStack stack, boolean simulate){
			// Outputs cannot be inserted into.
			return stack;
		}

		@Override
		public ItemStack extractItem(int slot, int amount, boolean simulate){
			if(slot < 0 || slot > 2){
				return ItemStack.EMPTY;
			}
			int cap = Math.min(amount, inventory[slot + 1].getCount());
			if(simulate){
				return new ItemStack(inventory[slot + 1].getItem(), cap, inventory[slot + 1].getMetadata());
			}
			markDirty();
			return inventory[slot + 1].splitStack(cap);
		}

		@Override
		public int getSlotLimit(int slot){
			return 64;
		}
	}

	/** Insertion-only view of the input slot. */
	private class ItemInHandler implements IItemHandler{

		@Override
		public int getSlots(){
			return 1;
		}

		@Override
		public ItemStack getStackInSlot(int slot){
			return slot == 0 ? inventory[0] : ItemStack.EMPTY;
		}

		@Override
		public ItemStack insertItem(int slot, ItemStack stack, boolean simulate){
			if(slot != 0 || (!inventory[0].isEmpty() && !stack.isEmpty() && (!ItemStack.areItemsEqual(stack, inventory[0]) || !ItemStack.areItemStackTagsEqual(stack, inventory[0])))){
				return stack;
			}
			int oldCount = inventory[0].getCount();
			int cap = Math.min(stack.getCount(), stack.getMaxStackSize() - oldCount);
			ItemStack out = stack.copy();
			out.setCount(stack.getCount() - cap);
			if(!simulate){
				markDirty();
				inventory[0] = stack.copy();
				inventory[0].setCount(cap + oldCount);
			}
			return out;
		}

		@Override
		public ItemStack extractItem(int slot, int amount, boolean simulate){
			return ItemStack.EMPTY;
		}

		@Override
		public int getSlotLimit(int slot){
			return 64;
		}
	}

	/** Rotary-network endpoint exposed on the top face. */
	private class AxleHandler implements IAxleHandler{

		@Override
		public double[] getMotionData(){
			return motionData;
		}

		private double rotRatio;
		private byte updateKey;

		@Override
		public void propogate(IAxisHandler masterIn, byte key, double rotRatioIn, double lastRadius){
			//If true, this has already been checked.
			if(key == updateKey || masterIn.addToList(this)){
				return;
			}
			// A ratio of 0 would dead-lock the gear network; treat it as 1:1.
			rotRatio = rotRatioIn == 0 ? 1 : rotRatioIn;
			updateKey = key;
		}

		@Override
		public double getMoInertia(){
			return 1;
		}

		@Override
		public double getRotationRatio(){
			return rotRatio;
		}

		@Override
		public void addEnergy(double energy, boolean allowInvert, boolean absolute){
			if(allowInvert && absolute){
				motionData[1] += energy;
			}else if(allowInvert){
				// Signed relative to current spin direction.
				motionData[1] += energy * Math.signum(motionData[1]);
			}else if(absolute){
				// Clamp at zero if the change would flip the spin direction.
				int sign = (int) Math.signum(motionData[1]);
				motionData[1] += energy;
				if(sign != 0 && Math.signum(motionData[1]) != sign){
					motionData[1] = 0;
				}
			}else{
				int sign = (int) Math.signum(motionData[1]);
				motionData[1] += energy * ((double) sign);
				if(Math.signum(motionData[1]) != sign){
					motionData[1] = 0;
				}
			}
			markDirty();
		}

		@Override
		public void markChanged(){
			markDirty();
		}

		@Override
		public boolean shouldManageAngle(){
			return false;
		}
	}

	public int getProgress(){
		return (int) progress;
	}

	public void setProgress(int value){
		progress = value;
	}

	@Override
	public NBTTagCompound writeToNBT(NBTTagCompound nbt){
		super.writeToNBT(nbt);
		for(int i = 0; i < 4; ++i){
			if(!inventory[i].isEmpty()){
				NBTTagCompound stackTag = new NBTTagCompound();
				inventory[i].writeToNBT(stackTag);
				nbt.setTag("inv" + i, stackTag);
			}
			// BUGFIX: motion data was previously only written when the matching
			// inventory slot was non-empty, silently discarding rotary state on
			// save; readFromNBT reads it unconditionally, so write it unconditionally.
			nbt.setDouble("motion" + i, motionData[i]);
		}
		return nbt;
	}

	@Override
	public void readFromNBT(NBTTagCompound nbt){
		super.readFromNBT(nbt);
		for(int i = 0; i < 4; ++i){
			if(nbt.hasKey("inv" + i)){
				inventory[i] = new ItemStack(nbt.getCompoundTag("inv" + i));
			}
			motionData[i] = nbt.getDouble("motion" + i);
		}
	}
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.identitymanagement.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the IAM GetGroup operation, carrying the group name plus
 * optional pagination controls ({@code marker} and {@code maxItems}).
 */
public class GetGroupRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The name of the group. Per the IAM regex pattern
     * (http://wikipedia.org/wiki/regex), a string of upper/lowercase
     * alphanumeric characters with no spaces, optionally including =,.@-
     */
    private String groupName;

    /**
     * Pagination marker: set only when a previous response reported truncated
     * results, to the {@code Marker} value from that response.
     */
    private String marker;

    /**
     * Optional cap on the number of items returned (service default 100). When
     * more items remain, the response sets {@code IsTruncated} and supplies a
     * {@code Marker} for the next call. IAM may return fewer than requested.
     */
    private Integer maxItems;

    /**
     * No-argument constructor; populate the request via the setters or the
     * fluent {@code with...} methods.
     */
    public GetGroupRequest() {
    }

    /**
     * Convenience constructor.
     *
     * @param groupName the name of the group (alphanumeric plus =,.@-, no spaces)
     */
    public GetGroupRequest(String groupName) {
        setGroupName(groupName);
    }

    /**
     * Sets the name of the group.
     *
     * @param groupName the group name (alphanumeric plus =,.@-, no spaces)
     */
    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }

    /**
     * Returns the name of the group.
     *
     * @return the group name
     */
    public String getGroupName() {
        return this.groupName;
    }

    /**
     * Fluent variant of {@link #setGroupName(String)}.
     *
     * @param groupName the group name
     * @return this request, for call chaining
     */
    public GetGroupRequest withGroupName(String groupName) {
        this.groupName = groupName;
        return this;
    }

    /**
     * Sets the pagination marker (the {@code Marker} value from a truncated
     * response).
     *
     * @param marker where the next call should start
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the pagination marker.
     *
     * @return the marker, or null if this is the first page
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker where the next call should start
     * @return this request, for call chaining
     */
    public GetGroupRequest withMarker(String marker) {
        this.marker = marker;
        return this;
    }

    /**
     * Sets the maximum number of items to return (service default 100).
     *
     * @param maxItems the per-page item cap
     */
    public void setMaxItems(Integer maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * Returns the maximum number of items to return.
     *
     * @return the per-page item cap, or null for the service default
     */
    public Integer getMaxItems() {
        return this.maxItems;
    }

    /**
     * Fluent variant of {@link #setMaxItems(Integer)}.
     *
     * @param maxItems the per-page item cap
     * @return this request, for call chaining
     */
    public GetGroupRequest withMaxItems(Integer maxItems) {
        this.maxItems = maxItems;
        return this;
    }

    /** Null-safe field comparison used by {@link #equals(Object)}. */
    private static boolean eq(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null fields are rendered.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getGroupName() != null)
            out.append("GroupName: ").append(getGroupName()).append(",");
        if (getMarker() != null)
            out.append("Marker: ").append(getMarker()).append(",");
        if (getMaxItems() != null)
            out.append("MaxItems: ").append(getMaxItems());
        return out.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof GetGroupRequest))
            return false;
        GetGroupRequest that = (GetGroupRequest) obj;
        return eq(that.getGroupName(), this.getGroupName())
                && eq(that.getMarker(), this.getMarker())
                && eq(that.getMaxItems(), this.getMaxItems());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Same fields, same order, same accumulation as the generated form.
        for (Object field : new Object[] { getGroupName(), getMarker(), getMaxItems() }) {
            hashCode = prime * hashCode + ((field == null) ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public GetGroupRequest clone() {
        // AmazonWebServiceRequest declares a non-throwing clone(), so no
        // CloneNotSupportedException handling is needed here.
        return (GetGroupRequest) super.clone();
    }
}
/*
 * Copyright (c) 2016 acmi
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package acmi.l2.clientmod.util;

import acmi.l2.clientmod.io.UnrealPackage;
import acmi.l2.clientmod.l2smr.StaticMeshActorUtil;
import acmi.l2.clientmod.unreal.properties.PropertiesUtil.Type;
import javafx.scene.control.Alert;
import javafx.scene.control.ButtonType;
import javafx.scene.control.TextField;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static acmi.l2.clientmod.io.BufferUtil.getCompactInt;

/**
 * Mixed grab-bag of helpers: JavaFX text-field parsing, alert dialogs, file
 * filters, and low-level readers for Unreal package (.unr/.usx) binary data.
 */
public class Util {
    // Accepts regular files with the Unreal map extension (.unr).
    public static final FileFilter MAP_FILE_FILTER = pathname -> (pathname != null) && (pathname.isFile()) && (pathname.getName().endsWith(".unr"));
    // Accepts regular files with the static-mesh package extension (.usx).
    public static final FileFilter STATICMESH_FILE_FILTER = pathname -> (pathname != null) && (pathname.isFile()) && (pathname.getName().endsWith(".usx"));

    /**
     * Parses the text field as a float.
     *
     * @param textField    field to read
     * @param defaultValue returned when the text is not a valid float
     * @return parsed value, or {@code defaultValue} on parse failure
     */
    public static Float getFloat(TextField textField, Float defaultValue) {
        try {
            return Float.valueOf(textField.getText());
        } catch (NumberFormatException nfe) {
            return defaultValue;
        }
    }

    /**
     * Parses the text field as an integer.
     *
     * @param textField    field to read
     * @param defaultValue returned when the text is not a valid int
     * @return parsed value, or {@code defaultValue} on parse failure
     */
    public static Integer getInt(TextField textField, Integer defaultValue) {
        try {
            return Integer.valueOf(textField.getText());
        } catch (NumberFormatException nfe) {
            return defaultValue;
        }
    }

    /**
     * Euclidean distance from {@code loc} to the point (x, y, z), where any
     * null coordinate is simply excluded from the sum (i.e. the distance is
     * measured in the remaining dimensions only).
     *
     * @param loc location as a float[3] (x, y, z)
     * @return the (possibly reduced-dimension) distance
     */
    public static double range(float[] loc, Double x, Double y, Double z) {
        double s = 0.0;
        if (x != null)
            s += Math.pow(loc[0] - x, 2.0);
        if (y != null)
            s += Math.pow(loc[1] - y, 2.0);
        if (z != null)
            s += Math.pow(loc[2] - z, 2.0);
        return Math.sqrt(s);
    }

    /**
     * Parses the text field as a double; on failure returns null and clears
     * the field — unless the content is a lone "-", which is kept so the user
     * can continue typing a negative number.
     */
    public static Double getDoubleOrClearTextField(TextField textField) {
        try {
            return Double.valueOf(textField.getText());
        } catch (NumberFormatException nfe) {
            if (!textField.getText().equals("-"))
                textField.setText("");
            return null;
        }
    }

    /** Shows a non-blocking alert dialog with the given title/header/content. */
    public static void showAlert(Alert.AlertType alertType, String title, String header, String content) {
        Alert alert = new Alert(alertType);
        alert.setTitle(title);
        alert.setHeaderText(header);
        alert.setContentText(content);
        alert.show();
    }

    /**
     * Shows a blocking YES/NO confirmation dialog.
     *
     * @return true only if the user pressed YES (closing the dialog counts as NO)
     */
    public static boolean showConfirm(Alert.AlertType alertType, String title, String header, String content) {
        Alert alert = new Alert(alertType);
        alert.setTitle(title);
        alert.setHeaderText(header);
        alert.setContentText(content);
        alert.getButtonTypes().setAll(ButtonType.YES, ButtonType.NO);
        return ButtonType.YES == alert.showAndWait().orElse(ButtonType.NO);
    }

    /**
     * Skips over an object's state frame at the current buffer position:
     * two compact indices, a long, an int, and a final compact index.
     * NOTE(review): field meanings (node/state-node/probe-mask/latent-action/offset?)
     * are presumed from the Unreal package format — confirm against the format spec.
     *
     * @throws BufferUnderflowException if the buffer ends mid-frame
     */
    public static void readStateFrame(ByteBuffer buffer) throws BufferUnderflowException {
        getCompactInt(buffer);
        getCompactInt(buffer);
        buffer.getLong();
        buffer.getInt();
        getCompactInt(buffer);
    }

    /**
     * Walks the serialized property list at the buffer's position, invoking
     * {@code func(name, absolutePosition, valueBuffer)} for each property,
     * until the terminating "None" name is reached.
     * <p>
     * Property header byte layout (per the Unreal property format — verify):
     * bits 0-3 = type, bits 4-6 = size code, bit 7 = array flag. STRUCT
     * properties carry an extra compact index (the struct name) before the
     * size, and array properties (except BOOL) an extra index byte.
     *
     * @throws BufferUnderflowException if the data ends mid-property
     */
    public static void iterateProperties(ByteBuffer buffer, UnrealPackage up, TriConsumer<String, Integer, ByteBuffer> func) throws BufferUnderflowException {
        String name;
        while (!"None".equals(name = up.getNameTable().get(getCompactInt(buffer)).getName())) {
            byte info = buffer.get();
            Type type = Type.values()[info & 15];
            int size = (info & 112) >> 4;
            boolean array = (info & 128) == 128;
            if (type == Type.STRUCT) {
                getCompactInt(buffer);
            }
            size = StaticMeshActorUtil.getSize(size, buffer);
            if (array && type != Type.BOOL) {
                buffer.get();
            }
            byte[] obj = new byte[size];
            int pos = buffer.position();
            buffer.get(obj);
            // Value bytes are little-endian, matching the package on-disk format.
            func.accept(name, pos, ByteBuffer.wrap(obj).order(ByteOrder.LITTLE_ENDIAN));
        }
    }

    /**
     * Reads MapX/MapY from the TerrainInfo actors of the given map and packs
     * them into a single int as {@code MapX | (MapY << 8)}.
     * NOTE(review): the packing assumes MapX fits in 8 bits — presumably map
     * grid coordinates are < 256; verify against callers before relying on it.
     *
     * @throws IOException if the package cannot be opened/read
     */
    public static int getXY(File mapDir, String mapName) throws IOException {
        int[] m = new int[2];
        try (UnrealPackage up = new UnrealPackage(new File(mapDir, mapName), true)) {
            List<UnrealPackage.ExportEntry> infos = up.getExportTable().stream()
                    .filter(e -> e.getObjectClass().getObjectFullName().equals("Engine.TerrainInfo"))
                    .collect(Collectors.toList());
            for (UnrealPackage.ExportEntry e : infos) {
                byte[] staticMeshActor = e.getObjectRawData();
                ByteBuffer buffer = ByteBuffer.wrap(staticMeshActor);
                buffer.order(ByteOrder.LITTLE_ENDIAN);
                readStateFrame(buffer);
                iterateProperties(buffer, up, (name, pos, obj) -> {
                    switch (name) {
                        case "MapX":
                            m[0] = obj.getInt();
                            break;
                        case "MapY":
                            m[1] = obj.getInt();
                            break;
                    }
                });
            }
        }
        return m[0] | (m[1] << 8);
    }

    /** @return {@code indent} tab characters. */
    public static CharSequence tab(int indent) {
        StringBuilder sb = new StringBuilder(indent);
        for (int i = 0; i < indent; i++)
            sb.append('\t');
        return sb;
    }

    /** @return CRLF followed by {@code indent} tabs. */
    public static CharSequence newLine(int indent) {
        StringBuilder sb = new StringBuilder("\r\n");
        sb.append(tab(indent));
        return sb;
    }

    /** @return CRLF with no indentation. */
    public static CharSequence newLine() {
        return newLine(0);
    }

    /**
     * Returns the root cause of a throwable chain.
     * Note: does not guard against cyclic cause chains (would loop forever).
     */
    public static Throwable getTop(Throwable t) {
        while (t.getCause() != null)
            t = t.getCause();
        return t;
    }

    /**
     * Builds a case-insensitive filename predicate. With no extensions the
     * file name must equal {@code name} exactly; otherwise it must equal
     * {@code name + "." + ext} for one of the extensions.
     */
    public static Predicate<File> nameFilter(String name, String... extensions) {
        if (extensions.length == 0)
            return f -> f.getName().equalsIgnoreCase(name);

        return f -> Arrays.stream(extensions)
                .map(ext -> name + "." + ext)
                .anyMatch(fn -> f.getName().equalsIgnoreCase(fn));
    }

    /**
     * Finds any direct child of {@code folder} matching all filters.
     *
     * @return a matching file, or null when the folder is null/unlistable or
     *         nothing matches
     */
    @SafeVarargs
    public static File find(File folder, Predicate<File>... filters) {
        if (folder == null)
            return null;

        File[] children = folder.listFiles();
        if (children == null)
            return null;

        Stream<File> stream = Arrays.stream(children);
        for (Predicate<File> filter : filters)
            stream = stream.filter(filter);
        return stream
                .findAny()
                .orElse(null);
    }
}
/******************************************************************************* * Copyright (c) 2000, 2005 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.jdt.internal.compiler.impl; //import checkers.inference.ownership.quals.*;; import java.io.ByteArrayInputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.util.HashMap; import java.util.Map; import org.eclipse.jdt.core.compiler.CharOperation; import org.eclipse.jdt.internal.compiler.Compiler; import org.eclipse.jdt.internal.compiler.ast.ASTNode; import org.eclipse.jdt.internal.compiler.classfmt.ClassFileConstants; import org.eclipse.jdt.internal.compiler.problem.ProblemSeverities; public class CompilerOptions { /** * Option IDs */ public static final String OPTION_LocalVariableAttribute = "org.eclipse.jdt.core.compiler.debug.localVariable"; //$NON-NLS-1$ public static final String OPTION_LineNumberAttribute = "org.eclipse.jdt.core.compiler.debug.lineNumber"; //$NON-NLS-1$ public static final String OPTION_SourceFileAttribute = "org.eclipse.jdt.core.compiler.debug.sourceFile"; //$NON-NLS-1$ public static final String OPTION_PreserveUnusedLocal = "org.eclipse.jdt.core.compiler.codegen.unusedLocal"; //$NON-NLS-1$ public static final String OPTION_DocCommentSupport= "org.eclipse.jdt.core.compiler.doc.comment.support"; //$NON-NLS-1$ public static final String OPTION_ReportMethodWithConstructorName = "org.eclipse.jdt.core.compiler.problem.methodWithConstructorName"; //$NON-NLS-1$ public static final String OPTION_ReportOverridingPackageDefaultMethod = 
"org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod"; //$NON-NLS-1$ public static final String OPTION_ReportDeprecation = "org.eclipse.jdt.core.compiler.problem.deprecation"; //$NON-NLS-1$ public static final String OPTION_ReportDeprecationInDeprecatedCode = "org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode"; //$NON-NLS-1$ public static final String OPTION_ReportDeprecationWhenOverridingDeprecatedMethod = "org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod"; //$NON-NLS-1$ public static final String OPTION_ReportHiddenCatchBlock = "org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedLocal = "org.eclipse.jdt.core.compiler.problem.unusedLocal"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedParameter = "org.eclipse.jdt.core.compiler.problem.unusedParameter"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedParameterWhenImplementingAbstract = "org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedParameterWhenOverridingConcrete = "org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedImport = "org.eclipse.jdt.core.compiler.problem.unusedImport"; //$NON-NLS-1$ public static final String OPTION_ReportSyntheticAccessEmulation = "org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation"; //$NON-NLS-1$ public static final String OPTION_ReportNoEffectAssignment = "org.eclipse.jdt.core.compiler.problem.noEffectAssignment"; //$NON-NLS-1$ public static final String OPTION_ReportLocalVariableHiding = "org.eclipse.jdt.core.compiler.problem.localVariableHiding"; //$NON-NLS-1$ public static final String OPTION_ReportSpecialParameterHidingField = "org.eclipse.jdt.core.compiler.problem.specialParameterHidingField"; //$NON-NLS-1$ public static final String 
OPTION_ReportFieldHiding = "org.eclipse.jdt.core.compiler.problem.fieldHiding"; //$NON-NLS-1$ public static final String OPTION_ReportTypeParameterHiding = "org.eclipse.jdt.core.compiler.problem.typeParameterHiding"; //$NON-NLS-1$ public static final String OPTION_ReportPossibleAccidentalBooleanAssignment = "org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment"; //$NON-NLS-1$ public static final String OPTION_ReportNonExternalizedStringLiteral = "org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral"; //$NON-NLS-1$ public static final String OPTION_ReportIncompatibleNonInheritedInterfaceMethod = "org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedPrivateMember = "org.eclipse.jdt.core.compiler.problem.unusedPrivateMember"; //$NON-NLS-1$ public static final String OPTION_ReportNoImplicitStringConversion = "org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion"; //$NON-NLS-1$ public static final String OPTION_ReportAssertIdentifier = "org.eclipse.jdt.core.compiler.problem.assertIdentifier"; //$NON-NLS-1$ public static final String OPTION_ReportEnumIdentifier = "org.eclipse.jdt.core.compiler.problem.enumIdentifier"; //$NON-NLS-1$ public static final String OPTION_ReportNonStaticAccessToStatic = "org.eclipse.jdt.core.compiler.problem.staticAccessReceiver"; //$NON-NLS-1$ public static final String OPTION_ReportIndirectStaticAccess = "org.eclipse.jdt.core.compiler.problem.indirectStaticAccess"; //$NON-NLS-1$ public static final String OPTION_ReportEmptyStatement = "org.eclipse.jdt.core.compiler.problem.emptyStatement"; //$NON-NLS-1$ public static final String OPTION_ReportUnnecessaryTypeCheck = "org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck"; //$NON-NLS-1$ public static final String OPTION_ReportUnnecessaryElse = "org.eclipse.jdt.core.compiler.problem.unnecessaryElse"; //$NON-NLS-1$ public static final String 
OPTION_ReportUndocumentedEmptyBlock = "org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock"; //$NON-NLS-1$ public static final String OPTION_ReportInvalidJavadoc = "org.eclipse.jdt.core.compiler.problem.invalidJavadoc"; //$NON-NLS-1$ public static final String OPTION_ReportInvalidJavadocTags = "org.eclipse.jdt.core.compiler.problem.invalidJavadocTags"; //$NON-NLS-1$ public static final String OPTION_ReportInvalidJavadocTagsDeprecatedRef = "org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef"; //$NON-NLS-1$ public static final String OPTION_ReportInvalidJavadocTagsNotVisibleRef = "org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef"; //$NON-NLS-1$ public static final String OPTION_ReportInvalidJavadocTagsVisibility = "org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility"; //$NON-NLS-1$ public static final String OPTION_ReportMissingJavadocTags = "org.eclipse.jdt.core.compiler.problem.missingJavadocTags"; //$NON-NLS-1$ public static final String OPTION_ReportMissingJavadocTagsVisibility = "org.eclipse.jdt.core.compiler.problem.missingJavadocTagsVisibility"; //$NON-NLS-1$ public static final String OPTION_ReportMissingJavadocTagsOverriding = "org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding"; //$NON-NLS-1$ public static final String OPTION_ReportMissingJavadocComments = "org.eclipse.jdt.core.compiler.problem.missingJavadocComments"; //$NON-NLS-1$ public static final String OPTION_ReportMissingJavadocCommentsVisibility = "org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility"; //$NON-NLS-1$ public static final String OPTION_ReportMissingJavadocCommentsOverriding = "org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding"; //$NON-NLS-1$ public static final String OPTION_ReportFinallyBlockNotCompletingNormally = "org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally"; //$NON-NLS-1$ public static final String 
OPTION_ReportUnusedDeclaredThrownException = "org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedDeclaredThrownExceptionWhenOverriding = "org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding"; //$NON-NLS-1$ public static final String OPTION_ReportUnqualifiedFieldAccess = "org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess"; //$NON-NLS-1$ public static final String OPTION_ReportUncheckedTypeOperation = "org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation"; //$NON-NLS-1$ public static final String OPTION_ReportRawTypeReference = "org.eclipse.jdt.core.compiler.problem.rawTypeReference"; //$NON-NLS-1$ public static final String OPTION_ReportFinalParameterBound = "org.eclipse.jdt.core.compiler.problem.finalParameterBound"; //$NON-NLS-1$ public static final String OPTION_ReportMissingSerialVersion = "org.eclipse.jdt.core.compiler.problem.missingSerialVersion"; //$NON-NLS-1$ public static final String OPTION_ReportVarargsArgumentNeedCast = "org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast"; //$NON-NLS-1$ public static final String OPTION_Source = "org.eclipse.jdt.core.compiler.source"; //$NON-NLS-1$ public static final String OPTION_TargetPlatform = "org.eclipse.jdt.core.compiler.codegen.targetPlatform"; //$NON-NLS-1$ public static final String OPTION_Compliance = "org.eclipse.jdt.core.compiler.compliance"; //$NON-NLS-1$ public static final String OPTION_Encoding = "org.eclipse.jdt.core.encoding"; //$NON-NLS-1$ public static final String OPTION_MaxProblemPerUnit = "org.eclipse.jdt.core.compiler.maxProblemPerUnit"; //$NON-NLS-1$ public static final String OPTION_TaskTags = "org.eclipse.jdt.core.compiler.taskTags"; //$NON-NLS-1$ public static final String OPTION_TaskPriorities = "org.eclipse.jdt.core.compiler.taskPriorities"; //$NON-NLS-1$ public static final String OPTION_TaskCaseSensitive = 
"org.eclipse.jdt.core.compiler.taskCaseSensitive"; //$NON-NLS-1$ public static final String OPTION_InlineJsr = "org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode"; //$NON-NLS-1$ public static final String OPTION_ReportNullReference = "org.eclipse.jdt.core.compiler.problem.nullReference"; //$NON-NLS-1$ public static final String OPTION_ReportAutoboxing = "org.eclipse.jdt.core.compiler.problem.autoboxing"; //$NON-NLS-1$ public static final String OPTION_ReportAnnotationSuperInterface = "org.eclipse.jdt.core.compiler.problem.annotationSuperInterface"; //$NON-NLS-1$ public static final String OPTION_ReportMissingOverrideAnnotation = "org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation"; //$NON-NLS-1$ public static final String OPTION_ReportMissingDeprecatedAnnotation = "org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation"; //$NON-NLS-1$ public static final String OPTION_ReportIncompleteEnumSwitch = "org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch"; //$NON-NLS-1$ public static final String OPTION_ReportForbiddenReference = "org.eclipse.jdt.core.compiler.problem.forbiddenReference"; //$NON-NLS-1$ public static final String OPTION_ReportDiscouragedReference = "org.eclipse.jdt.core.compiler.problem.discouragedReference"; //$NON-NLS-1$ public static final String OPTION_SuppressWarnings = "org.eclipse.jdt.core.compiler.problem.suppressWarnings"; //$NON-NLS-1$ public static final String OPTION_ReportUnhandledWarningToken = "org.eclipse.jdt.core.compiler.problem.unhandledWarningToken"; //$NON-NLS-1$ public static final String OPTION_ReportUnusedLabel = "org.eclipse.jdt.core.compiler.problem.unusedLabel"; //$NON-NLS-1$ public static final String OPTION_FatalOptionalError = "org.eclipse.jdt.core.compiler.problem.fatalOptionalError"; //$NON-NLS-1$ public static final String OPTION_ReportParameterAssignment = "org.eclipse.jdt.core.compiler.problem.parameterAssignment"; //$NON-NLS-1$ // Backward compatibility public static final String 
OPTION_ReportInvalidAnnotation = "org.eclipse.jdt.core.compiler.problem.invalidAnnotation"; //$NON-NLS-1$ public static final String OPTION_ReportMissingAnnotation = "org.eclipse.jdt.core.compiler.problem.missingAnnotation"; //$NON-NLS-1$ public static final String OPTION_ReportMissingJavadoc = "org.eclipse.jdt.core.compiler.problem.missingJavadoc"; //$NON-NLS-1$ /* should surface ??? */ public static final String OPTION_PrivateConstructorAccess = "org.eclipse.jdt.core.compiler.codegen.constructorAccessEmulation"; //$NON-NLS-1$ /** * Possible values for configurable options */ public static final String GENERATE = "generate";//$NON-NLS-1$ public static final String DO_NOT_GENERATE = "do not generate"; //$NON-NLS-1$ public static final String PRESERVE = "preserve"; //$NON-NLS-1$ public static final String OPTIMIZE_OUT = "optimize out"; //$NON-NLS-1$ public static final String VERSION_1_1 = "1.1"; //$NON-NLS-1$ public static final String VERSION_1_2 = "1.2"; //$NON-NLS-1$ public static final String VERSION_1_3 = "1.3"; //$NON-NLS-1$ public static final String VERSION_1_4 = "1.4"; //$NON-NLS-1$ public static final String VERSION_1_5 = "1.5"; //$NON-NLS-1$ public static final String VERSION_1_6 = "1.6"; //$NON-NLS-1$ public static final String ERROR = "error"; //$NON-NLS-1$ public static final String WARNING = "warning"; //$NON-NLS-1$ public static final String IGNORE = "ignore"; //$NON-NLS-1$ public static final String ENABLED = "enabled"; //$NON-NLS-1$ public static final String DISABLED = "disabled"; //$NON-NLS-1$ public static final String PUBLIC = "public"; //$NON-NLS-1$ public static final String PROTECTED = "protected"; //$NON-NLS-1$ public static final String DEFAULT = "default"; //$NON-NLS-1$ public static final String PRIVATE = "private"; //$NON-NLS-1$ /** * Bit mask for configurable problems (error/warning threshold) */ public static final long MethodWithConstructorName = ASTNode.Bit1; public static final long OverriddenPackageDefaultMethod = ASTNode.Bit2; 
	// -----------------------------------------------------------------
	// Bit masks for configurable problems, continued (Bit1/Bit2 are
	// declared above).  Each irritant occupies one bit of the
	// errorThreshold / warningThreshold masks below; bits 33..48 use the
	// long-valued ASTNode.BitNNL constants, which is why the thresholds
	// are declared as long.
	// -----------------------------------------------------------------
	public static final long UsingDeprecatedAPI = ASTNode.Bit3;
	public static final long MaskedCatchBlock = ASTNode.Bit4;
	public static final long UnusedLocalVariable = ASTNode.Bit5;
	public static final long UnusedArgument = ASTNode.Bit6;
	public static final long NoImplicitStringConversion = ASTNode.Bit7;
	public static final long AccessEmulation = ASTNode.Bit8;
	public static final long NonExternalizedString = ASTNode.Bit9;
	public static final long AssertUsedAsAnIdentifier = ASTNode.Bit10;
	public static final long UnusedImport = ASTNode.Bit11;
	public static final long NonStaticAccessToStatic = ASTNode.Bit12;
	public static final long Task = ASTNode.Bit13;
	public static final long NoEffectAssignment = ASTNode.Bit14;
	public static final long IncompatibleNonInheritedInterfaceMethod = ASTNode.Bit15;
	public static final long UnusedPrivateMember = ASTNode.Bit16;
	public static final long LocalVariableHiding = ASTNode.Bit17;
	public static final long FieldHiding = ASTNode.Bit18;
	public static final long AccidentalBooleanAssign = ASTNode.Bit19;
	public static final long EmptyStatement = ASTNode.Bit20;
	public static final long MissingJavadocComments = ASTNode.Bit21;
	public static final long MissingJavadocTags = ASTNode.Bit22;
	public static final long UnqualifiedFieldAccess = ASTNode.Bit23;
	public static final long UnusedDeclaredThrownException = ASTNode.Bit24;
	public static final long FinallyBlockNotCompleting = ASTNode.Bit25;
	public static final long InvalidJavadoc = ASTNode.Bit26;
	public static final long UnnecessaryTypeCheck = ASTNode.Bit27;
	public static final long UndocumentedEmptyBlock = ASTNode.Bit28;
	public static final long IndirectStaticAccess = ASTNode.Bit29;
	public static final long UnnecessaryElse = ASTNode.Bit30;
	public static final long UncheckedTypeOperation = ASTNode.Bit31;
	public static final long FinalParameterBound = ASTNode.Bit32L;
	public static final long MissingSerialVersion = ASTNode.Bit33L;
	public static final long EnumUsedAsAnIdentifier = ASTNode.Bit34L;
	public static final long ForbiddenReference = ASTNode.Bit35L;
	public static final long VarargsArgumentNeedCast = ASTNode.Bit36L;
	public static final long NullReference = ASTNode.Bit37L;
	public static final long AutoBoxing = ASTNode.Bit38L;
	public static final long AnnotationSuperInterface = ASTNode.Bit39L;
	public static final long TypeParameterHiding = ASTNode.Bit40L;
	public static final long MissingOverrideAnnotation = ASTNode.Bit41L;
	public static final long IncompleteEnumSwitch = ASTNode.Bit42L;
	public static final long MissingDeprecatedAnnotation = ASTNode.Bit43L;
	public static final long DiscouragedReference = ASTNode.Bit44L;
	public static final long UnhandledWarningToken = ASTNode.Bit45L;
	public static final long RawTypeReference = ASTNode.Bit46L;
	public static final long UnusedLabel = ASTNode.Bit47L;
	public static final long ParameterAssignment = ASTNode.Bit48L;

	// Default severity level for handlers
	// No irritant is reported as an error by default.
	public long errorThreshold = 0;
	// Irritants reported as warnings by default.  NullReference is
	// deliberately commented out of this mask, i.e. ignored by default.
	public long warningThreshold = 
		MethodWithConstructorName | UsingDeprecatedAPI | MaskedCatchBlock 
		| OverriddenPackageDefaultMethod | UnusedImport | NonStaticAccessToStatic 
		| NoEffectAssignment | IncompatibleNonInheritedInterfaceMethod
		| NoImplicitStringConversion | FinallyBlockNotCompleting 
		| AssertUsedAsAnIdentifier | EnumUsedAsAnIdentifier | UncheckedTypeOperation 
		| MissingSerialVersion | VarargsArgumentNeedCast | ForbiddenReference
		| DiscouragedReference | AnnotationSuperInterface | TypeParameterHiding | FinalParameterBound
		| UnhandledWarningToken | UnusedLocalVariable | UnusedPrivateMember | UnusedLabel
		/*| NullReference*/;

	// Debug attributes: bit flags combined into produceDebugAttributes below.
	public static final int Source = 1; // SourceFileAttribute
	public static final int Lines = 2; // LineNumberAttribute
	public static final int Vars = 4; // LocalVariableTableAttribute

	// By default only lines and source attributes are generated.
public int produceDebugAttributes = Lines | Source; public long complianceLevel = ClassFileConstants.JDK1_4; // by default be compliant with 1.4 public long sourceLevel = ClassFileConstants.JDK1_3; //1.3 source behavior by default public long targetJDK = ClassFileConstants.JDK1_2; // default generates for JVM1.2 // toggle private access emulation for 1.2 (constr. accessor has extra arg on constructor) or 1.3 (make private constructor default access when access needed) public boolean isPrivateConstructorAccessChangingVisibility = false; // by default, follows 1.2 // source encoding format public String defaultEncoding = null; // will use the platform default encoding // print what unit is being processed public boolean verbose = Compiler.DEBUG; // indicates if reference info is desired public boolean produceReferenceInfo = false; // indicates if unused/optimizable local variables need to be preserved (debugging purpose) public boolean preserveAllLocalVariables = false; // indicates whether literal expressions are inlined at parse-time or not public boolean parseLiteralExpressionsAsConstants = true; // max problems per compilation unit public int maxProblemsPerUnit = 100; // no more than 100 problems per default // tags used to recognize tasks in comments public char[][] taskTags = null; public char[][] taskPriorites = null; public boolean isTaskCaseSensitive = true; // deprecation report public boolean reportDeprecationInsideDeprecatedCode = false; public boolean reportDeprecationWhenOverridingDeprecatedMethod = false; // unused parameters report public boolean reportUnusedParameterWhenImplementingAbstract = false; public boolean reportUnusedParameterWhenOverridingConcrete = false; // unused declaration of thrown exception public boolean reportUnusedDeclaredThrownExceptionWhenOverriding = false; // constructor/setter parameter hiding public boolean reportSpecialParameterHidingField = false; // check javadoc comments tags public int reportInvalidJavadocTagsVisibility 
= ClassFileConstants.AccPublic; public boolean reportInvalidJavadocTags = false; public boolean reportInvalidJavadocTagsDeprecatedRef = false; public boolean reportInvalidJavadocTagsNotVisibleRef = false; // check missing javadoc tags public int reportMissingJavadocTagsVisibility = ClassFileConstants.AccPublic; public boolean reportMissingJavadocTagsOverriding = false; // check missing javadoc comments public int reportMissingJavadocCommentsVisibility = ClassFileConstants.AccPublic; public boolean reportMissingJavadocCommentsOverriding = false; // JSR bytecode inlining public boolean inlineJsrBytecode = false; // javadoc comment support public boolean docCommentSupport = false; // suppress warning annotation public boolean suppressWarnings = true; // treat optional error as fatal or just like warning? public boolean treatOptionalErrorAsFatal = true; /** * Initializing the compiler options with defaults */ public CompilerOptions(){ // use default options } /** * Initializing the compiler options with external settings * @param settings */ public CompilerOptions(Map settings){ if (settings == null) return; set(settings); } public Map getMap() { /*@NoRep*/ Map</*@RepRep*/ Object, /*@RepRep*/ Object> optionsMap = new HashMap(30); optionsMap.put(OPTION_LocalVariableAttribute, (this.produceDebugAttributes & Vars) != 0 ? GENERATE : DO_NOT_GENERATE); optionsMap.put(OPTION_LineNumberAttribute, (this.produceDebugAttributes & Lines) != 0 ? GENERATE : DO_NOT_GENERATE); optionsMap.put(OPTION_SourceFileAttribute, (this.produceDebugAttributes & Source) != 0 ? GENERATE : DO_NOT_GENERATE); optionsMap.put(OPTION_PreserveUnusedLocal, this.preserveAllLocalVariables ? PRESERVE : OPTIMIZE_OUT); optionsMap.put(OPTION_DocCommentSupport, this.docCommentSupport ? 
ENABLED : DISABLED); optionsMap.put(OPTION_ReportMethodWithConstructorName, getSeverityString(MethodWithConstructorName)); optionsMap.put(OPTION_ReportOverridingPackageDefaultMethod, getSeverityString(OverriddenPackageDefaultMethod)); optionsMap.put(OPTION_ReportDeprecation, getSeverityString(UsingDeprecatedAPI)); optionsMap.put(OPTION_ReportDeprecationInDeprecatedCode, this.reportDeprecationInsideDeprecatedCode ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportDeprecationWhenOverridingDeprecatedMethod, this.reportDeprecationWhenOverridingDeprecatedMethod ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportHiddenCatchBlock, getSeverityString(MaskedCatchBlock)); optionsMap.put(OPTION_ReportUnusedLocal, getSeverityString(UnusedLocalVariable)); optionsMap.put(OPTION_ReportUnusedParameter, getSeverityString(UnusedArgument)); optionsMap.put(OPTION_ReportUnusedImport, getSeverityString(UnusedImport)); optionsMap.put(OPTION_ReportSyntheticAccessEmulation, getSeverityString(AccessEmulation)); optionsMap.put(OPTION_ReportNoEffectAssignment, getSeverityString(NoEffectAssignment)); optionsMap.put(OPTION_ReportNonExternalizedStringLiteral, getSeverityString(NonExternalizedString)); optionsMap.put(OPTION_ReportNoImplicitStringConversion, getSeverityString(NoImplicitStringConversion)); optionsMap.put(OPTION_ReportNonStaticAccessToStatic, getSeverityString(NonStaticAccessToStatic)); optionsMap.put(OPTION_ReportIndirectStaticAccess, getSeverityString(IndirectStaticAccess)); optionsMap.put(OPTION_ReportIncompatibleNonInheritedInterfaceMethod, getSeverityString(IncompatibleNonInheritedInterfaceMethod)); optionsMap.put(OPTION_ReportUnusedPrivateMember, getSeverityString(UnusedPrivateMember)); optionsMap.put(OPTION_ReportLocalVariableHiding, getSeverityString(LocalVariableHiding)); optionsMap.put(OPTION_ReportFieldHiding, getSeverityString(FieldHiding)); optionsMap.put(OPTION_ReportTypeParameterHiding, getSeverityString(TypeParameterHiding)); 
optionsMap.put(OPTION_ReportPossibleAccidentalBooleanAssignment, getSeverityString(AccidentalBooleanAssign)); optionsMap.put(OPTION_ReportEmptyStatement, getSeverityString(EmptyStatement)); optionsMap.put(OPTION_ReportAssertIdentifier, getSeverityString(AssertUsedAsAnIdentifier)); optionsMap.put(OPTION_ReportEnumIdentifier, getSeverityString(EnumUsedAsAnIdentifier)); optionsMap.put(OPTION_ReportUndocumentedEmptyBlock, getSeverityString(UndocumentedEmptyBlock)); optionsMap.put(OPTION_ReportUnnecessaryTypeCheck, getSeverityString(UnnecessaryTypeCheck)); optionsMap.put(OPTION_ReportUnnecessaryElse, getSeverityString(UnnecessaryElse)); optionsMap.put(OPTION_ReportAutoboxing, getSeverityString(AutoBoxing)); optionsMap.put(OPTION_ReportAnnotationSuperInterface, getSeverityString(AnnotationSuperInterface)); optionsMap.put(OPTION_ReportIncompleteEnumSwitch, getSeverityString(IncompleteEnumSwitch)); optionsMap.put(OPTION_ReportInvalidJavadoc, getSeverityString(InvalidJavadoc)); optionsMap.put(OPTION_ReportInvalidJavadocTagsVisibility, getVisibilityString(this.reportInvalidJavadocTagsVisibility)); optionsMap.put(OPTION_ReportInvalidJavadocTags, this.reportInvalidJavadocTags ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportInvalidJavadocTagsDeprecatedRef, this.reportInvalidJavadocTagsDeprecatedRef ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportInvalidJavadocTagsNotVisibleRef, this.reportInvalidJavadocTagsNotVisibleRef ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportMissingJavadocTags, getSeverityString(MissingJavadocTags)); optionsMap.put(OPTION_ReportMissingJavadocTagsVisibility, getVisibilityString(this.reportMissingJavadocTagsVisibility)); optionsMap.put(OPTION_ReportMissingJavadocTagsOverriding, this.reportMissingJavadocTagsOverriding ? 
ENABLED : DISABLED); optionsMap.put(OPTION_ReportMissingJavadocComments, getSeverityString(MissingJavadocComments)); optionsMap.put(OPTION_ReportMissingJavadocCommentsVisibility, getVisibilityString(this.reportMissingJavadocCommentsVisibility)); optionsMap.put(OPTION_ReportMissingJavadocCommentsOverriding, this.reportMissingJavadocCommentsOverriding ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportFinallyBlockNotCompletingNormally, getSeverityString(FinallyBlockNotCompleting)); optionsMap.put(OPTION_ReportUnusedDeclaredThrownException, getSeverityString(UnusedDeclaredThrownException)); optionsMap.put(OPTION_ReportUnusedDeclaredThrownExceptionWhenOverriding, this.reportUnusedDeclaredThrownExceptionWhenOverriding ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportUnqualifiedFieldAccess, getSeverityString(UnqualifiedFieldAccess)); optionsMap.put(OPTION_ReportUncheckedTypeOperation, getSeverityString(UncheckedTypeOperation)); optionsMap.put(OPTION_ReportRawTypeReference, getSeverityString(RawTypeReference)); optionsMap.put(OPTION_ReportFinalParameterBound, getSeverityString(FinalParameterBound)); optionsMap.put(OPTION_ReportMissingSerialVersion, getSeverityString(MissingSerialVersion)); optionsMap.put(OPTION_ReportForbiddenReference, getSeverityString(ForbiddenReference)); optionsMap.put(OPTION_ReportDiscouragedReference, getSeverityString(DiscouragedReference)); optionsMap.put(OPTION_ReportVarargsArgumentNeedCast, getSeverityString(VarargsArgumentNeedCast)); optionsMap.put(OPTION_ReportMissingOverrideAnnotation, getSeverityString(MissingOverrideAnnotation)); optionsMap.put(OPTION_ReportMissingDeprecatedAnnotation, getSeverityString(MissingDeprecatedAnnotation)); optionsMap.put(OPTION_ReportIncompleteEnumSwitch, getSeverityString(IncompleteEnumSwitch)); optionsMap.put(OPTION_ReportUnusedLabel, getSeverityString(UnusedLabel)); optionsMap.put(OPTION_Compliance, versionFromJdkLevel(this.complianceLevel)); optionsMap.put(OPTION_Source, 
versionFromJdkLevel(this.sourceLevel)); optionsMap.put(OPTION_TargetPlatform, versionFromJdkLevel(this.targetJDK)); optionsMap.put(OPTION_FatalOptionalError, this.treatOptionalErrorAsFatal ? ENABLED : DISABLED); if (this.defaultEncoding != null) { optionsMap.put(OPTION_Encoding, this.defaultEncoding); } optionsMap.put(OPTION_TaskTags, this.taskTags == null ? "" : new String(CharOperation.concatWith(this.taskTags,','))); //$NON-NLS-1$ optionsMap.put(OPTION_TaskPriorities, this.taskPriorites == null ? "" : new String(CharOperation.concatWith(this.taskPriorites,','))); //$NON-NLS-1$ optionsMap.put(OPTION_TaskCaseSensitive, this.isTaskCaseSensitive ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportUnusedParameterWhenImplementingAbstract, this.reportUnusedParameterWhenImplementingAbstract ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportUnusedParameterWhenOverridingConcrete, this.reportUnusedParameterWhenOverridingConcrete ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportSpecialParameterHidingField, this.reportSpecialParameterHidingField ? ENABLED : DISABLED); optionsMap.put(OPTION_MaxProblemPerUnit, String.valueOf(this.maxProblemsPerUnit)); optionsMap.put(OPTION_InlineJsr, this.inlineJsrBytecode ? ENABLED : DISABLED); optionsMap.put(OPTION_ReportNullReference, getSeverityString(NullReference)); optionsMap.put(OPTION_SuppressWarnings, this.suppressWarnings ? 
ENABLED : DISABLED); optionsMap.put(OPTION_ReportUnhandledWarningToken, getSeverityString(UnhandledWarningToken)); optionsMap.put(OPTION_ReportParameterAssignment, getSeverityString(ParameterAssignment)); return optionsMap; } public int getSeverity(long irritant) { if((this.errorThreshold & irritant) != 0) return ProblemSeverities.Error | ProblemSeverities.Optional; if((this.warningThreshold & irritant) != 0) return ProblemSeverities.Warning | ProblemSeverities.Optional; return ProblemSeverities.Ignore; } public String getSeverityString(long irritant) { if((this.warningThreshold & irritant) != 0) return WARNING; if((this.errorThreshold & irritant) != 0) return ERROR; return IGNORE; } public String getVisibilityString(int level) { switch (level) { case ClassFileConstants.AccPublic: return PUBLIC; case ClassFileConstants.AccProtected: return PROTECTED; case ClassFileConstants.AccPrivate: return PRIVATE; default: return DEFAULT; } } public void set(/*@NoRep*/ Map</*@RepRep*/ Object, /*@RepRep*/ Object> optionsMap) { Object optionValue; if ((optionValue = optionsMap.get(OPTION_LocalVariableAttribute)) != null) { if (GENERATE.equals(optionValue)) { this.produceDebugAttributes |= Vars; } else if (DO_NOT_GENERATE.equals(optionValue)) { this.produceDebugAttributes &= ~Vars; } } if ((optionValue = optionsMap.get(OPTION_LineNumberAttribute)) != null) { if (GENERATE.equals(optionValue)) { this.produceDebugAttributes |= Lines; } else if (DO_NOT_GENERATE.equals(optionValue)) { this.produceDebugAttributes &= ~Lines; } } if ((optionValue = optionsMap.get(OPTION_SourceFileAttribute)) != null) { if (GENERATE.equals(optionValue)) { this.produceDebugAttributes |= Source; } else if (DO_NOT_GENERATE.equals(optionValue)) { this.produceDebugAttributes &= ~Source; } } if ((optionValue = optionsMap.get(OPTION_PreserveUnusedLocal)) != null) { if (PRESERVE.equals(optionValue)) { this.preserveAllLocalVariables = true; } else if (OPTIMIZE_OUT.equals(optionValue)) { 
this.preserveAllLocalVariables = false; } } if ((optionValue = optionsMap.get(OPTION_ReportDeprecationInDeprecatedCode)) != null) { if (ENABLED.equals(optionValue)) { this.reportDeprecationInsideDeprecatedCode = true; } else if (DISABLED.equals(optionValue)) { this.reportDeprecationInsideDeprecatedCode = false; } } if ((optionValue = optionsMap.get(OPTION_ReportDeprecationWhenOverridingDeprecatedMethod)) != null) { if (ENABLED.equals(optionValue)) { this.reportDeprecationWhenOverridingDeprecatedMethod = true; } else if (DISABLED.equals(optionValue)) { this.reportDeprecationWhenOverridingDeprecatedMethod = false; } } if ((optionValue = optionsMap.get(OPTION_ReportUnusedDeclaredThrownExceptionWhenOverriding)) != null) { if (ENABLED.equals(optionValue)) { this.reportUnusedDeclaredThrownExceptionWhenOverriding = true; } else if (DISABLED.equals(optionValue)) { this.reportUnusedDeclaredThrownExceptionWhenOverriding = false; } } if ((optionValue = optionsMap.get(OPTION_Compliance)) != null) { long level = versionToJdkLevel(optionValue); if (level != 0) this.complianceLevel = level; } if ((optionValue = optionsMap.get(OPTION_Source)) != null) { long level = versionToJdkLevel(optionValue); if (level != 0) this.sourceLevel = level; } if ((optionValue = optionsMap.get(OPTION_TargetPlatform)) != null) { long level = versionToJdkLevel(optionValue); if (level != 0) this.targetJDK = level; if (this.targetJDK >= ClassFileConstants.JDK1_5) this.inlineJsrBytecode = true; // forced in 1.5 mode } if ((optionValue = optionsMap.get(OPTION_Encoding)) != null) { if (optionValue instanceof String) { this.defaultEncoding = null; String stringValue = (String) optionValue; if (stringValue.length() > 0){ try { new InputStreamReader(new ByteArrayInputStream(new byte[0]), stringValue); this.defaultEncoding = stringValue; } catch(UnsupportedEncodingException e){ // ignore unsupported encoding } } } } if ((optionValue = optionsMap.get(OPTION_PrivateConstructorAccess)) != null) { long level = 
versionToJdkLevel(optionValue); if (level >= ClassFileConstants.JDK1_3) this.isPrivateConstructorAccessChangingVisibility = true; } if ((optionValue = optionsMap.get(OPTION_ReportUnusedParameterWhenImplementingAbstract)) != null) { if (ENABLED.equals(optionValue)) { this.reportUnusedParameterWhenImplementingAbstract = true; } else if (DISABLED.equals(optionValue)) { this.reportUnusedParameterWhenImplementingAbstract = false; } } if ((optionValue = optionsMap.get(OPTION_ReportUnusedParameterWhenOverridingConcrete)) != null) { if (ENABLED.equals(optionValue)) { this.reportUnusedParameterWhenOverridingConcrete = true; } else if (DISABLED.equals(optionValue)) { this.reportUnusedParameterWhenOverridingConcrete = false; } } if ((optionValue = optionsMap.get(OPTION_ReportSpecialParameterHidingField)) != null) { if (ENABLED.equals(optionValue)) { this.reportSpecialParameterHidingField = true; } else if (DISABLED.equals(optionValue)) { this.reportSpecialParameterHidingField = false; } } if ((optionValue = optionsMap.get(OPTION_MaxProblemPerUnit)) != null) { if (optionValue instanceof String) { String stringValue = (String) optionValue; try { int val = Integer.parseInt(stringValue); if (val >= 0) this.maxProblemsPerUnit = val; } catch(NumberFormatException e){ // ignore ill-formatted limit } } } if ((optionValue = optionsMap.get(OPTION_TaskTags)) != null) { if (optionValue instanceof String) { String stringValue = (String) optionValue; if (stringValue.length() == 0) { this.taskTags = null; } else { this.taskTags = CharOperation.splitAndTrimOn(',', stringValue.toCharArray()); } } } if ((optionValue = optionsMap.get(OPTION_TaskPriorities)) != null) { if (optionValue instanceof String) { String stringValue = (String) optionValue; if (stringValue.length() == 0) { this.taskPriorites = null; } else { this.taskPriorites = CharOperation.splitAndTrimOn(',', stringValue.toCharArray()); } } } if ((optionValue = optionsMap.get(OPTION_TaskCaseSensitive)) != null) { if 
(ENABLED.equals(optionValue)) { this.isTaskCaseSensitive = true; } else if (DISABLED.equals(optionValue)) { this.isTaskCaseSensitive = false; } } if ((optionValue = optionsMap.get(OPTION_InlineJsr)) != null) { if (this.targetJDK < ClassFileConstants.JDK1_5) { // only optional if target < 1.5 (inlining on from 1.5 on) if (ENABLED.equals(optionValue)) { this.inlineJsrBytecode = true; } else if (DISABLED.equals(optionValue)) { this.inlineJsrBytecode = false; } } } if ((optionValue = optionsMap.get(OPTION_SuppressWarnings)) != null) { if (ENABLED.equals(optionValue)) { this.suppressWarnings = true; } else if (DISABLED.equals(optionValue)) { this.suppressWarnings = false; } } if ((optionValue = optionsMap.get(OPTION_FatalOptionalError)) != null) { if (ENABLED.equals(optionValue)) { this.treatOptionalErrorAsFatal = true; } else if (DISABLED.equals(optionValue)) { this.treatOptionalErrorAsFatal = false; } } if ((optionValue = optionsMap.get(OPTION_ReportMethodWithConstructorName)) != null) updateSeverity(MethodWithConstructorName, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportOverridingPackageDefaultMethod)) != null) updateSeverity(OverriddenPackageDefaultMethod, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportDeprecation)) != null) updateSeverity(UsingDeprecatedAPI, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportHiddenCatchBlock)) != null) updateSeverity(MaskedCatchBlock, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnusedLocal)) != null) updateSeverity(UnusedLocalVariable, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnusedParameter)) != null) updateSeverity(UnusedArgument, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnusedImport)) != null) updateSeverity(UnusedImport, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnusedPrivateMember)) != null) updateSeverity(UnusedPrivateMember, optionValue); if ((optionValue = 
optionsMap.get(OPTION_ReportUnusedDeclaredThrownException)) != null) updateSeverity(UnusedDeclaredThrownException, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportNoImplicitStringConversion)) != null) updateSeverity(NoImplicitStringConversion, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportSyntheticAccessEmulation)) != null) updateSeverity(AccessEmulation, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportLocalVariableHiding)) != null) updateSeverity(LocalVariableHiding, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportFieldHiding)) != null) updateSeverity(FieldHiding, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportTypeParameterHiding)) != null) updateSeverity(TypeParameterHiding, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportPossibleAccidentalBooleanAssignment)) != null) updateSeverity(AccidentalBooleanAssign, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportEmptyStatement)) != null) updateSeverity(EmptyStatement, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportNonExternalizedStringLiteral)) != null) updateSeverity(NonExternalizedString, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportAssertIdentifier)) != null) updateSeverity(AssertUsedAsAnIdentifier, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportEnumIdentifier)) != null) updateSeverity(EnumUsedAsAnIdentifier, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportNonStaticAccessToStatic)) != null) updateSeverity(NonStaticAccessToStatic, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportIndirectStaticAccess)) != null) updateSeverity(IndirectStaticAccess, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportIncompatibleNonInheritedInterfaceMethod)) != null) updateSeverity(IncompatibleNonInheritedInterfaceMethod, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUndocumentedEmptyBlock)) != null) updateSeverity(UndocumentedEmptyBlock, optionValue); if 
((optionValue = optionsMap.get(OPTION_ReportUnnecessaryTypeCheck)) != null) updateSeverity(UnnecessaryTypeCheck, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnnecessaryElse)) != null) updateSeverity(UnnecessaryElse, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportFinallyBlockNotCompletingNormally)) != null) updateSeverity(FinallyBlockNotCompleting, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnqualifiedFieldAccess)) != null) updateSeverity(UnqualifiedFieldAccess, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportNoEffectAssignment)) != null) updateSeverity(NoEffectAssignment, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUncheckedTypeOperation)) != null) updateSeverity(UncheckedTypeOperation, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportRawTypeReference)) != null) updateSeverity(RawTypeReference, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportFinalParameterBound)) != null) updateSeverity(FinalParameterBound, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportMissingSerialVersion)) != null) updateSeverity(MissingSerialVersion, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportForbiddenReference)) != null) updateSeverity(ForbiddenReference, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportDiscouragedReference)) != null) updateSeverity(DiscouragedReference, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportVarargsArgumentNeedCast)) != null) updateSeverity(VarargsArgumentNeedCast, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportNullReference)) != null) updateSeverity(NullReference, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportAutoboxing)) != null) updateSeverity(AutoBoxing, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportAnnotationSuperInterface)) != null) updateSeverity(AnnotationSuperInterface, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportMissingOverrideAnnotation)) 
!= null) updateSeverity(MissingOverrideAnnotation, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportMissingDeprecatedAnnotation)) != null) updateSeverity(MissingDeprecatedAnnotation, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportIncompleteEnumSwitch)) != null) updateSeverity(IncompleteEnumSwitch, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnhandledWarningToken)) != null) updateSeverity(UnhandledWarningToken, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportUnusedLabel)) != null) updateSeverity(UnusedLabel, optionValue); if ((optionValue = optionsMap.get(OPTION_ReportParameterAssignment)) != null) updateSeverity(ParameterAssignment, optionValue); // Javadoc options if ((optionValue = optionsMap.get(OPTION_DocCommentSupport)) != null) { if (ENABLED.equals(optionValue)) { this.docCommentSupport = true; } else if (DISABLED.equals(optionValue)) { this.docCommentSupport = false; } } if ((optionValue = optionsMap.get(OPTION_ReportInvalidJavadoc)) != null) { updateSeverity(InvalidJavadoc, optionValue); } if ( (optionValue = optionsMap.get(OPTION_ReportInvalidJavadocTagsVisibility)) != null) { if (PUBLIC.equals(optionValue)) { this.reportInvalidJavadocTagsVisibility = ClassFileConstants.AccPublic; } else if (PROTECTED.equals(optionValue)) { this.reportInvalidJavadocTagsVisibility = ClassFileConstants.AccProtected; } else if (DEFAULT.equals(optionValue)) { this.reportInvalidJavadocTagsVisibility = ClassFileConstants.AccDefault; } else if (PRIVATE.equals(optionValue)) { this.reportInvalidJavadocTagsVisibility = ClassFileConstants.AccPrivate; } } if ((optionValue = optionsMap.get(OPTION_ReportInvalidJavadocTags)) != null) { if (ENABLED.equals(optionValue)) { this.reportInvalidJavadocTags = true; } else if (DISABLED.equals(optionValue)) { this.reportInvalidJavadocTags = false; } } if ((optionValue = optionsMap.get(OPTION_ReportInvalidJavadocTagsDeprecatedRef)) != null) { if (ENABLED.equals(optionValue)) { 
this.reportInvalidJavadocTagsDeprecatedRef = true; } else if (DISABLED.equals(optionValue)) { this.reportInvalidJavadocTagsDeprecatedRef = false; } } if ((optionValue = optionsMap.get(OPTION_ReportInvalidJavadocTagsNotVisibleRef)) != null) { if (ENABLED.equals(optionValue)) { this.reportInvalidJavadocTagsNotVisibleRef = true; } else if (DISABLED.equals(optionValue)) { this.reportInvalidJavadocTagsNotVisibleRef = false; } } if ((optionValue = optionsMap.get(OPTION_ReportMissingJavadocTags)) != null) { updateSeverity(MissingJavadocTags, optionValue); } if ((optionValue = optionsMap.get(OPTION_ReportMissingJavadocTagsVisibility)) != null) { if (PUBLIC.equals(optionValue)) { this.reportMissingJavadocTagsVisibility = ClassFileConstants.AccPublic; } else if (PROTECTED.equals(optionValue)) { this.reportMissingJavadocTagsVisibility = ClassFileConstants.AccProtected; } else if (DEFAULT.equals(optionValue)) { this.reportMissingJavadocTagsVisibility = ClassFileConstants.AccDefault; } else if (PRIVATE.equals(optionValue)) { this.reportMissingJavadocTagsVisibility = ClassFileConstants.AccPrivate; } } if ((optionValue = optionsMap.get(OPTION_ReportMissingJavadocTagsOverriding)) != null) { if (ENABLED.equals(optionValue)) { this.reportMissingJavadocTagsOverriding = true; } else if (DISABLED.equals(optionValue)) { this.reportMissingJavadocTagsOverriding = false; } } if ((optionValue = optionsMap.get(OPTION_ReportMissingJavadocComments)) != null) { updateSeverity(MissingJavadocComments, optionValue); } if ((optionValue = optionsMap.get(OPTION_ReportMissingJavadocCommentsVisibility)) != null) { if (PUBLIC.equals(optionValue)) { this.reportMissingJavadocCommentsVisibility = ClassFileConstants.AccPublic; } else if (PROTECTED.equals(optionValue)) { this.reportMissingJavadocCommentsVisibility = ClassFileConstants.AccProtected; } else if (DEFAULT.equals(optionValue)) { this.reportMissingJavadocCommentsVisibility = ClassFileConstants.AccDefault; } else if (PRIVATE.equals(optionValue)) { 
// (continuation) Tail of the options-ingestion method that begins earlier in the file:
// the last two Javadoc-related toggles, then the method's closing brace.
                this.reportMissingJavadocCommentsVisibility = ClassFileConstants.AccPrivate;
            }
        }
        if ((optionValue = optionsMap.get(OPTION_ReportMissingJavadocCommentsOverriding)) != null) {
            if (ENABLED.equals(optionValue)) {
                this.reportMissingJavadocCommentsOverriding = true;
            } else if (DISABLED.equals(optionValue)) {
                this.reportMissingJavadocCommentsOverriding = false;
            }
        }
    }

    /**
     * Renders every compiler option as a human-readable, multi-line report
     * (one "\n\t- name: value" entry per option). Intended for debugging /
     * verbose logging, not for machine parsing.
     *
     * NOTE(review): several boolean options render as "ON" vs " OFF" — the
     * leading space before OFF is present in the original literals and is
     * preserved here; confirm it is intentional before normalizing.
     *
     * @return a textual dump of the current option settings
     */
    public String toString() {
        StringBuffer buf = new StringBuffer("CompilerOptions:"); //$NON-NLS-1$
        buf.append("\n\t- local variables debug attributes: ").append((this.produceDebugAttributes & Vars) != 0 ? "ON" : " OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- line number debug attributes: ").append((this.produceDebugAttributes & Lines) != 0 ? "ON" : " OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- source debug attributes: ").append((this.produceDebugAttributes & Source) != 0 ? "ON" : " OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- preserve all local variables: ").append(this.preserveAllLocalVariables ? "ON" : " OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- method with constructor name: ").append(getSeverityString(MethodWithConstructorName)); //$NON-NLS-1$
        buf.append("\n\t- overridden package default method: ").append(getSeverityString(OverriddenPackageDefaultMethod)); //$NON-NLS-1$
        buf.append("\n\t- deprecation: ").append(getSeverityString(UsingDeprecatedAPI)); //$NON-NLS-1$
        buf.append("\n\t- masked catch block: ").append(getSeverityString(MaskedCatchBlock)); //$NON-NLS-1$
        buf.append("\n\t- unused local variable: ").append(getSeverityString(UnusedLocalVariable)); //$NON-NLS-1$
        buf.append("\n\t- unused parameter: ").append(getSeverityString(UnusedArgument)); //$NON-NLS-1$
        buf.append("\n\t- unused import: ").append(getSeverityString(UnusedImport)); //$NON-NLS-1$
        buf.append("\n\t- synthetic access emulation: ").append(getSeverityString(AccessEmulation)); //$NON-NLS-1$
        buf.append("\n\t- assignment with no effect: ").append(getSeverityString(NoEffectAssignment)); //$NON-NLS-1$
        buf.append("\n\t- non externalized string: ").append(getSeverityString(NonExternalizedString)); //$NON-NLS-1$
        buf.append("\n\t- static access receiver: ").append(getSeverityString(NonStaticAccessToStatic)); //$NON-NLS-1$
        buf.append("\n\t- indirect static access: ").append(getSeverityString(IndirectStaticAccess)); //$NON-NLS-1$
        buf.append("\n\t- incompatible non inherited interface method: ").append(getSeverityString(IncompatibleNonInheritedInterfaceMethod)); //$NON-NLS-1$
        buf.append("\n\t- unused private member: ").append(getSeverityString(UnusedPrivateMember)); //$NON-NLS-1$
        buf.append("\n\t- local variable hiding another variable: ").append(getSeverityString(LocalVariableHiding)); //$NON-NLS-1$
        buf.append("\n\t- field hiding another variable: ").append(getSeverityString(FieldHiding)); //$NON-NLS-1$
        buf.append("\n\t- type parameter hiding another type: ").append(getSeverityString(TypeParameterHiding)); //$NON-NLS-1$
        buf.append("\n\t- possible accidental boolean assignment: ").append(getSeverityString(AccidentalBooleanAssign)); //$NON-NLS-1$
        buf.append("\n\t- superfluous semicolon: ").append(getSeverityString(EmptyStatement)); //$NON-NLS-1$
        buf.append("\n\t- uncommented empty block: ").append(getSeverityString(UndocumentedEmptyBlock)); //$NON-NLS-1$
        buf.append("\n\t- unnecessary type check: ").append(getSeverityString(UnnecessaryTypeCheck)); //$NON-NLS-1$
        // Javadoc-related options are nested one level deeper ("\t\t+" / "\t\t\t*").
        buf.append("\n\t- javadoc comment support: ").append(this.docCommentSupport ? "ON" : " OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t\t+ invalid javadoc: ").append(getSeverityString(InvalidJavadoc)); //$NON-NLS-1$
        buf.append("\n\t\t+ report invalid javadoc tags: ").append(this.reportInvalidJavadocTags ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t\t\t* deprecated references: ").append(this.reportInvalidJavadocTagsDeprecatedRef ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t\t\t* not visible references: ").append(this.reportInvalidJavadocTagsNotVisibleRef ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t\t+ visibility level to report invalid javadoc tags: ").append(getVisibilityString(this.reportInvalidJavadocTagsVisibility)); //$NON-NLS-1$
        buf.append("\n\t\t+ missing javadoc tags: ").append(getSeverityString(MissingJavadocTags)); //$NON-NLS-1$
        buf.append("\n\t\t+ visibility level to report missing javadoc tags: ").append(getVisibilityString(this.reportMissingJavadocTagsVisibility)); //$NON-NLS-1$
        buf.append("\n\t\t+ report missing javadoc tags in overriding methods: ").append(this.reportMissingJavadocTagsOverriding ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t\t+ missing javadoc comments: ").append(getSeverityString(MissingJavadocComments)); //$NON-NLS-1$
        buf.append("\n\t\t+ visibility level to report missing javadoc comments: ").append(getVisibilityString(this.reportMissingJavadocCommentsVisibility)); //$NON-NLS-1$
        buf.append("\n\t\t+ report missing javadoc comments in overriding methods: ").append(this.reportMissingJavadocCommentsOverriding ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- finally block not completing normally: ").append(getSeverityString(FinallyBlockNotCompleting)); //$NON-NLS-1$
        buf.append("\n\t- unused declared thrown exception: ").append(getSeverityString(UnusedDeclaredThrownException)); //$NON-NLS-1$
        buf.append("\n\t- unused declared thrown exception when overriding: ").append(this.reportUnusedDeclaredThrownExceptionWhenOverriding ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- unnecessary else: ").append(getSeverityString(UnnecessaryElse)); //$NON-NLS-1$
        buf.append("\n\t- JDK compliance level: "+ versionFromJdkLevel(this.complianceLevel)); //$NON-NLS-1$
        buf.append("\n\t- JDK source level: "+ versionFromJdkLevel(this.sourceLevel)); //$NON-NLS-1$
        buf.append("\n\t- JDK target level: "+ versionFromJdkLevel(this.targetJDK)); //$NON-NLS-1$
        buf.append("\n\t- private constructor access: ").append(this.isPrivateConstructorAccessChangingVisibility ? "extra argument" : "make default access"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- verbose : ").append(this.verbose ? "ON" : "OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- produce reference info : ").append(this.produceReferenceInfo ? "ON" : "OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- parse literal expressions as constants : ").append(this.parseLiteralExpressionsAsConstants ? "ON" : "OFF"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        buf.append("\n\t- encoding : ").append(this.defaultEncoding == null ? "<default>" : this.defaultEncoding); //$NON-NLS-1$ //$NON-NLS-2$
        buf.append("\n\t- task tags: ").append(this.taskTags == null ? "" : new String(CharOperation.concatWith(this.taskTags,','))); //$NON-NLS-1$ //$NON-NLS-2$
        // NOTE(review): "taskPriorites" is a pre-existing field-name typo ("priorities"); not renamed here.
        buf.append("\n\t- task priorities : ").append(this.taskPriorites == null ? "" : new String(CharOperation.concatWith(this.taskPriorites,','))); //$NON-NLS-1$ //$NON-NLS-2$
        buf.append("\n\t- report deprecation inside deprecated code : ").append(this.reportDeprecationInsideDeprecatedCode ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- report deprecation when overriding deprecated method : ").append(this.reportDeprecationWhenOverridingDeprecatedMethod ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- report unused parameter when implementing abstract method : ").append(this.reportUnusedParameterWhenImplementingAbstract ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- report unused parameter when overriding concrete method : ").append(this.reportUnusedParameterWhenOverridingConcrete ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- report constructor/setter parameter hiding existing field : ").append(this.reportSpecialParameterHidingField ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- inline JSR bytecode : ").append(this.inlineJsrBytecode ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- unsafe type operation: ").append(getSeverityString(UncheckedTypeOperation)); //$NON-NLS-1$
        buf.append("\n\t- unsafe raw type: ").append(getSeverityString(RawTypeReference)); //$NON-NLS-1$
        buf.append("\n\t- final bound for type parameter: ").append(getSeverityString(FinalParameterBound)); //$NON-NLS-1$
        buf.append("\n\t- missing serialVersionUID: ").append(getSeverityString(MissingSerialVersion)); //$NON-NLS-1$
        buf.append("\n\t- varargs argument need cast: ").append(getSeverityString(VarargsArgumentNeedCast)); //$NON-NLS-1$
        buf.append("\n\t- forbidden reference to type with access restriction: ").append(getSeverityString(ForbiddenReference)); //$NON-NLS-1$
        buf.append("\n\t- discouraged reference to type with access restriction: ").append(getSeverityString(DiscouragedReference)); //$NON-NLS-1$
        buf.append("\n\t- null reference: ").append(getSeverityString(NullReference)); //$NON-NLS-1$
        buf.append("\n\t- autoboxing: ").append(getSeverityString(AutoBoxing)); //$NON-NLS-1$
        buf.append("\n\t- annotation super interface: ").append(getSeverityString(AnnotationSuperInterface)); //$NON-NLS-1$
        buf.append("\n\t- missing @Override annotation: ").append(getSeverityString(MissingOverrideAnnotation)); //$NON-NLS-1$
        buf.append("\n\t- missing @Deprecated annotation: ").append(getSeverityString(MissingDeprecatedAnnotation)); //$NON-NLS-1$
        buf.append("\n\t- incomplete enum switch: ").append(getSeverityString(IncompleteEnumSwitch)); //$NON-NLS-1$
        buf.append("\n\t- suppress warnings: ").append(this.suppressWarnings ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- unhandled warning token: ").append(getSeverityString(UnhandledWarningToken)); //$NON-NLS-1$
        buf.append("\n\t- unused label: ").append(getSeverityString(UnusedLabel)); //$NON-NLS-1$
        buf.append("\n\t- treat optional error as fatal: ").append(this.treatOptionalErrorAsFatal ? ENABLED : DISABLED); //$NON-NLS-1$
        buf.append("\n\t- parameter assignment: ").append(getSeverityString(ParameterAssignment)); //$NON-NLS-1$
        return buf.toString();
    }

    /**
     * Files the given irritant under exactly one of the two severity bit masks
     * according to the requested severity string:
     * ERROR -> set in errorThreshold, cleared in warningThreshold;
     * WARNING -> the reverse; IGNORE -> cleared in both.
     * Any other value leaves both masks untouched.
     *
     * @param irritant       bit flag identifying the problem category
     * @param severityString one of ERROR / WARNING / IGNORE (compared via equals)
     */
    void updateSeverity(long irritant, Object severityString) {
        if (ERROR.equals(severityString)) {
            this.errorThreshold |= irritant;
            this.warningThreshold &= ~irritant;
        } else if (WARNING.equals(severityString)) {
            this.errorThreshold &= ~irritant;
            this.warningThreshold |= irritant;
        } else if (IGNORE.equals(severityString)) {
            this.errorThreshold &= ~irritant;
            this.warningThreshold &= ~irritant;
        }
    }

    /**
     * Maps a version identifier (e.g. VERSION_1_4) to its class-file JDK level
     * constant.
     *
     * @param versionID version identifier object, compared via equals
     * @return the matching ClassFileConstants.JDKx_y level, or 0 when unknown
     */
    public static long versionToJdkLevel(Object versionID) {
        if (VERSION_1_1.equals(versionID)) {
            return ClassFileConstants.JDK1_1;
        } else if (VERSION_1_2.equals(versionID)) {
            return ClassFileConstants.JDK1_2;
        } else if (VERSION_1_3.equals(versionID)) {
            return ClassFileConstants.JDK1_3;
        } else if (VERSION_1_4.equals(versionID)) {
            return ClassFileConstants.JDK1_4;
        } else if (VERSION_1_5.equals(versionID)) {
            return ClassFileConstants.JDK1_5;
        } else if (VERSION_1_6.equals(versionID)) {
            return ClassFileConstants.JDK1_6;
        }
        return 0; // unknown
    }

    /**
     * Inverse of {@link #versionToJdkLevel(Object)}: maps a class-file JDK
     * level back to its version string.
     *
     * @param jdkLevel one of the ClassFileConstants.JDKx_y levels
     * @return the matching VERSION_x_y string, or "" when unknown
     */
    public static String versionFromJdkLevel(long jdkLevel) {
        if (jdkLevel == ClassFileConstants.JDK1_1) {
            return VERSION_1_1;
        } else if (jdkLevel == ClassFileConstants.JDK1_2) {
            return VERSION_1_2;
        } else if (jdkLevel == ClassFileConstants.JDK1_3) {
            return VERSION_1_3;
        } else if (jdkLevel == ClassFileConstants.JDK1_4) {
            return VERSION_1_4;
        } else if (jdkLevel == ClassFileConstants.JDK1_5) {
            return VERSION_1_5;
        } else if (jdkLevel == ClassFileConstants.JDK1_6) {
            return VERSION_1_6;
        }
        return ""; // unknown version //$NON-NLS-1$
    }

    /**
     * Return all warning option names for use as keys in compiler options maps.
 * @return all warning option names
 * TODO (maxime) revise for ensuring completeness
 */
    public static String[] warningOptionNames() {
        // Alphabetical-ish listing of every OPTION_Report* key; see TODO above
        // regarding completeness.
        String[] result = {
            OPTION_ReportAnnotationSuperInterface,
            OPTION_ReportAssertIdentifier,
            OPTION_ReportAutoboxing,
            OPTION_ReportDeprecation,
            OPTION_ReportDiscouragedReference,
            OPTION_ReportEmptyStatement,
            OPTION_ReportEnumIdentifier,
            OPTION_ReportFieldHiding,
            OPTION_ReportFinalParameterBound,
            OPTION_ReportFinallyBlockNotCompletingNormally,
            OPTION_ReportForbiddenReference,
            OPTION_ReportHiddenCatchBlock,
            OPTION_ReportIncompatibleNonInheritedInterfaceMethod,
            OPTION_ReportIncompleteEnumSwitch,
            OPTION_ReportIndirectStaticAccess,
            OPTION_ReportInvalidJavadoc,
            OPTION_ReportLocalVariableHiding,
            OPTION_ReportMethodWithConstructorName,
            OPTION_ReportMissingDeprecatedAnnotation,
            OPTION_ReportMissingJavadocComments,
            OPTION_ReportMissingJavadocTags,
            OPTION_ReportMissingOverrideAnnotation,
            OPTION_ReportMissingSerialVersion,
            OPTION_ReportNoEffectAssignment,
            OPTION_ReportNoImplicitStringConversion,
            OPTION_ReportNonExternalizedStringLiteral,
            OPTION_ReportNonStaticAccessToStatic,
            OPTION_ReportNullReference,
            OPTION_ReportOverridingPackageDefaultMethod,
            OPTION_ReportParameterAssignment,
            OPTION_ReportPossibleAccidentalBooleanAssignment,
            OPTION_ReportSyntheticAccessEmulation,
            OPTION_ReportTypeParameterHiding,
            OPTION_ReportUncheckedTypeOperation,
            OPTION_ReportUndocumentedEmptyBlock,
            OPTION_ReportUnnecessaryElse,
            OPTION_ReportUnnecessaryTypeCheck,
            OPTION_ReportUnqualifiedFieldAccess,
            OPTION_ReportUnusedDeclaredThrownException,
            OPTION_ReportUnusedImport,
            OPTION_ReportUnusedLocal,
            OPTION_ReportUnusedParameter,
            OPTION_ReportUnusedPrivateMember,
            OPTION_ReportVarargsArgumentNeedCast,
            OPTION_ReportUnhandledWarningToken,
        };
        return result;
    }

    /**
     * Maps an irritant bit flag to its @SuppressWarnings token, or null when
     * the irritant has no associated token.
     *
     * The irritant masks span 64 bits, but switch requires int cases, so the
     * long is split: when the value fits in the low 32 bits the first switch is
     * used; otherwise the high word (irritant >>> 32) is matched.
     *
     * @param irritant single irritant bit flag
     * @return the matching warning token, or null when none
     */
    public static String warningTokenFromIrritant(long irritant) {
        // keep in sync with warningTokenToIrritant
        int irritantInt = (int) irritant;
        if (irritantInt == irritant) {
            // irritant fits in the low 32 bits
            switch (irritantInt) {
                case (int) (InvalidJavadoc | UsingDeprecatedAPI) :
                case (int) UsingDeprecatedAPI :
                    return "deprecation"; //$NON-NLS-1$
                case (int) FinallyBlockNotCompleting :
                    return "finally"; //$NON-NLS-1$
                case (int) FieldHiding :
                case (int) LocalVariableHiding :
                case (int) MaskedCatchBlock :
                    return "hiding"; //$NON-NLS-1$
                case (int) NonExternalizedString :
                    return "nls"; //$NON-NLS-1$
                case (int) UnusedLocalVariable :
                case (int) UnusedArgument :
                case (int) UnusedPrivateMember:
                case (int) UnusedDeclaredThrownException:
                    return "unused"; //$NON-NLS-1$
                case (int) IndirectStaticAccess :
                case (int) NonStaticAccessToStatic :
                    return "static-access"; //$NON-NLS-1$
                case (int) AccessEmulation :
                    return "synthetic-access"; //$NON-NLS-1$
                case (int) UnqualifiedFieldAccess :
                    return "unqualified-field-access"; //$NON-NLS-1$
                case (int) UncheckedTypeOperation :
                    return "unchecked"; //$NON-NLS-1$
            }
        } else {
            // irritant lives in the high 32 bits
            irritantInt = (int)(irritant >>> 32);
            switch (irritantInt) {
                case (int)(MissingSerialVersion >>> 32) :
                    return "serial"; //$NON-NLS-1$
                case (int)(AutoBoxing >>> 32) :
                    return "boxing"; //$NON-NLS-1$
                case (int)(TypeParameterHiding >>> 32) :
                    return "hiding"; //$NON-NLS-1$
                case (int)(IncompleteEnumSwitch >>> 32) :
                    return "incomplete-switch"; //$NON-NLS-1$
                case (int)(MissingDeprecatedAnnotation >>> 32) :
                    return "dep-ann"; //$NON-NLS-1$
                case (int)(RawTypeReference >>> 32):
                    return "unchecked"; //$NON-NLS-1$
                // NOTE(review): every other case in this high-word switch uses
                // (X >>> 32), but this one casts UnusedLabel directly. If
                // UnusedLabel is a high-bit irritant this case can never match
                // (low word would be 0) — verify the constant's bit position.
                case (int) UnusedLabel:
                    return "unused"; //$NON-NLS-1$
            }
        }
        return null;
    }

    /**
     * Inverse of {@link #warningTokenFromIrritant(long)}: maps a
     * @SuppressWarnings token to the union of irritant bits it suppresses.
     *
     * @param warningToken token text, e.g. "unchecked" or "all"
     * @return the combined irritant mask, or 0 for null/empty/unknown tokens
     */
    public static long warningTokenToIrritant(String warningToken) {
        // keep in sync with warningTokenFromIrritant
        if (warningToken == null || warningToken.length() == 0) return 0;
        switch (warningToken.charAt(0)) {
            case 'a' :
                if ("all".equals(warningToken)) //$NON-NLS-1$
                    return 0xFFFFFFFFFFFFFFFFl; // suppress all warnings
                break;
            case 'b' :
                if ("boxing".equals(warningToken)) //$NON-NLS-1$
                    return AutoBoxing;
                break;
            case 'd' :
                if ("deprecation".equals(warningToken)) //$NON-NLS-1$
                    return UsingDeprecatedAPI;
                if ("dep-ann".equals(warningToken)) //$NON-NLS-1$
                    return MissingDeprecatedAnnotation;
                break;
            case 'f' :
                if ("finally".equals(warningToken)) //$NON-NLS-1$
                    return FinallyBlockNotCompleting;
                break;
            case 'h' :
                if ("hiding".equals(warningToken)) //$NON-NLS-1$
                    return FieldHiding | LocalVariableHiding | MaskedCatchBlock | TypeParameterHiding;
                // NOTE(review): no break here — an 'h' token other than
                // "hiding" falls through into the 'i' case. Harmless today
                // (the 'i' equals check fails and breaks), but likely an
                // unintended fall-through; confirm before relying on it.
            case 'i' :
                if ("incomplete-switch".equals(warningToken)) //$NON-NLS-1$
                    return IncompleteEnumSwitch;
                break;
            case 'n' :
                if ("nls".equals(warningToken)) //$NON-NLS-1$
                    return NonExternalizedString;
                break;
            case 's' :
                if ("serial".equals(warningToken)) //$NON-NLS-1$
                    return MissingSerialVersion;
                if ("static-access".equals(warningToken)) //$NON-NLS-1$
                    return IndirectStaticAccess | NonStaticAccessToStatic;
                if ("synthetic-access".equals(warningToken)) //$NON-NLS-1$
                    return AccessEmulation;
                break;
            case 'u' :
                if ("unused".equals(warningToken)) //$NON-NLS-1$
                    return UnusedLocalVariable | UnusedArgument | UnusedPrivateMember | UnusedDeclaredThrownException | UnusedLabel;
                if ("unchecked".equals(warningToken)) //$NON-NLS-1$
                    return UncheckedTypeOperation | RawTypeReference;
                if ("unqualified-field-access".equals(warningToken)) //$NON-NLS-1$
                    return UnqualifiedFieldAccess;
                break;
        }
        return 0;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandra.db.commitlog; import java.io.*; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.function.BiConsumer; import java.util.stream.Collectors; import java.util.zip.CRC32; import java.util.zip.Checksum; import com.google.common.collect.Iterables; import com.google.common.io.Files; import org.junit.*; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.apache.cassandra.SchemaLoader; import org.apache.cassandra.Util; import org.apache.cassandra.io.compress.ZstdCompressor; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.schema.TableId; import org.apache.cassandra.schema.TableMetadata; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.config.ParameterizedClass; import org.apache.cassandra.config.Config.DiskFailurePolicy; import org.apache.cassandra.db.*; import org.apache.cassandra.db.commitlog.CommitLogReplayer.CommitLogReplayException; import org.apache.cassandra.db.compaction.CompactionManager; import 
org.apache.cassandra.db.marshal.AsciiType; import org.apache.cassandra.db.marshal.BytesType; import org.apache.cassandra.db.partitions.PartitionUpdate; import org.apache.cassandra.db.rows.Row; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.io.FSWriteError; import org.apache.cassandra.io.compress.DeflateCompressor; import org.apache.cassandra.io.compress.LZ4Compressor; import org.apache.cassandra.io.compress.SnappyCompressor; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.net.MessagingService; import org.apache.cassandra.schema.KeyspaceParams; import org.apache.cassandra.security.EncryptionContext; import org.apache.cassandra.security.EncryptionContextGenerator; import org.apache.cassandra.utils.Hex; import org.apache.cassandra.utils.JVMStabilityInspector; import org.apache.cassandra.utils.KillerForTests; import org.apache.cassandra.utils.Pair; import org.apache.cassandra.utils.vint.VIntCoding; import org.junit.After; import static org.apache.cassandra.db.commitlog.CommitLogSegment.ENTRY_OVERHEAD_SIZE; import static org.apache.cassandra.utils.ByteBufferUtil.bytes; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import org.apache.cassandra.db.marshal.IntegerType; import org.apache.cassandra.db.marshal.MapType; import org.apache.cassandra.db.marshal.SetType; import org.apache.cassandra.db.marshal.UTF8Type; @Ignore @RunWith(Parameterized.class) public abstract class CommitLogTest { protected static final String KEYSPACE1 = "CommitLogTest"; private static final String KEYSPACE2 = "CommitLogTestNonDurable"; protected static final String STANDARD1 = "Standard1"; private static final String STANDARD2 = "Standard2"; private static final String CUSTOM1 = "Custom1"; private static JVMStabilityInspector.Killer oldKiller; private static KillerForTests testKiller; public CommitLogTest(ParameterizedClass 
commitLogCompression, EncryptionContext encryptionContext) { DatabaseDescriptor.setCommitLogCompression(commitLogCompression); DatabaseDescriptor.setEncryptionContext(encryptionContext); } @Parameters() public static Collection<Object[]> generateData() { return Arrays.asList(new Object[][]{ {null, EncryptionContextGenerator.createDisabledContext()}, // No compression, no encryption {null, EncryptionContextGenerator.createContext(true)}, // Encryption {new ParameterizedClass(LZ4Compressor.class.getName(), Collections.emptyMap()), EncryptionContextGenerator.createDisabledContext()}, {new ParameterizedClass(SnappyCompressor.class.getName(), Collections.emptyMap()), EncryptionContextGenerator.createDisabledContext()}, {new ParameterizedClass(DeflateCompressor.class.getName(), Collections.emptyMap()), EncryptionContextGenerator.createDisabledContext()}, {new ParameterizedClass(ZstdCompressor.class.getName(), Collections.emptyMap()), EncryptionContextGenerator.createDisabledContext()}}); } public static void beforeClass() throws ConfigurationException { // Disable durable writes for system keyspaces to prevent system mutations, e.g. 
        // sstable_activity,
        // to end up in CL segments and cause unexpected results in this test wrt counting CL segments,
        // see CASSANDRA-12854
        KeyspaceParams.DEFAULT_LOCAL_DURABLE_WRITES = false;

        SchemaLoader.prepareServer();

        // Table with collection clustering columns and a static column; exercised by
        // testRecoveryWithCollectionClusteringKeysStatic below.
        TableMetadata.Builder custom =
            TableMetadata.builder(KEYSPACE1, CUSTOM1)
                         .addPartitionKeyColumn("k", IntegerType.instance)
                         .addClusteringColumn("c1", MapType.getInstance(UTF8Type.instance, UTF8Type.instance, false))
                         .addClusteringColumn("c2", SetType.getInstance(UTF8Type.instance, false))
                         .addStaticColumn("s", IntegerType.instance);
        SchemaLoader.createKeyspace(KEYSPACE1,
                                    KeyspaceParams.simple(1),
                                    SchemaLoader.standardCFMD(KEYSPACE1, STANDARD1, 0, AsciiType.instance, BytesType.instance),
                                    SchemaLoader.standardCFMD(KEYSPACE1, STANDARD2, 0, AsciiType.instance, BytesType.instance),
                                    custom);
        // NOTE(review): the table definitions below are built against KEYSPACE1 although they are
        // installed into KEYSPACE2 (transient, non-durable keyspace) -- confirm this is intentional.
        SchemaLoader.createKeyspace(KEYSPACE2,
                                    KeyspaceParams.simpleTransient(1),
                                    SchemaLoader.standardCFMD(KEYSPACE1, STANDARD1, 0, AsciiType.instance, BytesType.instance),
                                    SchemaLoader.standardCFMD(KEYSPACE1, STANDARD2, 0, AsciiType.instance, BytesType.instance));
        CompactionManager.instance.disableAutoCompaction();

        testKiller = new KillerForTests();

        // While we don't want the JVM to be nuked from under us on a test failure, we DO want some indication of
        // an error. If we hit a "Kill the JVM" condition while working with the CL when we don't expect it, an aggressive
        // KillerForTests will assertion out on us.
        oldKiller = JVMStabilityInspector.replaceKiller(testKiller);
    }

    /** Restores the JVM killer swapped out in the class-level setup. */
    @AfterClass
    public static void afterClass()
    {
        JVMStabilityInspector.replaceKiller(oldKiller);
    }

    /** Every test starts from an empty commit log. */
    @Before
    public void beforeTest() throws IOException
    {
        CommitLog.instance.resetUnsafe(true);
    }

    /** Clears any recorded "JVM killed" state so one test's kill cannot leak into the next. */
    @After
    public void afterTest()
    {
        testKiller.reset();
    }

    /** Replaying two empty (zero-length) segment files must surface a replay error. */
    @Test
    public void testRecoveryWithEmptyLog() throws Exception
    {
        runExpecting(() -> {
            CommitLog.instance.recoverFiles(new File[]{
                tmpFile(CommitLogDescriptor.current_version),
                tmpFile(CommitLogDescriptor.current_version)
            });
            return null;
        }, CommitLogReplayException.class);
    }

    /** A single empty final segment is tolerated: no exception expected. */
    @Test
    public void testRecoveryWithEmptyFinalLog() throws Exception
    {
        CommitLog.instance.recoverFiles(tmpFile(CommitLogDescriptor.current_version));
    }

    /**
     * Since commit log segments can be allocated before they're needed, the commit log file with the highest
     * id isn't necessarily the last log that we wrote to. We should remove header only logs on recover so we
     * can tolerate truncated logs
     */
    @Test
    public void testHeaderOnlyFileFiltering() throws Exception
    {
        File directory = Files.createTempDir();

        CommitLogDescriptor desc1 = new CommitLogDescriptor(CommitLogDescriptor.current_version, 1, null, DatabaseDescriptor.getEncryptionContext());
        CommitLogDescriptor desc2 = new CommitLogDescriptor(CommitLogDescriptor.current_version, 2, null, DatabaseDescriptor.getEncryptionContext());

        ByteBuffer buffer;

        // this has a header and malformed data
        File file1 = new File(directory, desc1.fileName());
        buffer = ByteBuffer.allocate(1024);
        CommitLogDescriptor.writeHeader(buffer, desc1);
        int pos = buffer.position();
        CommitLogSegment.writeSyncMarker(desc1.id, buffer, buffer.position(), buffer.position(), buffer.position() + 128);
        // Overwrite part of the sync marker region with junk ints to corrupt the data section.
        buffer.position(pos + 8);
        buffer.putInt(5);
        buffer.putInt(6);

        try (OutputStream lout = new FileOutputStream(file1))
        {
            lout.write(buffer.array());
        }

        // this has only a header
        File file2 = new File(directory, desc2.fileName());
        buffer = ByteBuffer.allocate(1024);
        CommitLogDescriptor.writeHeader(buffer, desc2);
        try (OutputStream lout = new FileOutputStream(file2))
        {
            lout.write(buffer.array());
        }

        // one corrupt file and one header only file should be ok
        runExpecting(() -> {
            CommitLog.instance.recoverFiles(file1, file2);
            return null;
        }, null);

        // 2 corrupt files and one header only file should fail
        runExpecting(() -> {
            CommitLog.instance.recoverFiles(file1, file1, file2);
            return null;
        }, CommitLogReplayException.class);
    }

    /** A log containing only zero bytes (no valid header) must fail replay. */
    @Test
    public void testRecoveryWithZeroLog() throws Exception
    {
        testRecovery(new byte[10], CommitLogReplayException.class);
    }

    @Test
    public void testRecoveryWithShortLog() throws Exception
    {
        // force EOF while reading log
        testRecoveryWithBadSizeArgument(100, 10);
    }

    @Test
    public void testRecoveryWithShortSize() throws Exception
    {
        runExpecting(() -> {
            testRecovery(new byte[2], CommitLogDescriptor.current_version);
            return null;
        }, CommitLogReplayException.class);
    }

    @Test
    public void testRecoveryWithShortMutationSize() throws Exception
    {
        testRecoveryWithBadSizeArgument(9, 10);
    }

    /** Helper: replays 100 random bytes; fails unless replay errors are explicitly ignored. */
    private void testRecoveryWithGarbageLog() throws Exception
    {
        byte[] garbage = new byte[100];
        (new java.util.Random()).nextBytes(garbage);
        testRecovery(garbage, CommitLogDescriptor.current_version);
    }

    @Test
    public void testRecoveryWithGarbageLog_fail() throws Exception
    {
        runExpecting(() -> {
            testRecoveryWithGarbageLog();
            return null;
        }, CommitLogReplayException.class);
    }

    /** The same garbage input is tolerated when the ignore-replay-errors property is set. */
    @Test
    public void testRecoveryWithGarbageLog_ignoredByProperty() throws Exception
    {
        try
        {
            System.setProperty(CommitLogReplayer.IGNORE_REPLAY_ERRORS_PROPERTY, "true");
            testRecoveryWithGarbageLog();
        }
        finally
        {
            System.clearProperty(CommitLogReplayer.IGNORE_REPLAY_ERRORS_PROPERTY);
        }
    }

    /** A record whose size checksum disagrees with the size field must fail replay. */
    @Test
    public void testRecoveryWithBadSizeChecksum() throws Exception
    {
        Checksum checksum = new CRC32();
        checksum.update(100);
        testRecoveryWithBadSizeArgument(100, 100, ~checksum.getValue());
    }

    @Test
    public void testRecoveryWithNegativeSizeArgument() throws Exception
    {
        // garbage from a
        // partial/bad flush could be read as a negative size even if there is no EOF
        testRecoveryWithBadSizeArgument(-10, 10); // negative size, but no EOF
    }

    /** A segment still dirty for some table must survive discardCompletedSegments. */
    @Test
    public void testDontDeleteIfDirty() throws Exception
    {
        Keyspace ks = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs1 = ks.getColumnFamilyStore(STANDARD1);
        ColumnFamilyStore cfs2 = ks.getColumnFamilyStore(STANDARD2);

        // Roughly 32 MB mutation
        Mutation m = new RowUpdateBuilder(cfs1.metadata(), 0, "k")
                     .clustering("bytes")
                     .add("val", ByteBuffer.allocate(DatabaseDescriptor.getCommitLogSegmentSize() / 4))
                     .build();

        // Adding it 5 times
        CommitLog.instance.add(m);
        CommitLog.instance.add(m);
        CommitLog.instance.add(m);
        CommitLog.instance.add(m);
        CommitLog.instance.add(m);

        // Adding new mutation on another CF
        Mutation m2 = new RowUpdateBuilder(cfs2.metadata(), 0, "k")
                      .clustering("bytes")
                      .add("val", ByteBuffer.allocate(4))
                      .build();
        CommitLog.instance.add(m2);

        assertEquals(2, CommitLog.instance.segmentManager.getActiveSegments().size());

        TableId id2 = m2.getTableIds().iterator().next();
        CommitLog.instance.discardCompletedSegments(id2, CommitLogPosition.NONE, CommitLog.instance.getCurrentPosition());

        // Assert we still have both our segments
        assertEquals(2, CommitLog.instance.segmentManager.getActiveSegments().size());
    }

    /** Segments dirty only for an already-flushed table are reclaimed on discard. */
    @Test
    public void testDeleteIfNotDirty() throws Exception
    {
        Keyspace ks = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs1 = ks.getColumnFamilyStore(STANDARD1);
        ColumnFamilyStore cfs2 = ks.getColumnFamilyStore(STANDARD2);

        // Roughly 32 MB mutation
        Mutation rm = new RowUpdateBuilder(cfs1.metadata(), 0, "k")
                      .clustering("bytes")
                      .add("val", ByteBuffer.allocate((DatabaseDescriptor.getCommitLogSegmentSize()/4) - 1))
                      .build();

        // Adding it twice (won't change segment)
        CommitLog.instance.add(rm);
        CommitLog.instance.add(rm);

        assertEquals(1, CommitLog.instance.segmentManager.getActiveSegments().size());

        // "Flush": this won't delete anything
        TableId id1 = rm.getTableIds().iterator().next();
        CommitLog.instance.sync(true);
        CommitLog.instance.discardCompletedSegments(id1, CommitLogPosition.NONE, CommitLog.instance.getCurrentPosition());

        assertEquals(1, CommitLog.instance.segmentManager.getActiveSegments().size());

        // Adding new mutation on another CF, large enough (including CL entry overhead) that a new segment is created
        Mutation rm2 = new RowUpdateBuilder(cfs2.metadata(), 0, "k")
                       .clustering("bytes")
                       .add("val", ByteBuffer.allocate(DatabaseDescriptor.getMaxMutationSize() - 200))
                       .build();
        CommitLog.instance.add(rm2);
        // also forces a new segment, since each entry-with-overhead is just under half the CL size
        CommitLog.instance.add(rm2);
        CommitLog.instance.add(rm2);

        Collection<CommitLogSegment> segments = CommitLog.instance.segmentManager.getActiveSegments();

        assertEquals(String.format("Expected 3 segments but got %d (%s)", segments.size(), getDirtyCFIds(segments)),
                     3,
                     segments.size());

        // "Flush" second cf: The first segment should be deleted since we
        // didn't write anything on cf1 since last flush (and we flush cf2)

        TableId id2 = rm2.getTableIds().iterator().next();
        CommitLog.instance.discardCompletedSegments(id2, CommitLogPosition.NONE, CommitLog.instance.getCurrentPosition());

        segments = CommitLog.instance.segmentManager.getActiveSegments();

        // Assert we still have both our segment
        assertEquals(String.format("Expected 1 segment but got %d (%s)", segments.size(), getDirtyCFIds(segments)),
                     1,
                     segments.size());
    }

    /** Renders the dirty table ids of the given segments for assertion failure messages. */
    private String getDirtyCFIds(Collection<CommitLogSegment> segments)
    {
        return "Dirty tableIds: <"
               + String.join(", ", segments.stream()
                                           .map(CommitLogSegment::getDirtyTableIds)
                                           .flatMap(uuids -> uuids.stream())
                                           .distinct()
                                           .map(uuid -> uuid.toString()).collect(Collectors.toList()))
               + ">";
    }

    /**
     * Computes the largest value payload that still fits in a single commit log record for the
     * given table/column, accounting for entry overhead and vint-encoded sizes.
     */
    private static int getMaxRecordDataSize(String keyspace, ByteBuffer key, String cfName, String colName)
    {
        ColumnFamilyStore cfs = Keyspace.open(keyspace).getColumnFamilyStore(cfName);
        // We don't want to allocate a size of 0 as this is optimized under the hood and our computation would
        // break testEqualRecordLimit
        int allocSize = 1;
        Mutation rm = new RowUpdateBuilder(cfs.metadata(), 0, key)
                      .clustering(colName)
                      .add("val", ByteBuffer.allocate(allocSize)).build();

        int max = DatabaseDescriptor.getMaxMutationSize();
        max -= CommitLogSegment.ENTRY_OVERHEAD_SIZE; // log entry overhead

        // Note that the size of the value is vint encoded. So we first compute the overhead of the mutation
        // without the value and its size
        int mutationOverhead = rm.serializedSize(MessagingService.current_version) - (VIntCoding.computeVIntSize(allocSize) + allocSize);
        max -= mutationOverhead;

        // Now, max is the max for both the value and its size. But we want to know how much we can
        // allocate, i.e. the size of the value.
        int sizeOfMax = VIntCoding.computeVIntSize(max);
        max -= sizeOfMax;
        assert VIntCoding.computeVIntSize(max) == sizeOfMax; // sanity check that we're still encoded with the size we thought we would

        return max;
    }

    private static int getMaxRecordDataSize()
    {
        return getMaxRecordDataSize(KEYSPACE1, bytes("k"), STANDARD1, "bytes");
    }

    // CASSANDRA-3615
    @Test
    public void testEqualRecordLimit() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(STANDARD1);
        Mutation rm = new RowUpdateBuilder(cfs.metadata(), 0, "k")
                      .clustering("bytes")
                      .add("val", ByteBuffer.allocate(getMaxRecordDataSize()))
                      .build();
        CommitLog.instance.add(rm);
    }

    /** One byte over the limit must throw and bump the oversized-mutations metric. */
    @Test(expected = MutationExceededMaxSizeException.class)
    public void testExceedRecordLimit() throws Exception
    {
        Keyspace ks = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = ks.getColumnFamilyStore(STANDARD1);
        Mutation rm = new RowUpdateBuilder(cfs.metadata(), 0, "k")
                      .clustering("bytes")
                      .add("val", ByteBuffer.allocate(1 + getMaxRecordDataSize()))
                      .build();
        long cnt = CommitLog.instance.metrics.oversizedMutations.getCount();
        try
        {
            CommitLog.instance.add(rm);
        }
        catch (MutationExceededMaxSizeException e)
        {
            Assert.assertEquals(cnt + 1, CommitLog.instance.metrics.oversizedMutations.getCount());
            throw e;
        }
        throw new AssertionError("mutation larger than limit was accepted");
    }

    /** The oversized-mutation message must list offending partitions (truncated with "and N more."). */
    @Test
    public void testExceedRecordLimitWithMultiplePartitions() throws Exception
    {
        CommitLog.instance.resetUnsafe(true);
        List<Mutation> mutations = new ArrayList<>();
        Keyspace ks = Keyspace.open(KEYSPACE1);
        char[] keyChars = new char[MutationExceededMaxSizeException.PARTITION_MESSAGE_LIMIT];
        Arrays.fill(keyChars, 'k');
        String key = new String(keyChars);

        // large mutation
        mutations.add(new RowUpdateBuilder(ks.getColumnFamilyStore(STANDARD1).metadata(), 0, key)
                      .clustering("bytes")
                      .add("val", ByteBuffer.allocate(1 + getMaxRecordDataSize()))
                      .build());

        // smaller mutation
        mutations.add(new RowUpdateBuilder(ks.getColumnFamilyStore(STANDARD2).metadata(), 0, key)
                      .clustering("bytes")
                      .add("val", ByteBuffer.allocate(1 + getMaxRecordDataSize() - 1024))
                      .build());

        Mutation mutation = Mutation.merge(mutations);
        try
        {
            CommitLog.instance.add(Mutation.merge(mutations));
            throw new AssertionError("mutation larger than limit was accepted");
        }
        catch (MutationExceededMaxSizeException exception)
        {
            String message = exception.getMessage();

            long mutationSize = mutation.serializedSize(MessagingService.current_version) + ENTRY_OVERHEAD_SIZE;
            final String expectedMessagePrefix = String.format("Encountered an oversized mutation (%d/%d) for keyspace: %s.",
                                                               mutationSize,
                                                               DatabaseDescriptor.getMaxMutationSize(),
                                                               KEYSPACE1);
            assertTrue(message.startsWith(expectedMessagePrefix));
            assertTrue(message.contains(String.format("%s.%s and 1 more.", STANDARD1, key)));
        }
    }

    /** Writes size/checksum(size)/data so the size checksum matches but the record is bogus. */
    protected void testRecoveryWithBadSizeArgument(int size, int dataSize) throws Exception
    {
        Checksum checksum = new CRC32();
        checksum.update(size);
        testRecoveryWithBadSizeArgument(size, dataSize, checksum.getValue());
    }

    /** Writes an explicit (possibly wrong) checksum for the size field, then replays. */
    protected void testRecoveryWithBadSizeArgument(int size, int dataSize, long checksum) throws Exception
    {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        DataOutputStream dout = new DataOutputStream(out);
        dout.writeInt(size);
        dout.writeLong(checksum);
        dout.write(new byte[dataSize]);
        dout.close();
        testRecovery(out.toByteArray(), CommitLogReplayException.class);
    }

    /**
     * Create a temporary commit log file with an appropriate descriptor at the head.
     *
     * @return the commit log file reference and the first position after the descriptor in the file
     * (so that subsequent writes happen at the correct file location).
     */
    protected Pair<File, Integer> tmpFile() throws IOException
    {
        EncryptionContext encryptionContext = DatabaseDescriptor.getEncryptionContext();
        CommitLogDescriptor desc = new CommitLogDescriptor(CommitLogDescriptor.current_version,
                                                           CommitLogSegment.getNextId(),
                                                           DatabaseDescriptor.getCommitLogCompression(),
                                                           encryptionContext);

        ByteBuffer buf = ByteBuffer.allocate(1024);
        CommitLogDescriptor.writeHeader(buf, desc, getAdditionalHeaders(encryptionContext));
        buf.flip();
        int positionAfterHeader = buf.limit() + 1;

        File logFile = new File(DatabaseDescriptor.getCommitLogLocation(), desc.fileName());

        try (OutputStream lout = new FileOutputStream(logFile))
        {
            lout.write(buf.array(), 0, buf.limit());
        }

        return Pair.create(logFile, positionAfterHeader);
    }

    private Map<String, String> getAdditionalHeaders(EncryptionContext encryptionContext)
    {
        if (!encryptionContext.isEnabled())
            return Collections.emptyMap();

        // if we're testing encryption, we need to write out a cipher IV to the descriptor headers
        byte[] buf = new byte[16];
        new Random().nextBytes(buf);
        return Collections.singletonMap(EncryptionContext.ENCRYPTION_IV, Hex.bytesToHex(buf));
    }

    /** Creates an empty temp file named like a commit log segment of the given version. */
    protected File tmpFile(int version)
    {
        File logFile = FileUtils.createTempFile("CommitLog-" + version + "-", ".log");
        assert logFile.length() == 0;
        return logFile;
    }

    /** Writes raw logData into a fresh segment file of the given version and replays it. */
    protected Void testRecovery(byte[] logData, int version) throws Exception
    {
        File logFile = tmpFile(version);
        try (OutputStream lout = new FileOutputStream(logFile))
        {
            lout.write(logData);
            //statics make it annoying to test things correctly
            CommitLog.instance.recover(logFile.getPath()); //CASSANDRA-1119 / CASSANDRA-1179 throw on failure
        }
        return null;
    }

    /**
     * Writes a descriptor header (id rewritten to match the generated file name) followed by
     * logData, then replays the file.
     */
    protected Void testRecovery(CommitLogDescriptor desc, byte[] logData) throws Exception
    {
        File logFile = tmpFile(desc.version);
        CommitLogDescriptor fromFile = CommitLogDescriptor.fromFileName(logFile.getName());
        // Change id to match file.
        desc = new CommitLogDescriptor(desc.version, fromFile.id, desc.compression, desc.getEncryptionContext());
        ByteBuffer buf = ByteBuffer.allocate(1024);
        CommitLogDescriptor.writeHeader(buf, desc, getAdditionalHeaders(desc.getEncryptionContext()));
        try (OutputStream lout = new FileOutputStream(logFile))
        {
            lout.write(buf.array(), 0, buf.position());
            lout.write(logData);
            //statics make it annoying to test things correctly
            CommitLog.instance.recover(logFile.getPath()); //CASSANDRA-1119 / CASSANDRA-1179 throw on failure
        }
        return null;
    }

    /** A header whose embedded id disagrees with the file name must fail replay. */
    @Test
    public void testRecoveryWithIdMismatch() throws Exception
    {
        CommitLogDescriptor desc = new CommitLogDescriptor(4, null, EncryptionContextGenerator.createDisabledContext());
        File logFile = tmpFile(desc.version);
        ByteBuffer buf = ByteBuffer.allocate(1024);
        CommitLogDescriptor.writeHeader(buf, desc);
        try (OutputStream lout = new FileOutputStream(logFile))
        {
            lout.write(buf.array(), 0, buf.position());

            runExpecting(() -> {
                CommitLog.instance.recover(logFile.getPath()); //CASSANDRA-1119 / CASSANDRA-1179 throw on failure
                return null;
            }, CommitLogReplayException.class);
        }
    }

    /** An unknown compressor class in the descriptor must fail replay. */
    @Test
    public void testRecoveryWithBadCompressor() throws Exception
    {
        CommitLogDescriptor desc = new CommitLogDescriptor(4, new ParameterizedClass("UnknownCompressor", null), EncryptionContextGenerator.createDisabledContext());
        runExpecting(() -> {
            testRecovery(desc, new byte[0]);
            return null;
        }, CommitLogReplayException.class);
    }

    /**
     * Runs r and verifies it throws exactly the expected exception class (or nothing when
     * expected is null), and that the recorded JVM "killed" state matches whether a failure
     * was expected.
     */
    protected void runExpecting(Callable<Void> r, Class<?> expected)
    {
        Throwable caught = null;
        try
        {
            r.call();
        }
        catch (Throwable t)
        {
            // Class objects are per-loader singletons, so reference comparison is intentional here.
            if (expected != t.getClass())
                throw new AssertionError("Expected exception " + expected + ", got " + t, t);
            caught = t;
        }
        if (expected != null && caught == null)
            Assert.fail("Expected exception " + expected + " but call completed successfully.");

        assertEquals("JVM kill state doesn't match expectation.", expected != null, testKiller.wasKilled());
    }

    protected void testRecovery(final byte[] logData, Class<?> expected) throws Exception
    {
        // NOTE(review): the two locals below are unused -- presumably leftovers from a
        // compression/encryption-parameterized variant of this helper; consider removing.
        ParameterizedClass commitLogCompression = DatabaseDescriptor.getCommitLogCompression();
        EncryptionContext encryptionContext = DatabaseDescriptor.getEncryptionContext();
        runExpecting(() -> testRecovery(logData, CommitLogDescriptor.current_version), expected);
    }

    /** Truncating with auto-snapshot off frees segments once dependent tables are "flushed". */
    @Test
    public void testTruncateWithoutSnapshot() throws ExecutionException, InterruptedException, IOException
    {
        boolean originalState = DatabaseDescriptor.isAutoSnapshot();
        try
        {
            boolean prev = DatabaseDescriptor.isAutoSnapshot();
            DatabaseDescriptor.setAutoSnapshot(false);
            Keyspace ks = Keyspace.open(KEYSPACE1);
            ColumnFamilyStore cfs1 = ks.getColumnFamilyStore(STANDARD1);
            ColumnFamilyStore cfs2 = ks.getColumnFamilyStore(STANDARD2);

            new RowUpdateBuilder(cfs1.metadata(), 0, "k").clustering("bytes").add("val", ByteBuffer.allocate(100)).build().applyUnsafe();
            cfs1.truncateBlocking();
            DatabaseDescriptor.setAutoSnapshot(prev);
            Mutation m2 = new RowUpdateBuilder(cfs2.metadata(), 0, "k")
                          .clustering("bytes")
                          .add("val", ByteBuffer.allocate(DatabaseDescriptor.getCommitLogSegmentSize() / 4))
                          .build();

            for (int i = 0 ; i < 5 ; i++)
                CommitLog.instance.add(m2);

            assertEquals(2, CommitLog.instance.segmentManager.getActiveSegments().size());
            CommitLogPosition position = CommitLog.instance.getCurrentPosition();
            // Mark system keyspaces clean so only the test tables keep segments dirty.
            for (Keyspace keyspace : Keyspace.system())
                for (ColumnFamilyStore syscfs : keyspace.getColumnFamilyStores())
                    CommitLog.instance.discardCompletedSegments(syscfs.metadata().id, CommitLogPosition.NONE, position);
            CommitLog.instance.discardCompletedSegments(cfs2.metadata().id, CommitLogPosition.NONE, position);
            assertEquals(1, CommitLog.instance.segmentManager.getActiveSegments().size());
        }
        finally
        {
            DatabaseDescriptor.setAutoSnapshot(originalState);
        }
    }

    /** Truncate on a non-durable keyspace must drop data without needing commit log replay. */
    @Test
    public void testTruncateWithoutSnapshotNonDurable() throws IOException
    {
        boolean originalState = DatabaseDescriptor.getAutoSnapshot();
        try
        {
            DatabaseDescriptor.setAutoSnapshot(false);
            Keyspace notDurableKs = Keyspace.open(KEYSPACE2);
            assertFalse(notDurableKs.getMetadata().params.durableWrites);

            ColumnFamilyStore cfs = notDurableKs.getColumnFamilyStore("Standard1");
            new RowUpdateBuilder(cfs.metadata(), 0, "key1")
                .clustering("bytes").add("val", bytes("abcd"))
                .build()
                .applyUnsafe();

            assertTrue(Util.getOnlyRow(Util.cmd(cfs).columns("val").build())
                           .cells().iterator().next().value().equals(bytes("abcd")));

            cfs.truncateBlocking();

            Util.assertEmpty(Util.cmd(cfs).columns("val").build());
        }
        finally
        {
            DatabaseDescriptor.setAutoSnapshot(originalState);
        }
    }

    /** Replaying all active segments must observe every cell written before the sync. */
    @Test
    public void replaySimple() throws IOException
    {
        int cellCount = 0;
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(STANDARD1);
        final Mutation rm1 = new RowUpdateBuilder(cfs.metadata(), 0, "k1")
                             .clustering("bytes")
                             .add("val", bytes("this is a string"))
                             .build();
        cellCount += 1;
        CommitLog.instance.add(rm1);

        final Mutation rm2 = new RowUpdateBuilder(cfs.metadata(), 0, "k2")
                             .clustering("bytes")
                             .add("val", bytes("this is a string"))
                             .build();
        cellCount += 1;
        CommitLog.instance.add(rm2);

        CommitLog.instance.sync(true);

        SimpleCountingReplayer replayer = new SimpleCountingReplayer(CommitLog.instance, CommitLogPosition.NONE, cfs.metadata());
        List<String> activeSegments = CommitLog.instance.getActiveSegmentNames();
        assertFalse(activeSegments.isEmpty());

        File[] files = new File(CommitLog.instance.segmentManager.storageDirectory).listFiles((file, name) -> activeSegments.contains(name));
        replayer.replayFiles(files);

        assertEquals(cellCount, replayer.cells);
    }

    /** Replaying from a mid-log position must only observe mutations written after it. */
    @Test
    public void replayWithDiscard() throws IOException
    {
        int cellCount = 0;
        int max = 1024;
        int discardPosition = (int)(max * .8); // an arbitrary number of entries that we'll skip on the replay
        CommitLogPosition commitLogPosition = null;
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(STANDARD1);
        for (int i = 0; i < max; i++)
        {
            // NOTE(review): the key is "k" + 1 (literal one), so every mutation targets the same
            // partition -- possibly intended to be "k" + i; the count assertion holds either way.
            final Mutation rm1 = new RowUpdateBuilder(cfs.metadata(), 0, "k" + 1)
                                 .clustering("bytes")
                                 .add("val", bytes("this is a string"))
                                 .build();
            CommitLogPosition position = CommitLog.instance.add(rm1);

            if (i == discardPosition)
                commitLogPosition = position;
            if (i > discardPosition)
            {
                cellCount += 1;
            }
        }

        CommitLog.instance.sync(true);

        SimpleCountingReplayer replayer = new SimpleCountingReplayer(CommitLog.instance, commitLogPosition, cfs.metadata());
        List<String> activeSegments = CommitLog.instance.getActiveSegmentNames();
        assertFalse(activeSegments.isEmpty());

        File[] files = new File(CommitLog.instance.segmentManager.storageDirectory).listFiles((file, name) -> activeSegments.contains(name));
        replayer.replayFiles(files);

        assertEquals(cellCount, replayer.cells);
    }

    /** Replayer that counts cells of the target table, skipping entries before a filter position. */
    class SimpleCountingReplayer extends CommitLogReplayer
    {
        private final CommitLogPosition filterPosition;
        private final TableMetadata metadata;
        int cells;   // cells observed for the target table after filterPosition
        int skipped; // mutations skipped because they precede filterPosition

        SimpleCountingReplayer(CommitLog commitLog, CommitLogPosition filterPosition, TableMetadata metadata)
        {
            super(commitLog, filterPosition, Collections.emptyMap(), ReplayFilter.create());
            this.filterPosition = filterPosition;
            this.metadata = metadata;
        }

        @SuppressWarnings("resource")
        @Override
        public void handleMutation(Mutation m, int size, int entryLocation, CommitLogDescriptor desc)
        {
            // Filter out system writes that could flake the test.
            if (!KEYSPACE1.equals(m.getKeyspaceName()))
                return;

            if (entryLocation <= filterPosition.position)
            {
                // Skip over this mutation.
                skipped++;
                return;
            }
            for (PartitionUpdate partitionUpdate : m.getPartitionUpdates())
            {
                // Only process mutations for the CF's we're testing against, since we can't deterministically predict
                // whether or not system keyspaces will be mutated during a test.
                if (partitionUpdate.metadata().name.equals(metadata.name))
                {
                    for (Row row : partitionUpdate)
                        cells += Iterables.size(row.cells());
                }
            }
        }
    }

    // NOTE(review): no @Test annotation here, so JUnit never runs this method -- confirm whether
    // the annotation was dropped accidentally.
    public void testUnwriteableFlushRecovery() throws ExecutionException, InterruptedException, IOException
    {
        CommitLog.instance.resetUnsafe(true);

        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(STANDARD1);

        DiskFailurePolicy oldPolicy = DatabaseDescriptor.getDiskFailurePolicy();
        try
        {
            DatabaseDescriptor.setDiskFailurePolicy(DiskFailurePolicy.ignore);

            for (int i = 0 ; i < 5 ; i++)
            {
                new RowUpdateBuilder(cfs.metadata(), 0, "k")
                    .clustering("c" + i).add("val", ByteBuffer.allocate(100))
                    .build()
                    .apply();

                if (i == 2)
                {
                    try (Closeable c = Util.markDirectoriesUnwriteable(cfs))
                    {
                        cfs.forceBlockingFlush();
                    }
                    catch (Throwable t)
                    {
                        // expected. Cause (after some wrappings) should be a write error
                        while (!(t instanceof FSWriteError))
                            t = t.getCause();
                    }
                }
                else
                    cfs.forceBlockingFlush();
            }
        }
        finally
        {
            DatabaseDescriptor.setDiskFailurePolicy(oldPolicy);
        }

        CommitLog.instance.sync(true);
        System.setProperty("cassandra.replayList", KEYSPACE1 + "." + STANDARD1);
        // Currently we don't attempt to re-flush a memtable that failed, thus make sure data is replayed by commitlog.
        // If retries work subsequent flushes should clear up error and this should change to expect 0.
        Assert.assertEquals(1, CommitLog.instance.resetUnsafe(false));
        System.clearProperty("cassandra.replayList");
    }

    /**
     * Shared driver: writes 5 rows, makes the 3rd memtable unflushable, applies the given flush
     * strategy after each write, then verifies exactly one mutation remains to be replayed.
     */
    public void testOutOfOrderFlushRecovery(BiConsumer<ColumnFamilyStore, Memtable> flushAction, boolean performCompaction)
            throws ExecutionException, InterruptedException, IOException
    {
        CommitLog.instance.resetUnsafe(true);

        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(STANDARD1);

        for (int i = 0 ; i < 5 ; i++)
        {
            new RowUpdateBuilder(cfs.metadata(), 0, "k")
                .clustering("c" + i).add("val", ByteBuffer.allocate(100))
                .build()
                .apply();

            Memtable current = cfs.getTracker().getView().getCurrentMemtable();
            if (i == 2)
                current.makeUnflushable();

            flushAction.accept(cfs, current);
        }
        if (performCompaction)
            cfs.forceMajorCompaction();
        // Make sure metadata saves and reads fine
        for (SSTableReader reader : cfs.getLiveSSTables())
            reader.reloadSSTableMetadata();

        CommitLog.instance.sync(true);
        System.setProperty("cassandra.replayList", KEYSPACE1 + "." + STANDARD1);
        // In the absence of error, this should be 0 because forceBlockingFlush/forceRecycleAllSegments would have
        // persisted all data in the commit log. Because we know there was an error, there must be something left to
        // replay.
        Assert.assertEquals(1, CommitLog.instance.resetUnsafe(false));
        System.clearProperty("cassandra.replayList");
    }

    /** Flush strategy: blocking flush, swallowing the expected write error for the poisoned memtable. */
    BiConsumer<ColumnFamilyStore, Memtable> flush = (cfs, current) ->
    {
        try
        {
            cfs.forceBlockingFlush();
        }
        catch (Throwable t)
        {
            // expected after makeUnflushable. Cause (after some wrappings) should be a write error
            while (!(t instanceof FSWriteError))
                t = t.getCause();
            // Wait for started flushes to complete.
            cfs.switchMemtableIfCurrent(current);
        }
    };

    /** Flush strategy: recycle all commit log segments instead of flushing directly. */
    BiConsumer<ColumnFamilyStore, Memtable> recycleSegments = (cfs, current) ->
    {
        // Move to new commit log segment and try to flush all data. Also delete segments that no longer contain
        // flushed data.
        // This does not stop on errors and should retain segments for which flushing failed.
        CommitLog.instance.forceRecycleAllSegments();

        // Wait for started flushes to complete.
        cfs.switchMemtableIfCurrent(current);
    };

    @Test
    public void testOutOfOrderFlushRecovery() throws ExecutionException, InterruptedException, IOException
    {
        testOutOfOrderFlushRecovery(flush, false);
    }

    @Test
    public void testOutOfOrderLogDiscard() throws ExecutionException, InterruptedException, IOException
    {
        testOutOfOrderFlushRecovery(recycleSegments, false);
    }

    @Test
    public void testOutOfOrderFlushRecoveryWithCompaction() throws ExecutionException, InterruptedException, IOException
    {
        testOutOfOrderFlushRecovery(flush, true);
    }

    @Test
    public void testOutOfOrderLogDiscardWithCompaction() throws ExecutionException, InterruptedException, IOException
    {
        testOutOfOrderFlushRecovery(recycleSegments, true);
    }

    /** A static-column-only write against the collection-clustering table must replay cleanly. */
    @Test
    public void testRecoveryWithCollectionClusteringKeysStatic() throws Exception
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore(CUSTOM1);
        RowUpdateBuilder rb = new RowUpdateBuilder(cfs.metadata(), 0, BigInteger.ONE);

        rb.add("s", BigInteger.valueOf(2));

        Mutation rm = rb.build();
        CommitLog.instance.add(rm);

        int replayed = 0;

        try
        {
            System.setProperty(CommitLogReplayer.IGNORE_REPLAY_ERRORS_PROPERTY, "true");
            replayed = CommitLog.instance.resetUnsafe(false);
        }
        finally
        {
            System.clearProperty(CommitLogReplayer.IGNORE_REPLAY_ERRORS_PROPERTY);
        }

        // NOTE(review): arguments are swapped relative to JUnit's (expected, actual) convention;
        // harmless for the assertion itself, but failure messages will read backwards.
        Assert.assertEquals(replayed, 1);
    }
}
package io.cattle.platform.servicediscovery.deployment.impl; import static io.cattle.platform.core.model.tables.LoadBalancerTable.LOAD_BALANCER; import io.cattle.platform.core.constants.InstanceConstants; import io.cattle.platform.core.dao.GenericMapDao; import io.cattle.platform.core.model.Instance; import io.cattle.platform.core.model.LoadBalancer; import io.cattle.platform.core.model.LoadBalancerHostMap; import io.cattle.platform.core.model.Service; import io.cattle.platform.core.model.ServiceExposeMap; import io.cattle.platform.object.ObjectManager; import io.cattle.platform.object.util.DataAccessor; import io.cattle.platform.object.util.DataUtils; import io.cattle.platform.servicediscovery.api.constants.ServiceDiscoveryConstants; import io.cattle.platform.servicediscovery.api.constants.ServiceDiscoveryConstants.KIND; import io.cattle.platform.servicediscovery.api.dao.ServiceExposeMapDao; import io.cattle.platform.servicediscovery.deployment.DeploymentUnitInstance; import io.cattle.platform.servicediscovery.deployment.DeploymentUnitInstanceFactory; import io.cattle.platform.servicediscovery.deployment.impl.DeploymentManagerImpl.DeploymentServiceContext; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import javax.inject.Inject; import org.apache.commons.lang3.tuple.Pair; public class DeploymentUnitInstanceFactoryImpl implements DeploymentUnitInstanceFactory { @Inject ObjectManager objectMgr; @Inject ServiceExposeMapDao expMapDao; @Inject GenericMapDao mapDao; @Override @SuppressWarnings("unchecked") public DeploymentUnitInstance createDeploymentUnitInstance(DeploymentServiceContext context, String uuid, Service service, String instanceName, Object instanceObj, Map<String, String> labels, String launchConfigName) { if (service.getKind().equalsIgnoreCase(KIND.SERVICE.name())) { Instance instance = null; if (instanceObj != null) { instance = (Instance) 
instanceObj; } return new DefaultDeploymentUnitInstance(context, uuid, service, instanceName, instance, labels, launchConfigName); } else if (service.getKind().equalsIgnoreCase(KIND.LOADBALANCERSERVICE.name())) { LoadBalancerHostMap hostMap = null; if (instanceObj != null) { hostMap = (LoadBalancerHostMap) instanceObj; } return new LoadBalancerDeploymentUnitInstance(context, uuid, service, hostMap, labels, launchConfigName); } else if (service.getKind().equalsIgnoreCase(KIND.EXTERNALSERVICE.name())) { Pair<String, String> ipHostName = null; if (instanceObj != null) { ipHostName = (Pair<String, String>) instanceObj; } return new ExternalDeploymentUnitInstance(context, uuid, service, launchConfigName, ipHostName.getLeft(), ipHostName.getRight()); } return null; } @Override public List<DeploymentUnit> collectDeploymentUnits(List<Service> services, DeploymentServiceContext context) { /* * 1. find all containers related to the service through the serviceexposemaps for regular service, and * loadBalancerHostMap for the lb service. Then group all the objects * by the label 'io.rancher.deployment.unit'. When containers are deployed through service discovery that * label will be placed on them. * * 2. 
put all the containers to the deploymentUnit */ Map<String, Map<String, String>> uuidToLabels = new HashMap<>(); Map<String, List<DeploymentUnitInstance>> uuidToInstances = new HashMap<>(); List<DeploymentUnit> units = new ArrayList<>(); for (Service service : services) { if (service.getKind().equalsIgnoreCase(KIND.SERVICE.name())) { collectDefaultServiceInstances(context, uuidToLabels, uuidToInstances, service); } else if (service.getKind().equalsIgnoreCase(KIND.LOADBALANCERSERVICE.name())) { collectLoadBalancerServiceInstances(context, uuidToLabels, uuidToInstances, service); } else if (service.getKind().equalsIgnoreCase(KIND.EXTERNALSERVICE.name())) { collectExternalServiceInstances(context, uuidToLabels, uuidToInstances, service); } for (String uuid : uuidToInstances.keySet()) { DeploymentUnit unit = new DeploymentUnit(context, uuid, services, uuidToInstances.get(uuid), uuidToLabels.get(uuid)); units.add(unit); } } return units; } protected void collectExternalServiceInstances(DeploymentServiceContext context, Map<String, Map<String, String>> uuidToLabels, Map<String, List<DeploymentUnitInstance>> uuidToInstances, Service service) { // 1. request deployment units for ips defined on the service createExternalUnitsForIps(context, uuidToLabels, uuidToInstances, service); // 2. request deployment units for hostname defined on the service createDeploymentUnitsForHostname(context, uuidToLabels, uuidToInstances, service); } protected void createDeploymentUnitsForHostname(DeploymentServiceContext context, Map<String, Map<String, String>> uuidToLabels, Map<String, List<DeploymentUnitInstance>> uuidToInstances, Service service) { String hostName = DataAccessor.fields(service) .withKey(ServiceDiscoveryConstants.FIELD_HOSTNAME).as(String.class); if (hostName != null) { createExternalDeploymentUnit(context, uuidToLabels, uuidToInstances, service, null, hostName); } // get existing maps (they will be cleaned up later if ip is no longer on the service) List<? 
extends ServiceExposeMap> exposeMaps = expMapDao.getNonRemovedServiceHostnameMaps(service.getId());
        for (ServiceExposeMap exposeMap : exposeMaps) {
            createExternalDeploymentUnit(context, uuidToLabels, uuidToInstances, service, null, exposeMap.getHostName());
        }
    }

    /**
     * Creates deployment units for a service backed by external ip addresses rather than managed
     * containers: one unit per ip currently configured on the service, plus one per already
     * persisted ip map so that stale maps can be reconciled later.
     */
    @SuppressWarnings("unchecked")
    protected void createExternalUnitsForIps(DeploymentServiceContext context, Map<String, Map<String, String>> uuidToLabels,
            Map<String, List<DeploymentUnitInstance>> uuidToInstances, Service service) {
        List<String> externalIps = DataAccessor.fields(service)
                .withKey(ServiceDiscoveryConstants.FIELD_EXTERNALIPS).withDefault(Collections.EMPTY_LIST)
                .as(List.class);
        if (externalIps != null) {
            for (String externalIp : externalIps) {
                createExternalDeploymentUnit(context, uuidToLabels, uuidToInstances, service, externalIp, null);
            }
        }
        // get existing maps (they will be cleaned up later if ip is no longer on the service)
        List<? extends ServiceExposeMap> exposeMaps = expMapDao.getNonRemovedServiceIpMaps(service.getId());
        for (ServiceExposeMap exposeMap : exposeMaps) {
            createExternalDeploymentUnit(context, uuidToLabels, uuidToInstances, service, exposeMap.getIpAddress(), null);
        }
    }

    /**
     * Registers a single external (ip- or hostname-based) deployment unit under a freshly
     * generated uuid. Exactly one of externalIp/hostName is expected to be non-null
     * (NOTE(review): not enforced here — confirm with callers).
     */
    protected void createExternalDeploymentUnit(DeploymentServiceContext context, Map<String, Map<String, String>> uuidToLabels,
            Map<String, List<DeploymentUnitInstance>> uuidToInstances, Service service, String externalIp, String hostName) {
        String uuid = UUID.randomUUID().toString();
        DeploymentUnitInstance unitInstance = createDeploymentUnitInstance(context, uuid, service, null,
                Pair.of(externalIp, hostName), null, ServiceDiscoveryConstants.PRIMARY_LAUNCH_CONFIG_NAME);
        // External units carry no instance labels of their own.
        addToDeploymentUnitList(uuidToLabels, uuidToInstances, new HashMap<String, String>(), uuid, unitInstance);
    }

    /**
     * Collects deployment units for a load-balancer service from its host maps, grouped by the
     * deployment-unit uuid stored in the host-map labels.
     *
     * Fix: guard against the load-balancer lookup returning null — previously {@code lb.getId()}
     * threw a NullPointerException when no non-removed load balancer was associated with the
     * service.
     */
    @SuppressWarnings("unchecked")
    protected void collectLoadBalancerServiceInstances(DeploymentServiceContext context, Map<String, Map<String, String>> uuidToLabels,
            Map<String, List<DeploymentUnitInstance>> uuidToInstances, Service service) {
        LoadBalancer lb = objectMgr.findOne(LoadBalancer.class, LOAD_BALANCER.SERVICE_ID, service.getId(),
                LOAD_BALANCER.REMOVED, null);
        if (lb == null) {
            // No active load balancer for this service; nothing to collect.
            return;
        }
        List<? extends LoadBalancerHostMap> hostMaps = mapDao.findNonRemoved(LoadBalancerHostMap.class, LoadBalancer.class, lb.getId());
        for (LoadBalancerHostMap hostMap : hostMaps) {
            Map<String, Object> data = DataUtils.getFields(hostMap);
            Map<String, String> instanceLabels = data.get(InstanceConstants.FIELD_LABELS) == null
                    ? new HashMap<String, String>() : (Map<String, String>) data.get(InstanceConstants.FIELD_LABELS);
            String deploymentUnitUUID = instanceLabels.get(ServiceDiscoveryConstants.LABEL_SERVICE_DEPLOYMENT_UNIT);
            String launchConfigName = instanceLabels.get(ServiceDiscoveryConstants.LABEL_SERVICE_LAUNCH_CONFIG);
            DeploymentUnitInstance unitInstance = createDeploymentUnitInstance(context, deploymentUnitUUID, service, null,
                    hostMap, instanceLabels, launchConfigName);
            addToDeploymentUnitList(uuidToLabels, uuidToInstances, instanceLabels, deploymentUnitUUID, unitInstance);
        }
    }

    /**
     * Collects deployment units for a regular (container-backed) service from its existing
     * instances, grouped by the deployment-unit uuid stored in the instance labels.
     */
    @SuppressWarnings("unchecked")
    protected void collectDefaultServiceInstances(DeploymentServiceContext context, Map<String, Map<String, String>> uuidToLabels,
            Map<String, List<DeploymentUnitInstance>> uuidToInstances, Service service) {
        List<? extends Instance> serviceContainers = expMapDao.listServiceInstances(service.getId());
        for (Instance serviceContainer : serviceContainers) {
            Map<String, String> instanceLabels = DataAccessor.fields(serviceContainer)
                    .withKey(InstanceConstants.FIELD_LABELS).withDefault(Collections.EMPTY_MAP).as(Map.class);
            String deploymentUnitUUID = instanceLabels.get(ServiceDiscoveryConstants.LABEL_SERVICE_DEPLOYMENT_UNIT);
            String launchConfigName = instanceLabels.get(ServiceDiscoveryConstants.LABEL_SERVICE_LAUNCH_CONFIG);
            DeploymentUnitInstance unitInstance = createDeploymentUnitInstance(context, deploymentUnitUUID, service,
                    serviceContainer.getName(), serviceContainer, instanceLabels, launchConfigName);
            addToDeploymentUnitList(uuidToLabels, uuidToInstances, instanceLabels, deploymentUnitUUID, unitInstance);
        }
    }

    /**
     * Indexes a unit instance under its deployment-unit uuid; the first instance seen for a uuid
     * also registers that unit's labels. NOTE(review): deploymentUnitUUID may be null for
     * instances missing the deployment-unit label; HashMap tolerates a null key, so such
     * instances would all be grouped under the null key — confirm this is intended.
     */
    protected void addToDeploymentUnitList(Map<String, Map<String, String>> uuidToLabels,
            Map<String, List<DeploymentUnitInstance>> uuidToInstances, Map<String, String> instanceLabels,
            String deploymentUnitUUID, DeploymentUnitInstance unitInstance) {
        if (uuidToLabels.get(deploymentUnitUUID) == null) {
            uuidToLabels.put(deploymentUnitUUID, instanceLabels);
        }
        List<DeploymentUnitInstance> deploymentUnitInstances = uuidToInstances.get(deploymentUnitUUID);
        if (deploymentUnitInstances == null) {
            deploymentUnitInstances = new ArrayList<>();
        }
        deploymentUnitInstances.add(unitInstance);
        uuidToInstances.put(deploymentUnitUUID, deploymentUnitInstances);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.node.DiscoveryNodeFilters; import org.elasticsearch.cluster.routing.allocation.IndexMetaDataUpdater; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.IP_VALIDATOR; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; public class IndexMetaData implements Diffable<IndexMetaData>, ToXContent { /** * This class will be removed in v7.0 */ @Deprecated public interface Custom extends Diffable<Custom>, ToXContent { String type(); Custom fromMap(Map<String, Object> map) throws IOException; Custom fromXContent(XContentParser parser) throws IOException; /** * Reads the {@link org.elasticsearch.cluster.Diff} from StreamInput */ Diff<Custom> readDiffFrom(StreamInput in) throws IOException; /** * Reads an object of this type from the provided 
{@linkplain StreamInput}. The receiving instance remains unchanged. */ Custom readFrom(StreamInput in) throws IOException; /** * Merges from this to another, with this being more important, i.e., if something exists in this and another, * this will prevail. */ Custom mergeWith(Custom another); } public static Map<String, Custom> customPrototypes = new HashMap<>(); /** * Register a custom index meta data factory. Make sure to call it from a static block. */ public static void registerPrototype(String type, Custom proto) { customPrototypes.put(type, proto); } @Nullable public static <T extends Custom> T lookupPrototype(String type) { //noinspection unchecked return (T) customPrototypes.get(type); } public static <T extends Custom> T lookupPrototypeSafe(String type) { //noinspection unchecked T proto = (T) customPrototypes.get(type); if (proto == null) { throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "]"); } return proto; } public static final ClusterBlock INDEX_READ_ONLY_BLOCK = new ClusterBlock(5, "index read-only (api)", false, false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); public static final ClusterBlock INDEX_READ_BLOCK = new ClusterBlock(7, "index read (api)", false, false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.READ)); public static final ClusterBlock INDEX_WRITE_BLOCK = new ClusterBlock(8, "index write (api)", false, false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE)); public static final ClusterBlock INDEX_METADATA_BLOCK = new ClusterBlock(9, "index metadata (api)", false, false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.METADATA_WRITE, ClusterBlockLevel.METADATA_READ)); public static final ClusterBlock INDEX_READ_ONLY_ALLOW_DELETE_BLOCK = new ClusterBlock(12, "index read-only / allow delete (api)", false, false, true, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.METADATA_WRITE, 
ClusterBlockLevel.WRITE));

    /** Open/closed state of an index, serialized as a single byte id. */
    public enum State {
        OPEN((byte) 0),
        CLOSE((byte) 1);

        private final byte id;

        State(byte id) {
            this.id = id;
        }

        public byte id() {
            return this.id;
        }

        public static State fromId(byte id) {
            if (id == 0) {
                return OPEN;
            } else if (id == 1) {
                return CLOSE;
            }
            throw new IllegalStateException("No state match for id [" + id + "]");
        }

        public static State fromString(String state) {
            if ("open".equals(state)) {
                return OPEN;
            } else if ("close".equals(state)) {
                return CLOSE;
            }
            throw new IllegalStateException("No state match for [" + state + "]");
        }
    }

    static Setting<Integer> buildNumberOfShardsSetting() {
        /* This is a safety limit that should only be exceeded in very rare and special cases. The assumption is that
         * 99% of the users have less than 1024 shards per index. We also make it a hard check that requires restart of nodes
         * if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards per cluster.
         * this also prevents creating stuff like a new index with millions of shards by accident which essentially kills the entire cluster
         * with OOM on the spot.*/
        final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", "1024"));
        if (maxNumShards < 1) {
            throw new IllegalArgumentException("es.index.max_number_of_shards must be > 0");
        }
        return Setting.intSetting(SETTING_NUMBER_OF_SHARDS, Math.min(5, maxNumShards), 1, maxNumShards,
            Property.IndexScope, Property.Final);
    }

    public static final String INDEX_SETTING_PREFIX = "index.";
    public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards";
    public static final Setting<Integer> INDEX_NUMBER_OF_SHARDS_SETTING = buildNumberOfShardsSetting();
    public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas";
    public static final Setting<Integer> INDEX_NUMBER_OF_REPLICAS_SETTING =
        Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, Property.Dynamic, Property.IndexScope);
    public static final String SETTING_ROUTING_PARTITION_SIZE = "index.routing_partition_size";
    public static final Setting<Integer> INDEX_ROUTING_PARTITION_SIZE_SETTING =
        Setting.intSetting(SETTING_ROUTING_PARTITION_SIZE, 1, 1, Property.IndexScope);
    public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas";
    public static final Setting<AutoExpandReplicas> INDEX_AUTO_EXPAND_REPLICAS_SETTING = AutoExpandReplicas.SETTING;
    public static final String SETTING_READ_ONLY = "index.blocks.read_only";
    public static final Setting<Boolean> INDEX_READ_ONLY_SETTING =
        Setting.boolSetting(SETTING_READ_ONLY, false, Property.Dynamic, Property.IndexScope);
    public static final String SETTING_BLOCKS_READ = "index.blocks.read";
    public static final Setting<Boolean> INDEX_BLOCKS_READ_SETTING =
        Setting.boolSetting(SETTING_BLOCKS_READ, false, Property.Dynamic, Property.IndexScope);
    public static final String SETTING_BLOCKS_WRITE = "index.blocks.write";
    public static final Setting<Boolean> INDEX_BLOCKS_WRITE_SETTING =
        Setting.boolSetting(SETTING_BLOCKS_WRITE, false, Property.Dynamic, Property.IndexScope);
    public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata";
    public static final Setting<Boolean> INDEX_BLOCKS_METADATA_SETTING =
        Setting.boolSetting(SETTING_BLOCKS_METADATA, false, Property.Dynamic, Property.IndexScope);
    public static final String SETTING_READ_ONLY_ALLOW_DELETE = "index.blocks.read_only_allow_delete";
    public static final Setting<Boolean> INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING =
        Setting.boolSetting(SETTING_READ_ONLY_ALLOW_DELETE, false, Property.Dynamic, Property.IndexScope);
    public static final String SETTING_VERSION_CREATED = "index.version.created";
    public static final String SETTING_VERSION_CREATED_STRING = "index.version.created_string";
    public static final String SETTING_VERSION_UPGRADED = "index.version.upgraded";
    public static final String SETTING_VERSION_UPGRADED_STRING = "index.version.upgraded_string";
    public static final String SETTING_CREATION_DATE = "index.creation_date";

    /**
     * The user provided name for an index. This is the plain string provided by the user when the index was created.
     * It might still contain date math expressions etc. (added in 5.0)
     */
    public static final String SETTING_INDEX_PROVIDED_NAME = "index.provided_name";
    public static final String SETTING_PRIORITY = "index.priority";
    // Consistency fix: reuse the SETTING_PRIORITY constant (identical value "index.priority")
    // instead of repeating the string literal.
    public static final Setting<Integer> INDEX_PRIORITY_SETTING =
        Setting.intSetting(SETTING_PRIORITY, 1, 0, Property.Dynamic, Property.IndexScope);
    public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string";
    public static final String SETTING_INDEX_UUID = "index.uuid";
    public static final String SETTING_DATA_PATH = "index.data_path";
    public static final Setting<String> INDEX_DATA_PATH_SETTING =
        new Setting<>(SETTING_DATA_PATH, "", Function.identity(), Property.IndexScope);
    public static final String INDEX_UUID_NA_VALUE = "_na_";

    public static final String INDEX_ROUTING_REQUIRE_GROUP_PREFIX = "index.routing.allocation.require";
    public static final String INDEX_ROUTING_INCLUDE_GROUP_PREFIX = "index.routing.allocation.include";
    public static final String INDEX_ROUTING_EXCLUDE_GROUP_PREFIX = "index.routing.allocation.exclude";
    public static final Setting<Settings> INDEX_ROUTING_REQUIRE_GROUP_SETTING =
        Setting.groupSetting(INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".", IP_VALIDATOR, Property.Dynamic, Property.IndexScope);
    public static final Setting<Settings> INDEX_ROUTING_INCLUDE_GROUP_SETTING =
        Setting.groupSetting(INDEX_ROUTING_INCLUDE_GROUP_PREFIX + ".", IP_VALIDATOR, Property.Dynamic, Property.IndexScope);
    public static final Setting<Settings> INDEX_ROUTING_EXCLUDE_GROUP_SETTING =
        Setting.groupSetting(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + ".", IP_VALIDATOR, Property.Dynamic, Property.IndexScope);
    // this is only setable internally not a registered setting!!
    public static final Setting<Settings> INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING =
        Setting.groupSetting("index.routing.allocation.initial_recovery.");

    /**
     * The number of active shard copies to check for before proceeding with a write operation.
     */
    public static final Setting<ActiveShardCount> SETTING_WAIT_FOR_ACTIVE_SHARDS =
        new Setting<>("index.write.wait_for_active_shards", "1", ActiveShardCount::parseString,
            Setting.Property.Dynamic, Setting.Property.IndexScope);

    public static final String KEY_IN_SYNC_ALLOCATIONS = "in_sync_allocations";
    static final String KEY_VERSION = "version";
    static final String KEY_ROUTING_NUM_SHARDS = "routing_num_shards";
    static final String KEY_SETTINGS = "settings";
    static final String KEY_STATE = "state";
    static final String KEY_MAPPINGS = "mappings";
    static final String KEY_ALIASES = "aliases";
    public static final String KEY_PRIMARY_TERMS = "primary_terms";

    public static final String INDEX_STATE_FILE_PREFIX = "state-";

    private final int routingNumShards;
    private final int routingFactor;
    private final int routingPartitionSize;

    private final int numberOfShards;
    private final int numberOfReplicas;

    private final Index index;
    private final long version;
    private final long[] primaryTerms;

    private final State state;

    private final ImmutableOpenMap<String, AliasMetaData> aliases;

    private final Settings settings;

    private final ImmutableOpenMap<String, MappingMetaData> mappings;

    private final ImmutableOpenMap<String, Custom> customs;

    private final ImmutableOpenIntMap<Set<String>> inSyncAllocationIds;

    private final transient int totalNumberOfShards;

    private final DiscoveryNodeFilters requireFilters;
    private final DiscoveryNodeFilters includeFilters;
    private final DiscoveryNodeFilters excludeFilters;
    private final DiscoveryNodeFilters initialRecoveryFilters;

    private final Version indexCreatedVersion;
    private final Version indexUpgradedVersion;

    private final ActiveShardCount waitForActiveShards;

    private IndexMetaData(Index index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings,
ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases, ImmutableOpenMap<String, Custom> customs, ImmutableOpenIntMap<Set<String>> inSyncAllocationIds, DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters initialRecoveryFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters, Version indexCreatedVersion, Version indexUpgradedVersion, int routingNumShards, int routingPartitionSize, ActiveShardCount waitForActiveShards) { this.index = index; this.version = version; this.primaryTerms = primaryTerms; assert primaryTerms.length == numberOfShards; this.state = state; this.numberOfShards = numberOfShards; this.numberOfReplicas = numberOfReplicas; this.totalNumberOfShards = numberOfShards * (numberOfReplicas + 1); this.settings = settings; this.mappings = mappings; this.customs = customs; this.aliases = aliases; this.inSyncAllocationIds = inSyncAllocationIds; this.requireFilters = requireFilters; this.includeFilters = includeFilters; this.excludeFilters = excludeFilters; this.initialRecoveryFilters = initialRecoveryFilters; this.indexCreatedVersion = indexCreatedVersion; this.indexUpgradedVersion = indexUpgradedVersion; this.routingNumShards = routingNumShards; this.routingFactor = routingNumShards / numberOfShards; this.routingPartitionSize = routingPartitionSize; this.waitForActiveShards = waitForActiveShards; assert numberOfShards * routingFactor == routingNumShards : routingNumShards + " must be a multiple of " + numberOfShards; } public Index getIndex() { return index; } public String getIndexUUID() { return index.getUUID(); } /** * Test whether the current index UUID is the same as the given one. 
Returns true if either are _na_ */ public boolean isSameUUID(String otherUUID) { assert otherUUID != null; assert getIndexUUID() != null; if (INDEX_UUID_NA_VALUE.equals(otherUUID) || INDEX_UUID_NA_VALUE.equals(getIndexUUID())) { return true; } return otherUUID.equals(getIndexUUID()); } public long getVersion() { return this.version; } /** * The term of the current selected primary. This is a non-negative number incremented when * a primary shard is assigned after a full cluster restart or a replica shard is promoted to a primary. * * Note: since we increment the term every time a shard is assigned, the term for any operational shard (i.e., a shard * that can be indexed into) is larger than 0. See {@link IndexMetaDataUpdater#applyChanges}. **/ public long primaryTerm(int shardId) { return this.primaryTerms[shardId]; } /** * Return the {@link Version} on which this index has been created. This * information is typically useful for backward compatibility. */ public Version getCreationVersion() { return indexCreatedVersion; } /** * Return the {@link Version} on which this index has been upgraded. This * information is typically useful for backward compatibility. */ public Version getUpgradedVersion() { return indexUpgradedVersion; } public long getCreationDate() { return settings.getAsLong(SETTING_CREATION_DATE, -1L); } public State getState() { return this.state; } public int getNumberOfShards() { return numberOfShards; } public int getNumberOfReplicas() { return numberOfReplicas; } public int getRoutingPartitionSize() { return routingPartitionSize; } public boolean isRoutingPartitionedIndex() { return routingPartitionSize != 1; } public int getTotalNumberOfShards() { return totalNumberOfShards; } /** * Returns the configured {@link #SETTING_WAIT_FOR_ACTIVE_SHARDS}, which defaults * to an active shard count of 1 if not specified. 
*/ public ActiveShardCount getWaitForActiveShards() { return waitForActiveShards; } public Settings getSettings() { return settings; } public ImmutableOpenMap<String, AliasMetaData> getAliases() { return this.aliases; } public ImmutableOpenMap<String, MappingMetaData> getMappings() { return mappings; } @Nullable public MappingMetaData mapping(String mappingType) { return mappings.get(mappingType); } public static final String INDEX_SHRINK_SOURCE_UUID_KEY = "index.shrink.source.uuid"; public static final String INDEX_SHRINK_SOURCE_NAME_KEY = "index.shrink.source.name"; public static final Setting<String> INDEX_SHRINK_SOURCE_UUID = Setting.simpleString(INDEX_SHRINK_SOURCE_UUID_KEY); public static final Setting<String> INDEX_SHRINK_SOURCE_NAME = Setting.simpleString(INDEX_SHRINK_SOURCE_NAME_KEY); public Index getMergeSourceIndex() { return INDEX_SHRINK_SOURCE_UUID.exists(settings) ? new Index(INDEX_SHRINK_SOURCE_NAME.get(settings), INDEX_SHRINK_SOURCE_UUID.get(settings)) : null; } /** * Sometimes, the default mapping exists and an actual mapping is not created yet (introduced), * in this case, we want to return the default mapping in case it has some default mapping definitions. * <p> * Note, once the mapping type is introduced, the default mapping is applied on the actual typed MappingMetaData, * setting its routing, timestamp, and so on if needed. 
*/ @Nullable public MappingMetaData mappingOrDefault(String mappingType) { MappingMetaData mapping = mappings.get(mappingType); if (mapping != null) { return mapping; } return mappings.get(MapperService.DEFAULT_MAPPING); } public ImmutableOpenMap<String, Custom> getCustoms() { return this.customs; } @SuppressWarnings("unchecked") public <T extends Custom> T custom(String type) { return (T) customs.get(type); } public ImmutableOpenIntMap<Set<String>> getInSyncAllocationIds() { return inSyncAllocationIds; } public Set<String> inSyncAllocationIds(int shardId) { assert shardId >= 0 && shardId < numberOfShards; return inSyncAllocationIds.get(shardId); } @Nullable public DiscoveryNodeFilters requireFilters() { return requireFilters; } @Nullable public DiscoveryNodeFilters getInitialRecoveryFilters() { return initialRecoveryFilters; } @Nullable public DiscoveryNodeFilters includeFilters() { return includeFilters; } @Nullable public DiscoveryNodeFilters excludeFilters() { return excludeFilters; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } IndexMetaData that = (IndexMetaData) o; if (version != that.version) { return false; } if (!aliases.equals(that.aliases)) { return false; } if (!index.equals(that.index)) { return false; } if (!mappings.equals(that.mappings)) { return false; } if (!settings.equals(that.settings)) { return false; } if (state != that.state) { return false; } if (!customs.equals(that.customs)) { return false; } if (routingNumShards != that.routingNumShards) { return false; } if (routingFactor != that.routingFactor) { return false; } if (Arrays.equals(primaryTerms, that.primaryTerms) == false) { return false; } if (!inSyncAllocationIds.equals(that.inSyncAllocationIds)) { return false; } return true; } @Override public int hashCode() { int result = index.hashCode(); result = 31 * result + Long.hashCode(version); result = 31 * result + state.hashCode(); result = 31 * 
result + aliases.hashCode(); result = 31 * result + settings.hashCode(); result = 31 * result + mappings.hashCode(); result = 31 * result + customs.hashCode(); result = 31 * result + Long.hashCode(routingFactor); result = 31 * result + Long.hashCode(routingNumShards); result = 31 * result + Arrays.hashCode(primaryTerms); result = 31 * result + inSyncAllocationIds.hashCode(); return result; } @Override public Diff<IndexMetaData> diff(IndexMetaData previousState) { return new IndexMetaDataDiff(previousState, this); } public static Diff<IndexMetaData> readDiffFrom(StreamInput in) throws IOException { return new IndexMetaDataDiff(in); } public static IndexMetaData fromXContent(XContentParser parser) throws IOException { return Builder.fromXContent(parser); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { Builder.toXContent(this, builder, params); return builder; } private static class IndexMetaDataDiff implements Diff<IndexMetaData> { private final String index; private final int routingNumShards; private final long version; private final long[] primaryTerms; private final State state; private final Settings settings; private final Diff<ImmutableOpenMap<String, MappingMetaData>> mappings; private final Diff<ImmutableOpenMap<String, AliasMetaData>> aliases; private final Diff<ImmutableOpenMap<String, Custom>> customs; private final Diff<ImmutableOpenIntMap<Set<String>>> inSyncAllocationIds; IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) { index = after.index.getName(); version = after.version; routingNumShards = after.routingNumShards; state = after.state; settings = after.settings; primaryTerms = after.primaryTerms; mappings = DiffableUtils.diff(before.mappings, after.mappings, DiffableUtils.getStringKeySerializer()); aliases = DiffableUtils.diff(before.aliases, after.aliases, DiffableUtils.getStringKeySerializer()); customs = DiffableUtils.diff(before.customs, after.customs, 
DiffableUtils.getStringKeySerializer()); inSyncAllocationIds = DiffableUtils.diff(before.inSyncAllocationIds, after.inSyncAllocationIds, DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance()); } IndexMetaDataDiff(StreamInput in) throws IOException { index = in.readString(); routingNumShards = in.readInt(); version = in.readLong(); state = State.fromId(in.readByte()); settings = Settings.readSettingsFromStream(in); primaryTerms = in.readVLongArray(); mappings = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), MappingMetaData::new, MappingMetaData::readDiffFrom); aliases = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), AliasMetaData::new, AliasMetaData::readDiffFrom); customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), new DiffableUtils.DiffableValueSerializer<String, Custom>() { @Override public Custom read(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readFrom(in); } @SuppressWarnings("unchecked") @Override public Diff<Custom> readDiff(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readDiffFrom(in); } }); inSyncAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance()); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index); out.writeInt(routingNumShards); out.writeLong(version); out.writeByte(state.id); Settings.writeSettingsToStream(settings, out); out.writeVLongArray(primaryTerms); mappings.writeTo(out); aliases.writeTo(out); customs.writeTo(out); inSyncAllocationIds.writeTo(out); } @Override public IndexMetaData apply(IndexMetaData part) { Builder builder = builder(index); builder.version(version); builder.setRoutingNumShards(routingNumShards); builder.state(state); builder.settings(settings); 
builder.primaryTerms(primaryTerms); builder.mappings.putAll(mappings.apply(part.mappings)); builder.aliases.putAll(aliases.apply(part.aliases)); builder.customs.putAll(customs.apply(part.customs)); builder.inSyncAllocationIds.putAll(inSyncAllocationIds.apply(part.inSyncAllocationIds)); return builder.build(); } } public static IndexMetaData readFrom(StreamInput in) throws IOException { Builder builder = new Builder(in.readString()); builder.version(in.readLong()); builder.setRoutingNumShards(in.readInt()); builder.state(State.fromId(in.readByte())); builder.settings(readSettingsFromStream(in)); builder.primaryTerms(in.readVLongArray()); int mappingsSize = in.readVInt(); for (int i = 0; i < mappingsSize; i++) { MappingMetaData mappingMd = new MappingMetaData(in); builder.putMapping(mappingMd); } int aliasesSize = in.readVInt(); for (int i = 0; i < aliasesSize; i++) { AliasMetaData aliasMd = new AliasMetaData(in); builder.putAlias(aliasMd); } int customSize = in.readVInt(); for (int i = 0; i < customSize; i++) { String type = in.readString(); Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in); builder.putCustom(type, customIndexMetaData); } int inSyncAllocationIdsSize = in.readVInt(); for (int i = 0; i < inSyncAllocationIdsSize; i++) { int key = in.readVInt(); Set<String> allocationIds = DiffableUtils.StringSetValueSerializer.getInstance().read(in, key); builder.putInSyncAllocationIds(key, allocationIds); } return builder.build(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(index.getName()); // uuid will come as part of settings out.writeLong(version); out.writeInt(routingNumShards); out.writeByte(state.id()); writeSettingsToStream(settings, out); out.writeVLongArray(primaryTerms); out.writeVInt(mappings.size()); for (ObjectCursor<MappingMetaData> cursor : mappings.values()) { cursor.value.writeTo(out); } out.writeVInt(aliases.size()); for (ObjectCursor<AliasMetaData> cursor : aliases.values()) { 
cursor.value.writeTo(out); } out.writeVInt(customs.size()); for (ObjectObjectCursor<String, Custom> cursor : customs) { out.writeString(cursor.key); cursor.value.writeTo(out); } out.writeVInt(inSyncAllocationIds.size()); for (IntObjectCursor<Set<String>> cursor : inSyncAllocationIds) { out.writeVInt(cursor.key); DiffableUtils.StringSetValueSerializer.getInstance().write(cursor.value, out); } } public static Builder builder(String index) { return new Builder(index); } public static Builder builder(IndexMetaData indexMetaData) { return new Builder(indexMetaData); } public static class Builder { private String index; private State state = State.OPEN; private long version = 1; private long[] primaryTerms = null; private Settings settings = Settings.Builder.EMPTY_SETTINGS; private final ImmutableOpenMap.Builder<String, MappingMetaData> mappings; private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases; private final ImmutableOpenMap.Builder<String, Custom> customs; private final ImmutableOpenIntMap.Builder<Set<String>> inSyncAllocationIds; private Integer routingNumShards; public Builder(String index) { this.index = index; this.mappings = ImmutableOpenMap.builder(); this.aliases = ImmutableOpenMap.builder(); this.customs = ImmutableOpenMap.builder(); this.inSyncAllocationIds = ImmutableOpenIntMap.builder(); } public Builder(IndexMetaData indexMetaData) { this.index = indexMetaData.getIndex().getName(); this.state = indexMetaData.state; this.version = indexMetaData.version; this.settings = indexMetaData.getSettings(); this.primaryTerms = indexMetaData.primaryTerms.clone(); this.mappings = ImmutableOpenMap.builder(indexMetaData.mappings); this.aliases = ImmutableOpenMap.builder(indexMetaData.aliases); this.customs = ImmutableOpenMap.builder(indexMetaData.customs); this.routingNumShards = indexMetaData.routingNumShards; this.inSyncAllocationIds = ImmutableOpenIntMap.builder(indexMetaData.inSyncAllocationIds); } public String index() { return index; } public 
Builder index(String index) { this.index = index; return this; } public Builder numberOfShards(int numberOfShards) { settings = Settings.builder().put(settings).put(SETTING_NUMBER_OF_SHARDS, numberOfShards).build(); return this; } /** * Sets the number of shards that should be used for routing. This should only be used if the number of shards in * an index has changed ie if the index is shrunk. */ public Builder setRoutingNumShards(int routingNumShards) { this.routingNumShards = routingNumShards; return this; } /** * Returns number of shards that should be used for routing. By default this method will return the number of shards * for this index. * * @see #setRoutingNumShards(int) * @see #numberOfShards() */ public int getRoutingNumShards() { return routingNumShards == null ? numberOfShards() : routingNumShards; } /** * Returns the number of shards. * * @return the provided value or -1 if it has not been set. */ public int numberOfShards() { return settings.getAsInt(SETTING_NUMBER_OF_SHARDS, -1); } public Builder numberOfReplicas(int numberOfReplicas) { settings = Settings.builder().put(settings).put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas).build(); return this; } /** * Returns the number of replicas. * * @return the provided value or -1 if it has not been set. */ public int numberOfReplicas() { return settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, -1); } public Builder routingPartitionSize(int routingPartitionSize) { settings = Settings.builder().put(settings).put(SETTING_ROUTING_PARTITION_SIZE, routingPartitionSize).build(); return this; } /** * Returns the routing partition size. * * @return the provided value or -1 if it has not been set. 
*/ public int routingPartitionSize() { return settings.getAsInt(SETTING_ROUTING_PARTITION_SIZE, -1); } public Builder creationDate(long creationDate) { settings = Settings.builder().put(settings).put(SETTING_CREATION_DATE, creationDate).build(); return this; } public Builder settings(Settings.Builder settings) { return settings(settings.build()); } public Builder settings(Settings settings) { this.settings = settings; return this; } public MappingMetaData mapping(String type) { return mappings.get(type); } public Builder putMapping(String type, String source) throws IOException { putMapping(new MappingMetaData(type, XContentHelper.convertToMap(XContentFactory.xContent(source), source, true))); return this; } public Builder putMapping(MappingMetaData mappingMd) { mappings.put(mappingMd.type(), mappingMd); return this; } public Builder state(State state) { this.state = state; return this; } public Builder putAlias(AliasMetaData aliasMetaData) { aliases.put(aliasMetaData.alias(), aliasMetaData); return this; } public Builder putAlias(AliasMetaData.Builder aliasMetaData) { aliases.put(aliasMetaData.alias(), aliasMetaData.build()); return this; } public Builder removeAlias(String alias) { aliases.remove(alias); return this; } public Builder removeAllAliases() { aliases.clear(); return this; } public Builder putCustom(String type, Custom customIndexMetaData) { this.customs.put(type, customIndexMetaData); return this; } public Set<String> getInSyncAllocationIds(int shardId) { return inSyncAllocationIds.get(shardId); } public Builder putInSyncAllocationIds(int shardId, Set<String> allocationIds) { inSyncAllocationIds.put(shardId, new HashSet<>(allocationIds)); return this; } public long version() { return this.version; } public Builder version(long version) { this.version = version; return this; } /** * returns the primary term for the given shard. * See {@link IndexMetaData#primaryTerm(int)} for more information. 
*/
public long primaryTerm(int shardId) {
    // Lazily initialize so terms can be read before being explicitly set.
    if (primaryTerms == null) {
        initializePrimaryTerms();
    }
    return this.primaryTerms[shardId];
}

/**
 * sets the primary term for the given shard.
 * See {@link IndexMetaData#primaryTerm(int)} for more information.
 */
public Builder primaryTerm(int shardId, long primaryTerm) {
    if (primaryTerms == null) {
        initializePrimaryTerms();
    }
    this.primaryTerms[shardId] = primaryTerm;
    return this;
}

/** Replaces all primary terms with a defensive copy of the given array. */
private void primaryTerms(long[] primaryTerms) {
    this.primaryTerms = primaryTerms.clone();
}

/**
 * Allocates the primary-term array, one slot per shard.
 * Requires the number-of-shards setting to already be present.
 */
private void initializePrimaryTerms() {
    assert primaryTerms == null;
    if (numberOfShards() < 0) {
        throw new IllegalStateException("you must set the number of shards before setting/reading primary terms");
    }
    primaryTerms = new long[numberOfShards()];
}

/**
 * Validates the accumulated state and builds an immutable {@link IndexMetaData}.
 * Validation covers: shard/replica counts, routing partition size, primary-term
 * array length, and wait-for-active-shards vs. replica count.
 *
 * @throws IllegalArgumentException if a required setting is missing or invalid
 * @throws IllegalStateException if the primary-term array length does not match the shard count
 */
public IndexMetaData build() {
    ImmutableOpenMap.Builder<String, AliasMetaData> tmpAliases = aliases;
    Settings tmpSettings = settings;

    // update default mapping on the MappingMetaData
    if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
        MappingMetaData defaultMapping = mappings.get(MapperService.DEFAULT_MAPPING);
        for (ObjectCursor<MappingMetaData> cursor : mappings.values()) {
            cursor.value.updateDefaultMapping(defaultMapping);
        }
    }

    Integer maybeNumberOfShards = settings.getAsInt(SETTING_NUMBER_OF_SHARDS, null);
    if (maybeNumberOfShards == null) {
        throw new IllegalArgumentException("must specify numberOfShards for index [" + index + "]");
    }
    int numberOfShards = maybeNumberOfShards;
    if (numberOfShards <= 0) {
        throw new IllegalArgumentException("must specify positive number of shards for index [" + index + "]");
    }

    Integer maybeNumberOfReplicas = settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, null);
    if (maybeNumberOfReplicas == null) {
        throw new IllegalArgumentException("must specify numberOfReplicas for index [" + index + "]");
    }
    int numberOfReplicas = maybeNumberOfReplicas;
    if (numberOfReplicas < 0) {
        // FIX: message previously said "number of shards" while validating the replica count
        throw new IllegalArgumentException("must specify non-negative number of replicas for index [" + index + "]");
    }

    int routingPartitionSize = INDEX_ROUTING_PARTITION_SIZE_SETTING.get(settings);
    if (routingPartitionSize != 1 && routingPartitionSize >= getRoutingNumShards()) {
        throw new IllegalArgumentException("routing partition size [" + routingPartitionSize + "] should be a positive number"
            + " less than the number of shards [" + getRoutingNumShards() + "] for [" + index + "]");
    }

    // fill missing slots in inSyncAllocationIds with empty set if needed and make all entries immutable
    ImmutableOpenIntMap.Builder<Set<String>> filledInSyncAllocationIds = ImmutableOpenIntMap.builder();
    for (int i = 0; i < numberOfShards; i++) {
        if (inSyncAllocationIds.containsKey(i)) {
            filledInSyncAllocationIds.put(i, Collections.unmodifiableSet(new HashSet<>(inSyncAllocationIds.get(i))));
        } else {
            filledInSyncAllocationIds.put(i, Collections.emptySet());
        }
    }

    // Build node-filter objects from the routing allocation settings; an empty map means "no filter".
    // NOTE(review): "require" combines with AND, the others with OR — matches the visible calls below.
    final Map<String, String> requireMap = INDEX_ROUTING_REQUIRE_GROUP_SETTING.get(settings).getAsMap();
    final DiscoveryNodeFilters requireFilters;
    if (requireMap.isEmpty()) {
        requireFilters = null;
    } else {
        requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);
    }
    Map<String, String> includeMap = INDEX_ROUTING_INCLUDE_GROUP_SETTING.get(settings).getAsMap();
    final DiscoveryNodeFilters includeFilters;
    if (includeMap.isEmpty()) {
        includeFilters = null;
    } else {
        includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);
    }
    Map<String, String> excludeMap = INDEX_ROUTING_EXCLUDE_GROUP_SETTING.get(settings).getAsMap();
    final DiscoveryNodeFilters excludeFilters;
    if (excludeMap.isEmpty()) {
        excludeFilters = null;
    } else {
        excludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap);
    }
    Map<String, String> initialRecoveryMap = INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING.get(settings).getAsMap();
    final DiscoveryNodeFilters initialRecoveryFilters;
    if (initialRecoveryMap.isEmpty()) {
        initialRecoveryFilters = null;
    } else {
        initialRecoveryFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, initialRecoveryMap);
    }
    Version indexCreatedVersion = Version.indexCreated(settings);
    // If no explicit upgraded version is present, the created version is used as the default.
    Version indexUpgradedVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_UPGRADED, indexCreatedVersion);

    if (primaryTerms == null) {
        initializePrimaryTerms();
    } else if (primaryTerms.length != numberOfShards) {
        throw new IllegalStateException("primaryTerms length is [" + primaryTerms.length
            + "] but should be equal to number of shards [" + numberOfShards() + "]");
    }

    final ActiveShardCount waitForActiveShards = SETTING_WAIT_FOR_ACTIVE_SHARDS.get(settings);
    if (waitForActiveShards.validate(numberOfReplicas) == false) {
        throw new IllegalArgumentException("invalid " + SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey() +
            "[" + waitForActiveShards + "]: cannot be greater than " +
            "number of shard copies [" + (numberOfReplicas + 1) + "]");
    }

    final String uuid = settings.get(SETTING_INDEX_UUID, INDEX_UUID_NA_VALUE);
    return new IndexMetaData(new Index(index, uuid), version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings,
        mappings.build(), tmpAliases.build(), customs.build(), filledInSyncAllocationIds.build(),
        requireFilters, initialRecoveryFilters, includeFilters, excludeFilters,
        indexCreatedVersion, indexUpgradedVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards);
}

/**
 * Serializes the given index metadata as XContent. Mappings are written either
 * as compressed binary values or as expanded maps, depending on the "binary" param.
 */
public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
    builder.startObject(indexMetaData.getIndex().getName());

    builder.field(KEY_VERSION, indexMetaData.getVersion());
    builder.field(KEY_ROUTING_NUM_SHARDS, indexMetaData.getRoutingNumShards());
    builder.field(KEY_STATE, indexMetaData.getState().toString().toLowerCase(Locale.ENGLISH));

    boolean binary = params.paramAsBoolean("binary", false);

    builder.startObject(KEY_SETTINGS);
    for (Map.Entry<String, String> entry : indexMetaData.getSettings().getAsMap().entrySet()) {
        builder.field(entry.getKey(), entry.getValue());
    }
    builder.endObject();

    builder.startArray(KEY_MAPPINGS);
    for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
        if (binary) {
            builder.value(cursor.value.source().compressed());
        } else {
            builder.map(XContentHelper.convertToMap(new BytesArray(cursor.value.source().uncompressed()), true).v2());
        }
    }
    builder.endArray();

    // custom index metadata sections are written under their own type keys
    for (ObjectObjectCursor<String, Custom> cursor : indexMetaData.getCustoms()) {
        builder.startObject(cursor.key);
        cursor.value.toXContent(builder, params);
        builder.endObject();
    }

    builder.startObject(KEY_ALIASES);
    for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
        AliasMetaData.Builder.toXContent(cursor.value, builder, params);
    }
    builder.endObject();

    builder.startArray(KEY_PRIMARY_TERMS);
    for (int i = 0; i < indexMetaData.getNumberOfShards(); i++) {
        builder.value(indexMetaData.primaryTerm(i));
    }
    builder.endArray();

    builder.startObject(KEY_IN_SYNC_ALLOCATIONS);
    for (IntObjectCursor<Set<String>> cursor : indexMetaData.inSyncAllocationIds) {
        builder.startArray(String.valueOf(cursor.key));
        for (String allocationId : cursor.value) {
            builder.value(allocationId);
        }
        builder.endArray();
    }
    builder.endObject();

    builder.endObject();
}

/**
 * Parses index metadata from XContent produced by {@link #toXContent}.
 * The parser is expected to be positioned at (or just before) the field
 * name that carries the index name.
 *
 * @throws IllegalArgumentException on unexpected tokens or unknown fields
 */
public static IndexMetaData fromXContent(XContentParser parser) throws IOException {
    if (parser.currentToken() == null) { // fresh parser? move to the first token
        parser.nextToken();
    }
    if (parser.currentToken() == XContentParser.Token.START_OBJECT) {  // on a start object move to next token
        parser.nextToken();
    }
    if (parser.currentToken() != XContentParser.Token.FIELD_NAME) {
        throw new IllegalArgumentException("expected field name but got a " + parser.currentToken());
    }
    Builder builder = new Builder(parser.currentName());

    String currentFieldName = null;
    XContentParser.Token token = parser.nextToken();
    if (token != XContentParser.Token.START_OBJECT) {
        throw new IllegalArgumentException("expected object but got a " + token);
    }
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_OBJECT) {
            if (KEY_SETTINGS.equals(currentFieldName)) {
                builder.settings(Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())));
            } else if (KEY_MAPPINGS.equals(currentFieldName)) {
                // object form: one nested object per mapping type
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_OBJECT) {
                        String mappingType = currentFieldName;
                        Map<String, Object> mappingSource =
                            MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
                        builder.putMapping(new MappingMetaData(mappingType, mappingSource));
                    } else {
                        throw new IllegalArgumentException("Unexpected token: " + token);
                    }
                }
            } else if (KEY_ALIASES.equals(currentFieldName)) {
                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                    builder.putAlias(AliasMetaData.Builder.fromXContent(parser));
                }
            } else if (KEY_IN_SYNC_ALLOCATIONS.equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        String shardId = currentFieldName;
                        Set<String> allocationIds = new HashSet<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token == XContentParser.Token.VALUE_STRING) {
                                allocationIds.add(parser.text());
                            }
                        }
                        builder.putInSyncAllocationIds(Integer.valueOf(shardId), allocationIds);
                    } else {
                        throw new IllegalArgumentException("Unexpected token: " + token);
                    }
                }
            } else if ("warmers".equals(currentFieldName)) {
                // TODO: do this in 6.0:
                // throw new IllegalArgumentException("Warmers are not supported anymore - are you upgrading from 1.x?");
                // ignore: warmers have been removed in 5.0 and are
                // simply ignored when upgrading from 2.x
                assert Version.CURRENT.major <= 5;
                parser.skipChildren();
            } else {
                // check if its a custom index metadata
                Custom proto = lookupPrototype(currentFieldName);
                if (proto == null) {
                    //TODO warn
                    parser.skipChildren();
                } else {
                    Custom custom = proto.fromXContent(parser);
                    builder.putCustom(custom.type(), custom);
                }
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (KEY_MAPPINGS.equals(currentFieldName)) {
                // array form: either compressed binary values or single-type maps
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                        builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue())));
                    } else {
                        Map<String, Object> mapping = parser.mapOrdered();
                        if (mapping.size() == 1) {
                            String mappingType = mapping.keySet().iterator().next();
                            builder.putMapping(new MappingMetaData(mappingType, mapping));
                        }
                    }
                }
            } else if (KEY_PRIMARY_TERMS.equals(currentFieldName)) {
                LongArrayList list = new LongArrayList();
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    if (token == XContentParser.Token.VALUE_NUMBER) {
                        list.add(parser.longValue());
                    } else {
                        throw new IllegalStateException("found a non-numeric value under [" + KEY_PRIMARY_TERMS + "]");
                    }
                }
                builder.primaryTerms(list.toArray());
            } else {
                throw new IllegalArgumentException("Unexpected field for an array " + currentFieldName);
            }
        } else if (token.isValue()) {
            if (KEY_STATE.equals(currentFieldName)) {
                builder.state(State.fromString(parser.text()));
            } else if (KEY_VERSION.equals(currentFieldName)) {
                builder.version(parser.longValue());
            } else if (KEY_ROUTING_NUM_SHARDS.equals(currentFieldName)) {
                builder.setRoutingNumShards(parser.intValue());
            } else {
                throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]");
            }
        } else {
            throw new IllegalArgumentException("Unexpected token " + token);
        }
    }
    return builder.build();
}
}

/**
 * Adds human readable version and creation date settings.
 * This method is used to display the settings in a human readable format in REST API
 */
public static Settings addHumanReadableSettings(Settings settings) {
    Settings.Builder builder = Settings.builder().put(settings);
    Version version = settings.getAsVersion(SETTING_VERSION_CREATED, null);
    if (version != null) {
        builder.put(SETTING_VERSION_CREATED_STRING, version.toString());
    }
    Version versionUpgraded = settings.getAsVersion(SETTING_VERSION_UPGRADED, null);
    if (versionUpgraded != null) {
        builder.put(SETTING_VERSION_UPGRADED_STRING, versionUpgraded.toString());
    }
    Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null);
    if (creationDate != null) {
        DateTime creationDateTime = new DateTime(creationDate, DateTimeZone.UTC);
        builder.put(SETTING_CREATION_DATE_STRING, creationDateTime.toString());
    }
    return builder.build();
}

// on-disk state is always written with binary (compressed) mappings
private static final ToXContent.Params FORMAT_PARAMS = new MapParams(Collections.singletonMap("binary", "true"));

/**
 * State format for {@link IndexMetaData} to write to and load from disk
 */
public static final MetaDataStateFormat<IndexMetaData> FORMAT =
        new MetaDataStateFormat<IndexMetaData>(XContentType.SMILE, INDEX_STATE_FILE_PREFIX) {

    @Override
    public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOException {
        Builder.toXContent(state, builder, FORMAT_PARAMS);
    }

    @Override
    public IndexMetaData fromXContent(XContentParser parser) throws IOException {
        return Builder.fromXContent(parser);
    }
};

/**
 * Returns the number of shards that should be used for routing. This basically defines the hash space we use in
 * {@link org.elasticsearch.cluster.routing.OperationRouting#generateShardId(IndexMetaData, String, String)} to route documents
 * to shards based on their ID or their specific routing value. The default value is {@link #getNumberOfShards()}. This value only
 * changes if and index is shrunk.
 */
public int getRoutingNumShards() {
    return routingNumShards;
}

/**
 * Returns the routing factor for this index. The default is <tt>1</tt>.
 *
 * @see #getRoutingFactor(IndexMetaData, int) for details
 */
public int getRoutingFactor() {
    return routingFactor;
}

/**
 * Returns the source shard ids to shrink into the given shard id.
 * @param shardId the id of the target shard to shrink to
 * @param sourceIndexMetadata the source index metadata
 * @param numTargetShards the total number of shards in the target index
 * @return a set of shard IDs to shrink into the given shard ID.
 */
public static Set<ShardId> selectShrinkShards(int shardId, IndexMetaData sourceIndexMetadata, int numTargetShards) {
    if (shardId >= numTargetShards) {
        throw new IllegalArgumentException("the number of target shards (" + numTargetShards + ") must be greater than the shard id: "
            + shardId);
    }
    int routingFactor = getRoutingFactor(sourceIndexMetadata, numTargetShards);
    Set<ShardId> shards = new HashSet<>(routingFactor);
    // each target shard absorbs a contiguous run of `routingFactor` source shards
    for (int i = shardId * routingFactor; i < routingFactor*shardId + routingFactor; i++) {
        shards.add(new ShardId(sourceIndexMetadata.getIndex(), i));
    }
    return shards;
}

/**
 * Returns the routing factor for and shrunk index with the given number of target shards.
 * This factor is used in the hash function in
 * {@link org.elasticsearch.cluster.routing.OperationRouting#generateShardId(IndexMetaData, String, String)} to guarantee consistent
 * hashing / routing of documents even if the number of shards changed (ie. a shrunk index).
 *
 * @param sourceIndexMetadata the metadata of the source index
 * @param targetNumberOfShards the total number of shards in the target index
 * @return the routing factor for and shrunk index with the given number of target shards.
 * @throws IllegalArgumentException if the number of source shards is greater than the number of target shards or if the source shards
 * are not divisible by the number of target shards.
 */
public static int getRoutingFactor(IndexMetaData sourceIndexMetadata, int targetNumberOfShards) {
    int sourceNumberOfShards = sourceIndexMetadata.getNumberOfShards();
    if (sourceNumberOfShards < targetNumberOfShards) {
        // FIX: typo "less that" -> "less than"
        throw new IllegalArgumentException("the number of target shards must be less than the number of source shards");
    }
    int factor = sourceNumberOfShards / targetNumberOfShards;
    if (factor * targetNumberOfShards != sourceNumberOfShards || factor <= 1) {
        // FIX: message previously contained duplicated text "must be a must be a multiple of"
        throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a multiple of ["
            + targetNumberOfShards + "]");
    }
    return factor;
}
}
// ----------------------------------------------------------------------------
// Copyright 2007-2017, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Description:
//  Thread pool manager
// ----------------------------------------------------------------------------
// Change History:
//  2006/03/26  Martin D. Flynn
//     -Initial release
//  2006/04/03  Martin D. Flynn
//     -Removed reference to JavaMail api imports
//  2006/06/30  Martin D. Flynn
//     -Repackaged
//  2006/11/28  Martin D. Flynn
//     -Added method "setMaxPoolSize(size)"
//  2013/04/08  Martin D. Flynn
//     -Added global "StopThreads(...)" method to stop all active threads in all
//      ThreadPools.
//  2013/09/20  Martin D. Flynn
//     -Support property override for "maximumPoolSize"/"maximumIdleSeconds"
//      Property: ThreadPool.GROUP_NAME.maximumPoolSize=SIZE
//      Property: ThreadPool.GROUP_NAME.maximumIdleSeconds=SECONDS
// ----------------------------------------------------------------------------
package org.opengts.util;

import java.util.*;

/**
*** Thread pool manager
**/

public class ThreadPool
{

    // ------------------------------------------------------------------------

    // default limits used when no explicit/positive value is supplied
    private static final int    DFT_POOL_SIZE        = 20;
    private static final int    DFT_MAX_IDLE_AGE_SEC = 0;
    private static final long   DFT_MAX_IDLE_AGE_MS  = (long)DFT_MAX_IDLE_AGE_SEC * 1000L;
    private static final int    DFT_MAX_QUEUE_SIZE   = 0;

    // values for the "stopThreads" state field:
    //  STOP_WAITING - stop each worker once the job queue has drained
    //  STOP_NEVER   - normal operation, workers keep waiting for jobs
    //  STOP_NOW     - stop workers immediately, even with jobs still queued
    public  static final int    STOP_WAITING         = -1;
    public  static final int    STOP_NEVER           = 0;
    public  static final int    STOP_NOW             = 1;

    // ------------------------------------------------------------------------

    // set once a global "stop now" has been requested; newly constructed pools
    // start in STOP_NOW state when this is true
    private static boolean              globalStopThreadsNow = false;

    // global registry of all ThreadPool instances; weak keys so pools can be
    // garbage-collected once no longer referenced elsewhere
    private static Map<ThreadPool,String> threadPoolList = new WeakHashMap<ThreadPool,String>();

    /**
    *** Adds a ThreadPool to a global list
    *** @param tp  A ThreadPool
    **/
    private static void _AddThreadPool(ThreadPool tp)
    {
        if (tp != null) {
            synchronized (ThreadPool.threadPoolList) {
                try {
                    if (!ThreadPool.threadPoolList.containsKey(tp)) {
                        ThreadPool.threadPoolList.put(tp,"");
                    }
                } catch (Throwable th) {
                    // not sure what could be thrown here for weak-references, but catch just in case
                    Print.logException("ThreadPool weak reference list", th);
                }
            }
        }
    }

    /**
    *** Tell all active threads to stop
    *** @param stopNow  True to stop threads, even if jobs are still queued.  False
    ***                 to stop only after all jobs have been processed.  (note that
    ***                 jobs currently being processed will continue until they are
    ***                 done).
    **/
    public static void StopThreads(boolean stopNow) // shutdown
    {
        synchronized (ThreadPool.threadPoolList) {
            if (stopNow) {
                ThreadPool.globalStopThreadsNow = true;
            }
            for (ThreadPool tp : ThreadPool.threadPoolList.keySet()) {
                tp.stopThreads(stopNow);
            }
        }
    }

    /**
    *** Returns the total thread count over all registered ThreadPools
    *** (fixed: previous comment was a copy-paste of "Tell all active threads to stop")
    **/
    public static int GetTotalThreadCount()
    {
        int count = 0;
        synchronized (ThreadPool.threadPoolList) {
            for (ThreadPool tp : ThreadPool.threadPoolList.keySet()) {
                count += tp.getPoolSize();
            }
        }
        return count;
    }

    /**
    *** Logs the per-pool thread counts and returns the total thread count
    *** (fixed: previous comment was a copy-paste of "Tell all active threads to stop")
    **/
    public static int PrintThreadCount()
    {
        int count = 0;
        synchronized (ThreadPool.threadPoolList) {
            for (ThreadPool tp : ThreadPool.threadPoolList.keySet()) {
                String n = tp.getName();
                int    s = tp.getPoolSize();
                Print.logInfo("ThreadPool '" + n + "' size=" + s);
                count += s;
            }
        }
        return count;
    }

    /**
    *** Gets the ThreadPool state
    **/
    public static StringBuffer GetThreadPoolState(StringBuffer sb)
    {
        if (sb == null) { sb = new StringBuffer(); }
        synchronized (ThreadPool.threadPoolList) {
            sb.append("ThreadPools:\n");
            if (!ListTools.isEmpty(ThreadPool.threadPoolList)) {
                for (ThreadPool tp : ThreadPool.threadPoolList.keySet()) {
                    String name     = tp.getName();
                    int    pSize    = tp.getPoolSize();
                    int    maxPSize = tp.getMaxPoolSize();
                    int    active   = tp.getActiveCount();
                    int    qSize    = tp.getQueueSize();
                    int    maxQSize = tp.getMaxQueueSize();
                    sb.append("  ");
                    sb.append("Name=").append(name).append(" ");
                    // pad the name column out to 18 characters
                    for (int s=18, n=name.length(); s>n; s--) {sb.append(" ");}
                    sb.append("MaxPoolSize=" ).append(maxPSize).append(" ");
                    sb.append("PoolSize="    ).append(pSize   ).append(" ");
                    sb.append("Active="      ).append(active  ).append(" ");
                    sb.append("MaxQueueSize=").append(maxQSize).append(" ");
                    sb.append("QueueSize="   ).append(qSize   ).append(" ");
                    sb.append("\n");
                }
            } else {
                sb.append("  ");
                sb.append("None");
                sb.append("\n");
            }
        }
        return sb;
    }

    // ------------------------------------------------------------------------

    /* The ThreadGroup for this pool */
    private ThreadGroup                 poolGroup     = null;

    /* the maximum number of allowed threads in this pool */
    private int                         maxPoolSize   = DFT_POOL_SIZE;

    /* the maximum allowed idle time of a thread before it is removed from the pool */
    private long                        maxIdleAgeMS  = DFT_MAX_IDLE_AGE_MS;

    /* the list of threads in this pool (guarded by synchronizing on itself) */
    private java.util.List<ThreadJob>   jobThreadPool = null;

    /* the sequence id used for naming individual threads */
    private int                         threadId      = 1;

    /* the list of yet-to-be-processed jobs (guarded by synchronizing on itself;
       also serves as the wait/notify monitor for worker threads) */
    private java.util.List<Runnable>    jobQueue      = null;

    /* the maximum number of waiting jobs (ie. in "jobQueue") */
    private int                         maxQueueSize  = DFT_MAX_QUEUE_SIZE;

    /* semaphore used to indicate waiting for job (count of workers blocked in wait();
       guarded by the "jobQueue" monitor) */
    private int                         waitingCount  = 0;

    /* true to gracefully stop/remove threads from this pool (one of STOP_*) */
    private int                         stopThreads   = STOP_NEVER;

    /**
    *** Constructor
    *** @param name The name of the thread pool
    **/
    public ThreadPool(String name)
    {
        this(name, DFT_POOL_SIZE, DFT_MAX_IDLE_AGE_SEC, DFT_MAX_QUEUE_SIZE);
    }

    /**
    *** Constructor
    *** @param name         The name of the thread pool
    *** @param maxPoolSize  The maximum number of threads in the thread pool
    **/
    public ThreadPool(String name, int maxPoolSize)
    {
        this(name, maxPoolSize, DFT_MAX_IDLE_AGE_SEC, DFT_MAX_QUEUE_SIZE);
    }

    /**
    *** Constructor
    *** @param name         The name of the thread pool
    *** @param maxPoolSize  The maximum number of threads in the thread pool
    *** @param maxIdleSec   The maximum number of seconds a thread is allowed to remain
    ***                     idle before it self-terminates.
    *** @param maxQueueSize The maximum number of jobs allowed in queue
    **/
    public ThreadPool(String name, int maxPoolSize, int maxIdleSec, int maxQueueSize)
    {
        // derives the property-key prefix "ThreadPool.<name>" from the pool name
        this(name, RTKey.valueOf(!StringTools.isBlank(name)?("ThreadPool."+name):null), maxPoolSize, maxIdleSec, maxQueueSize);
    }

    /**
    *** Constructor
    *** @param name         The name of the thread pool
    *** @param propPfx_     The property key prefix from which the default attributes
    ***                     for this ThreadPool will be obtained.
    *** @param maxPoolSize  The maximum number of threads in the thread pool ("maximumPoolSize")
    *** @param maxIdleSec   The maximum number of seconds a thread is allowed to remain
    ***                     idle before it self-terminates ("maximumIdleSeconds")
    *** @param maxQueueSize The maximum number of jobs allowed in queue ("maximumQueueSize")
    **/
    public ThreadPool(String name, RTKey propPfx_, int maxPoolSize, int maxIdleSec, int maxQueueSize)
    {
        super();

        /* init vars */
        String groupName = !StringTools.isBlank(name)? name.trim() : "ThreadPool";
        this.poolGroup     = new ThreadGroup(groupName);
        this.jobThreadPool = new Vector<ThreadJob>();
        this.jobQueue      = new Vector<Runnable>();
        // honor a global stop request that occurred before this pool was created
        this.stopThreads   = ThreadPool.globalStopThreadsNow? STOP_NOW : STOP_NEVER;

        /* set maxPoolSize/maxIdleSec */
        if (!RTKey.isBlank(propPfx_)) {
            // IE:
            //   ThreadPool.PoolName.maximumPoolSize=50
            //   ThreadPool.PoolName.maximumIdleSeconds=0
            //   ThreadPool.PoolName.maximumQueueSize=0
            this.setMaxPoolSize( propPfx_.rtSuffix("maximumPoolSize"   ), maxPoolSize );
            this.setMaxIdleSec(  propPfx_.rtSuffix("maximumIdleSeconds"), maxIdleSec  );
            this.setMaxQueueSize(propPfx_.rtSuffix("maximumQueueSize"  ), maxQueueSize);
        } else {
            this.setMaxPoolSize( maxPoolSize );
            this.setMaxIdleSec( maxIdleSec );
            this.setMaxQueueSize(maxQueueSize);
        }

        /* add to global manager */
        ThreadPool._AddThreadPool(this);
    }

    // ------------------------------------------------------------------------

    /**
    *** Gets the name of the thread pool
    *** @return The name of the thread pool
    **/
    public String getName()
    {
        return this.getThreadGroup().getName();
    }

    /**
    *** Returns the name of the thread pool
    *** @return The name of the thread pool
    **/
    public String toString()
    {
        return this.getName();
    }

    /**
    *** Returns true if this object is equal to <code>other</code>.  This will
    *** only return true if they are the same object
    *** @param other  The object to check equality with
    *** @return True if <code>other</code> is the same object
    **/
    public boolean equals(Object other)
    {
        return (this == other); // equals only if same object
    }

    /**
    *** Returns a hash code value for the object.
    **/
    public int hashCode()
    {
        return super.hashCode();
    }

    // ------------------------------------------------------------------------

    /**
    *** Gets the thread group of the Threads in this pool
    *** @return The thread group of the Threads in this pool
    **/
    public ThreadGroup getThreadGroup()
    {
        return this.poolGroup;
    }

    // ------------------------------------------------------------------------

    /**
    *** Gets the number of currently active jobs
    **/
    public int getActiveCount()
    {
        int cnt = 0;
        synchronized (this.jobThreadPool) {
            for (ThreadJob tj : this.jobThreadPool) {
                if (tj.isRunning()) {
                    cnt++;
                }
            }
        }
        return cnt;
    }

    /**
    *** Gets the current size of this thread pool
    *** @return The number of thread jobs in this thread pool
    **/
    public int getPoolSize()
    {
        int size = 0;
        synchronized (this.jobThreadPool) {
            size = this.jobThreadPool.size();
        }
        return size;
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the maximum size of this thread pool
    *** @param maxSize  The maximum size of the thread pool
    **/
    public void setMaxPoolSize(int maxSize)
    {
        this.maxPoolSize = (maxSize > 0)? maxSize : DFT_POOL_SIZE;
    }

    /**
    *** Sets the maximum size of this thread pool
    *** @param propKey     The property key name to use for looking up the overriding
    ***                    value in the runtime configuration properties.
    *** @param dftMaxSize  The maximum size of the thread pool
    **/
    public void setMaxPoolSize(RTKey propKey, int dftMaxSize)
    {
        // precedence: runtime property > caller-supplied default > compiled-in default
        int propMps = (propKey != null)? RTConfig.getInt(propKey.toString(),-1) : -1;
        if (propMps > 0) {
            this.maxPoolSize = propMps;
        } else
        if (dftMaxSize > 0) {
            this.maxPoolSize = dftMaxSize;
        } else {
            this.maxPoolSize = DFT_POOL_SIZE;
        }
        //Print.logDebug("["+this.getName()+"] ThreadPool 'maximumPoolSize': " + this.maxPoolSize);
    }

    /**
    *** Gets the maximum size of this thread pool
    *** @return The maximum size of the thread pool
    **/
    public int getMaxPoolSize()
    {
        return this.maxPoolSize;
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the maximum number of seconds that a thread is allowed to remain idle
    *** before it self-terminates.
    *** @param maxIdleSec  The maximum number of idle seconds
    **/
    public void setMaxIdleSec(int maxIdleSec)
    {
        this.setMaxIdleMS((long)maxIdleSec * 1000L);
    }

    /**
    *** Sets the maximum number of seconds that a thread is allowed to remain idle
    *** before it self-terminates.
    *** @param propKeySec    The property name from which the maximum number of idle
    ***                      seconds will attempt to be retrieved.
    *** @param dftMaxIdleSec The default maximum number of idle seconds.
    **/
    public void setMaxIdleSec(RTKey propKeySec, int dftMaxIdleSec)
    {
        // precedence: runtime property >= 0 wins, then a non-negative default
        int propMidSec = (propKeySec != null)? RTConfig.getInt(propKeySec.toString(),-1) : -1;
        if (propMidSec >= 0) {
            this.setMaxIdleMS((long)propMidSec * 1000L);
        } else
        if (dftMaxIdleSec >= 0) {
            this.setMaxIdleMS((long)dftMaxIdleSec * 1000L);
        } else {
            this.setMaxIdleMS(DFT_MAX_IDLE_AGE_MS);
        }
        //Print.logDebug("["+this.getName()+"] ThreadPool 'maximumIdleSec': " + this.getMaxIdleMS()/1000L);
    }

    /**
    *** Sets the maximum number of milliseconds that a thread is allowed to remain idle
    *** before it self terminates.
    *** @param maxIdleMS  The maximum number of idle milliseconds
    **/
    public void setMaxIdleMS(long maxIdleMS)
    {
        this.maxIdleAgeMS = (maxIdleMS >= 0L)? maxIdleMS : DFT_MAX_IDLE_AGE_MS;
    }

    /**
    *** Gets the maximum number of milliseconds that a thread is allowed to remain idle
    *** before it self terminates.
    *** @return The maximum idle milliseconds
    **/
    public long getMaxIdleMS()
    {
        return this.maxIdleAgeMS;
    }

    // ------------------------------------------------------------------------

    /**
    *** Sets the maximum allowed number of waiting jobs (in "jobQueue")
    *** @param maxQSize  The maximum allowed number of waiting jobs
    **/
    public void setMaxQueueSize(int maxQSize)
    {
        this.maxQueueSize = (maxQSize >= 0)? maxQSize : DFT_MAX_QUEUE_SIZE;
    }

    /**
    *** Sets the maximum allowed number of waiting jobs (in "jobQueue")
    *** @param propKey      The property key name to use for looking up the overriding
    ***                     value in the runtime configuration properties.
    *** @param dftMaxQSize  The maximum allowed number of waiting jobs
    **/
    public void setMaxQueueSize(RTKey propKey, int dftMaxQSize)
    {
        int propMqs = (propKey != null)? RTConfig.getInt(propKey.toString(),-1) : -1;
        if (propMqs > 0) {
            this.maxQueueSize = propMqs;
        } else
        if (dftMaxQSize > 0) {
            this.maxQueueSize = dftMaxQSize;
        } else {
            this.maxQueueSize = DFT_MAX_QUEUE_SIZE;
        }
        //Print.logDebug("["+this.getName()+"] ThreadPool 'maximumQueueSize': " + this.maxQueueSize);
    }

    /**
    *** Gets the maximum allowed number of waiting jobs (in "jobQueue")
    *** @return The maximum allowed number of waiting jobs (in "jobQueue")
    **/
    public int getMaxQueueSize()
    {
        return this.maxQueueSize;
    }

    // ------------------------------------------------------------------------

    /**
    *** Adds a new job to the thread pool's queue
    *** @param job  The job to add to the queue
    *** @return True if the job was queued; false if the job was null, the pool is
    ***         stopping now, or the queue is already at its maximum size
    **/
    public boolean run(Runnable job)
    {
        if (job == null) {
            // ignore null jobs
            return false;
        } else
        if (this.stopThreads == STOP_NOW) {
            // -- ignore job if this ThreadPool is in the process of stopping now.
            return false;
        } else {
            int maxQueueSize = this.getMaxQueueSize();
            boolean addedJob = false;
            // lock ordering is jobThreadPool first, then jobQueue (see comments)
            synchronized (this.jobThreadPool) { // <-- modification of threadPool is likely
                synchronized (this.jobQueue) {  // <-- modification of job queue mandatory
                    if ((maxQueueSize <= 0) || (this.jobQueue.size() < maxQueueSize)) {
                        // -- It's possible that we may end up adding more threads than we need if this
                        // -  section executes multiple times before the newly added thread has a chance
                        // -  to pull a job off the queue.
                        this.jobQueue.add(job);
                        // spawn a new worker only if no worker is idle and the pool has room
                        if ((this.waitingCount == 0) && (this.jobThreadPool.size() < this.maxPoolSize)) {
                            String tn = StringTools.format(this.threadId++,"000").trim();
                            ThreadJob tj = new ThreadJob(this, (this.getName() + "_" + tn));
                            this.jobThreadPool.add(tj);
                            Print.logDebug("New Thread: " + tj.getName() + " [" + this.getMaxPoolSize() + "]");
                        }
                        this.jobQueue.notify(); // notify a waiting thread
                        addedJob = true;
                    }
                }
            }
            // -- return true if job was added.
            // -  will only return false if the job queue is already at maximum size
            return addedJob;
        }
    }

    /**
    *** Gets the job queue size (jobs not yet processed)
    **/
    public int getQueueSize()
    {
        int qsize = 0;
        synchronized (this.jobQueue) {
            qsize = this.jobQueue.size();
        }
        return qsize;
    }

    // ------------------------------------------------------------------------

    /**
    *** Stops all threads in this pool once queued jobs are complete
    **/
    public void stopThreads() // shutdown
    {
        this.stopThreads(false); // stop when jobs are done
    }

    /**
    *** Stops all threads in this pool once queued jobs are complete
    *** @param stopNow  True to stop threads, even if jobs are still queued.  False
    ***                 to stop only after all jobs have been processed.  (note that
    ***                 jobs currently being processed will continue until they are
    ***                 done).
    **/
    public void stopThreads(boolean stopNow) // shutdown
    {
        synchronized (this.jobQueue) {
            this.stopThreads = stopNow? STOP_NOW : STOP_WAITING;
            // wake every waiting worker so it can observe the stop state
            this.jobQueue.notifyAll();
        }
    }

    /**
    *** Removes the specified worker thread from the pool
    *** @param thread  The thread to remove from the pool
    **/
    protected void _removeThreadJob(ThreadJob thread)
    {
        if (thread != null) {
            synchronized (this.jobThreadPool) {
                //Print.logDebug("Removing thread: " + thread.getName());
                this.jobThreadPool.remove(thread);
            }
        }
    }

    // ------------------------------------------------------------------------

    // Worker thread: pulls Runnables off the owning pool's jobQueue until told
    // to stop or until it exceeds its allowed idle time.
    private static class ThreadJob
        extends Thread
    {

        /* ThreadPool to which this thread belongs */
        private ThreadPool  threadPool     = null;

        /* the current job being executed (null while idle; guarded by jobQueue monitor) */
        private Runnable    job            = null;

        /* timestamps */
        private long        creationTimeMS = 0L;
        private long        lastUsedTimeMS = 0L;

        public ThreadJob(ThreadPool pool, String name)
        {
            super(pool.getThreadGroup(), name);
            this.threadPool     = pool;
            this.creationTimeMS = DateTime.getCurrentTimeMillis();
            this.lastUsedTimeMS = this.creationTimeMS;
            this.start(); // auto start thread
        }

        public void run()
        {

            /* loop forever (or until stopped) */
            while (true) {

                /* get next job */
                // 'this.job' is always null here
                boolean stop = false;
                synchronized (this.threadPool.jobQueue) {
                    //Print.logDebug("Thread checking for jobs: " + this.getName());
                    // classic guarded-wait loop on the jobQueue monitor
                    while (this.job == null) {
                        if (this.threadPool.stopThreads == STOP_NOW) {
                            // stop now, no more jobs
                            stop = true;
                            break;
                        } else
                        if (this.threadPool.jobQueue.size() > 0) { // this.jobQueue
                            // run next job
                            this.job = this.threadPool.jobQueue.remove(0); // Runnable
                        } else
                        if (this.threadPool.stopThreads == STOP_WAITING) {
                            // stop after all jobs have completed
                            stop = true;
                            break;
                        } else
                        if ((this.threadPool.maxIdleAgeMS > 0L) &&
                            ((DateTime.getCurrentTimeMillis() - this.lastUsedTimeMS) > this.threadPool.maxIdleAgeMS)) {
                            // stop due to excess idle time
                            stop = true;
                            break;
                        } else {
                            // wait for next job notification
                            int tmoMS = 20000; // maximum wait (should probably be higher)
                            // TODO: adjust 'tmoMS' to coincide with remaining 'maxIdleAgeMS'
                            this.threadPool.waitingCount++;
                            try { this.threadPool.jobQueue.wait(tmoMS); } catch (InterruptedException ie) {}
                            this.threadPool.waitingCount--;
                            // continue next loop
                        }
                    } // while (this.job == null)
                }
                if (stop) { break; }

                /* run job */
                //Print.logDebug("Thread running: " + this.getName());
                this.job.run();
                synchronized (this.threadPool.jobQueue) {
                    // clear under the same monitor that isRunning() reads under
                    this.job = null;
                }
                this.lastUsedTimeMS = DateTime.getCurrentTimeMillis();

            } // while (true)

            /* remove thread from pool */
            this.threadPool._removeThreadJob(this);

        }

        public boolean isRunning()
        {
            boolean rtn = false;
            synchronized (this.threadPool.jobQueue) {
                rtn = (this.job != null)? true : false;
            }
            return rtn;
        }

    } // class ThreadJob

    // ------------------------------------------------------------------------

    /**
    *** Main entry point for testing/debugging
    *** @param argv Command-line arguments
    **/
    public static void main(String argv[])
    {
        RTConfig.setCommandLineArgs(argv);
        // ThreadPool.Test_1.maximumPoolSize=30
        // ThreadPool.Test_1.maximumIdleSeconds=30
        RTKey propPfx = RTKey.valueOf(RTConfig.getString("prop",null));
        ThreadPool pool_1 = new ThreadPool("Test_1", propPfx, 3/*pool*/,-1/*idleSec*/,-1/*queueSize*/);
        ThreadPool pool_2 = new ThreadPool("Test_2", 3);
        // submit 15 jobs, alternating between the two pools
        for (int i = 0; i < 15; i++) {
            final int n = i;
            Print.logInfo("Job " + i);
            Runnable job = new Runnable() {
                int num = n;
                public void run() {
                    Print.logInfo("Start Job: " + this.getName());
                    try { Thread.sleep(2000 + (num * 89)); } catch (Throwable t) {}
                    Print.logInfo("Stop Job: " + this.getName());
                }
                public String getName() {
                    return "[" + Thread.currentThread().getName() + "] " + num;
                }
            };
            if ((i & 1) == 0) {
                pool_1.run(job);
            } else {
                pool_2.run(job);
            }
            try { Thread.sleep(100); } catch (Throwable t) {}
        }
        Print.logInfo("Stop Threads");
        ThreadPool.StopThreads(true); // Stop now
        // poll until all pool threads have exited (or ~20 seconds elapse)
        for (int i = 0; i < 20; i++) {
            Print.sysPrintln("---------------------------");
            int cnt = ThreadPool.PrintThreadCount();
            if (cnt <= 0) { break; }
            try { Thread.sleep(1000); } catch (Throwable t) {}
        }
        Print.sysPrintln("Total Thread Count: " + ThreadPool.GetTotalThreadCount());
    }

}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.xdebugger.impl.breakpoints.ui; import com.intellij.icons.AllIcons; import com.intellij.ide.DataManager; import com.intellij.ide.util.treeView.TreeState; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.Disposer; import com.intellij.ui.*; import com.intellij.ui.popup.util.DetailController; import com.intellij.ui.popup.util.DetailViewImpl; import com.intellij.ui.popup.util.ItemWrapper; import com.intellij.ui.popup.util.MasterController; import com.intellij.util.Function; import com.intellij.util.SingleAlarm; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashSet; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.xdebugger.XDebuggerBundle; import com.intellij.xdebugger.XDebuggerManager; import com.intellij.xdebugger.breakpoints.XBreakpoint; import com.intellij.xdebugger.breakpoints.XBreakpointType; import com.intellij.xdebugger.breakpoints.ui.XBreakpointGroupingRule; import com.intellij.xdebugger.impl.breakpoints.XBreakpointBase; import 
com.intellij.xdebugger.impl.breakpoints.XBreakpointManagerImpl;
import com.intellij.xdebugger.impl.breakpoints.XBreakpointUtil;
import com.intellij.xdebugger.impl.breakpoints.XBreakpointsDialogState;
import com.intellij.xdebugger.impl.breakpoints.ui.grouping.XBreakpointCustomGroup;
import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointItemNode;
import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointItemsTreeController;
import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointsCheckboxTree;
import com.intellij.xdebugger.impl.breakpoints.ui.tree.BreakpointsGroupNode;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.util.*;
import java.util.List;

/**
 * Non-modal master/detail "Breakpoints" dialog: a checkbox tree of all
 * breakpoints on the left (master) and a per-breakpoint editor on the right
 * (detail).  Tree state, grouping rules and the selected item are persisted
 * via {@link XBreakpointsDialogState} on dispose.
 */
public class BreakpointsDialog extends DialogWrapper {
  @NotNull private final Project myProject;

  // Breakpoint (or null) to pre-select when the dialog opens.
  private final Object myInitialBreakpoint;
  private final List<BreakpointPanelProvider> myBreakpointsPanelProviders;

  private BreakpointItemsTreeController myTreeController;

  // Placeholder label required by MasterController.getPathLabel(); not shown.
  final JLabel temp = new JLabel();

  // Bridges the tree selection to the detail view.
  private final MasterController myMasterController = new MasterController() {
    @Override
    public ItemWrapper[] getSelectedItems() {
      final List<BreakpointItem> res = myTreeController.getSelectedBreakpoints(false);
      return res.toArray(new ItemWrapper[res.size()]);
    }

    @Override
    public JLabel getPathLabel() {
      return temp;
    }
  };

  private final DetailController myDetailController = new DetailController(myMasterController);

  private final Collection<BreakpointItem> myBreakpointItems = new ArrayList<>();

  // Debounces external breakpoint changes: re-collects items and rebuilds the
  // tree at most once per 100ms burst.  Tied to the dialog's disposable.
  private final SingleAlarm myRebuildAlarm = new SingleAlarm(new Runnable() {
    @Override
    public void run() {
      collectItems();
      myTreeController.rebuildTree(myBreakpointItems);
      myDetailController.doUpdateDetailView(true);
    }
  }, 100, myDisposable);

  private final List<XBreakpointGroupingRule> myRulesAvailable = new ArrayList<>();

  // Currently-enabled grouping rules, kept ordered by rule priority.
  private final Set<XBreakpointGroupingRule> myRulesEnabled = new TreeSet<>(XBreakpointGroupingRule.PRIORITY_COMPARATOR);
  private final Disposable myListenerDisposable = Disposer.newDisposable();
  private final List<ToggleActionButton> myToggleRuleActions = new ArrayList<>();

  private XBreakpointManagerImpl getBreakpointManager() {
    return (XBreakpointManagerImpl)XDebuggerManager.getInstance(myProject).getBreakpointManager();
  }

  /**
   * @param project    current project
   * @param breakpoint breakpoint to select initially, may be null
   * @param providers  providers contributing breakpoint items and grouping rules
   */
  protected BreakpointsDialog(@NotNull Project project, Object breakpoint, @NotNull List<BreakpointPanelProvider> providers) {
    super(project);
    myProject = project;
    myBreakpointsPanelProviders = providers;
    myInitialBreakpoint = breakpoint;
    collectGroupingRules();

    collectItems();

    setTitle("Breakpoints");
    setModal(false);
    init();
    setOKButtonText("Done");
  }

  private String getSplitterProportionKey() {
    return getDimensionServiceKey() + ".splitter";
  }

  @Nullable
  @Override
  protected JComponent createCenterPanel() {
    // 30/70 master/detail split; proportion is persisted per dialog.
    JPanel mainPanel = new JPanel(new BorderLayout());

    JBSplitter splitPane = new JBSplitter(0.3f);
    splitPane.setSplitterProportionKey(getSplitterProportionKey());

    splitPane.setFirstComponent(createMasterView());
    splitPane.setSecondComponent(createDetailView());

    mainPanel.add(splitPane, BorderLayout.CENTER);

    return mainPanel;
  }

  private JComponent createDetailView() {
    DetailViewImpl detailView = new DetailViewImpl(myProject);
    detailView.setEmptyLabel(XDebuggerBundle.message("xbreakpoint.label.empty"));
    myDetailController.setDetailView(detailView);

    return detailView;
  }

  // Re-collects breakpoint items from all providers, disposing the old ones first.
  void collectItems() {
    if (!myBreakpointsPanelProviders.isEmpty()) {
      disposeItems();
      myBreakpointItems.clear();
      for (BreakpointPanelProvider panelProvider : myBreakpointsPanelProviders) {
        panelProvider.provideBreakpointItems(myProject, myBreakpointItems);
      }
    }
  }

  // Restores the saved tree expansion/selection state, falling back to
  // "expand all + select first"; then selects the initial breakpoint if given.
  void initSelection(Collection<BreakpointItem> breakpoints) {
    XBreakpointsDialogState settings = (getBreakpointManager()).getBreakpointsDialogSettings();

    if (settings != null && settings.getTreeState() != null) {
      settings.getTreeState().applyTo(myTreeController.getTreeView());
      if (myTreeController.getTreeView().getSelectionCount() == 0) {
        myTreeController.selectFirstBreakpointItem();
      }
    }
    else {
      TreeUtil.expandAll(myTreeController.getTreeView());
      myTreeController.selectFirstBreakpointItem();
    }
    selectBreakpoint(myInitialBreakpoint);
  }

  @Nullable
  @Override
  protected String getDimensionServiceKey() {
    return getClass().getName();
  }

  @NotNull
  @Override
  protected Action[] createActions() {
    // Only "Done" (OK) and "Help" -- no Cancel, since edits apply immediately.
    return new Action[]{getOKAction(), getHelpAction()};
  }

  /** Toolbar toggle that enables/disables one grouping rule and regroups the tree. */
  private class ToggleBreakpointGroupingRuleEnabledAction extends ToggleActionButton {
    private final XBreakpointGroupingRule myRule;

    public ToggleBreakpointGroupingRuleEnabledAction(XBreakpointGroupingRule rule) {
      super(rule.getPresentableName(), rule.getIcon());
      myRule = rule;
      getTemplatePresentation().setText(rule.getPresentableName());
    }

    @Override
    public boolean isSelected(AnActionEvent e) {
      return myRulesEnabled.contains(myRule);
    }

    @Override
    public void setSelected(AnActionEvent e, boolean state) {
      if (state) {
        myRulesEnabled.add(myRule);
      }
      else {
        myRulesEnabled.remove(myRule);
      }
      myTreeController.setGroupingRules(myRulesEnabled);
    }
  }

  // Builds the master (left) side: checkbox tree, context menu, shortcuts,
  // add/remove toolbar, grouping-rule toggles, and the change listener.
  private JComponent createMasterView() {
    myTreeController = new BreakpointItemsTreeController(myRulesEnabled) {
      // Persist the edited item before a checkbox flips, so the state change
      // does not clobber unsaved detail-view edits.
      @Override
      public void nodeStateWillChangeImpl(CheckedTreeNode node) {
        if (node instanceof BreakpointItemNode) {
          ((BreakpointItemNode)node).getBreakpointItem().saveState();
        }
        super.nodeStateWillChangeImpl(node);
      }

      @Override
      public void nodeStateDidChangeImpl(CheckedTreeNode node) {
        super.nodeStateDidChangeImpl(node);
        if (node instanceof BreakpointItemNode) {
          myDetailController.doUpdateDetailView(true);
        }
      }

      @Override
      protected void selectionChangedImpl() {
        super.selectionChangedImpl();
        saveCurrentItem();
        myDetailController.updateDetailView();
      }
    };
    final JTree tree = new BreakpointsCheckboxTree(myProject, myTreeController) {
      // Double-click: toggle expansion for group nodes, navigate for leaves.
      @Override
      protected void onDoubleClick(CheckedTreeNode node) {
        if (node instanceof BreakpointsGroupNode) {
          TreePath path = TreeUtil.getPathFromRoot(node);
          if (isExpanded(path)) {
            collapsePath(path);
          }
          else {
            expandPath(path);
          }
        }
        else {
          navigate(false);
        }
      }
    };

    // Context menu: "Move to group" submenu, plus group-specific actions when
    // exactly one node is selected.
    PopupHandler.installPopupHandler(tree, new ActionGroup() {
      @NotNull
      @Override
      public AnAction[] getChildren(@Nullable AnActionEvent e) {
        ActionGroup group = new ActionGroup("Move to group", true) {
          @NotNull
          @Override
          public AnAction[] getChildren(@Nullable AnActionEvent e) {
            // size + 3: "<no group>", separator, "Create new..."
            Set<String> groups = getBreakpointManager().getAllGroups();
            AnAction[] res = new AnAction[groups.size()+3];
            int i = 0;
            res[i++] = new MoveToGroupAction(null);
            for (String group : groups) {
              res[i++] = new MoveToGroupAction(group);
            }
            res[i++] = new Separator();
            res[i] = new MoveToGroupAction();
            return res;
          }
        };
        List<AnAction> res = new ArrayList<>();
        res.add(group);
        Object component = tree.getLastSelectedPathComponent();
        if (tree.getSelectionCount() == 1 && component instanceof BreakpointsGroupNode &&
            ((BreakpointsGroupNode)component).getGroup() instanceof XBreakpointCustomGroup) {
          res.add(new SetAsDefaultGroupAction((XBreakpointCustomGroup)((BreakpointsGroupNode)component).getGroup()));
        }
        if (tree.getSelectionCount() == 1 && component instanceof BreakpointItemNode) {
          res.add(new EditDescriptionAction((XBreakpointBase)((BreakpointItemNode)component).getBreakpointItem().getBreakpoint()));
        }
        return res.toArray(new AnAction[res.size()]);
      }
    }, ActionPlaces.UNKNOWN, ActionManager.getInstance());

    // Enter / "Edit Source" shortcut both navigate to the breakpoint and close.
    new AnAction("BreakpointDialog.GoToSource") {
      @Override
      public void actionPerformed(AnActionEvent e) {
        navigate(true);
        close(OK_EXIT_CODE);
      }
    }.registerCustomShortcutSet(CommonShortcuts.ENTER, tree, myDisposable);

    new AnAction("BreakpointDialog.ShowSource") {
      @Override
      public void actionPerformed(AnActionEvent e) {
        navigate(true);
        close(OK_EXIT_CODE);
      }
    }.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE).getShortcutSet(), tree, myDisposable);

    // "+" button shows a popup with one add-action per visible breakpoint type.
    final DefaultActionGroup breakpointTypes = new DefaultActionGroup();
    for (XBreakpointType<?, ?> type : XBreakpointUtil.getBreakpointTypes()) {
      if (type.isAddBreakpointButtonVisible()) {
        breakpointTypes.addAll(new AddXBreakpointAction(type));
      }
    }

    ToolbarDecorator decorator = ToolbarDecorator.createDecorator(tree).
      setAddAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          JBPopupFactory.getInstance()
            .createActionGroupPopup(null, breakpointTypes, DataManager.getInstance().getDataContext(button.getContextComponent()),
                                    JBPopupFactory.ActionSelectionAid.NUMBERING, false)
            .show(button.getPreferredPopupPoint());
        }
      }).
      setRemoveAction(new AnActionButtonRunnable() {
        @Override
        public void run(AnActionButton button) {
          myTreeController.removeSelectedBreakpoints(myProject);
        }
      }).
      setRemoveActionUpdater(new AnActionButtonUpdater() {
        @Override
        public boolean isEnabled(AnActionEvent e) {
          // Enabled when at least one selected item may be removed.
          for (BreakpointItem item : myTreeController.getSelectedBreakpoints(true)) {
            if (item.allowedToRemove()) {
              return true;
            }
          }
          return false;
        }
      }).
      setToolbarPosition(ActionToolbarPosition.TOP).
      setToolbarBorder(IdeBorderFactory.createEmptyBorder());

    for (ToggleActionButton action : myToggleRuleActions) {
      decorator.addExtraAction(action);
    }

    JPanel decoratedTree = decorator.createPanel();
    decoratedTree.setBorder(IdeBorderFactory.createEmptyBorder());

    JScrollPane pane = UIUtil.getParentOfType(JScrollPane.class, tree);
    if (pane != null) pane.setBorder(IdeBorderFactory.createBorder());

    myTreeController.setTreeView(tree);

    myTreeController.buildTree(myBreakpointItems);

    initSelection(myBreakpointItems);

    // Rebuild (debounced) whenever breakpoints change outside the dialog.
    final BreakpointPanelProvider.BreakpointsListener listener = new BreakpointPanelProvider.BreakpointsListener() {
      @Override
      public void breakpointsChanged() {
        myRebuildAlarm.cancelAndRequest();
      }
    };

    for (BreakpointPanelProvider provider : myBreakpointsPanelProviders) {
      provider.addListener(listener, myProject, myListenerDisposable);
    }

    return decoratedTree;
  }

  // Navigates to the first selected breakpoint's source position.
  private void navigate(final boolean requestFocus) {
    List<BreakpointItem> breakpoints = myTreeController.getSelectedBreakpoints(false);
    if (!breakpoints.isEmpty()) {
      breakpoints.get(0).navigate(requestFocus);
    }
  }

  @Nullable
  @Override
  public JComponent getPreferredFocusedComponent() {
    return myTreeController.getTreeView();
  }

  // Gathers all grouping rules from providers; enables the always-on ones plus
  // those remembered in the saved dialog state, and creates toggle buttons for
  // the optional ones.
  private void collectGroupingRules() {
    for (BreakpointPanelProvider provider : myBreakpointsPanelProviders) {
      provider.createBreakpointsGroupingRules(myRulesAvailable);
    }
    Collections.sort(myRulesAvailable, XBreakpointGroupingRule.PRIORITY_COMPARATOR);

    myRulesEnabled.clear();
    XBreakpointsDialogState settings = (getBreakpointManager()).getBreakpointsDialogSettings();

    for (XBreakpointGroupingRule rule : myRulesAvailable) {
      if (rule.isAlwaysEnabled() || (settings != null && settings.getSelectedGroupingRules().contains(rule.getId()) ) ) {
        myRulesEnabled.add(rule);
      }
    }

    for (XBreakpointGroupingRule rule : myRulesAvailable) {
      if (!rule.isAlwaysEnabled()) {
        myToggleRuleActions.add(new ToggleBreakpointGroupingRuleEnabledAction(rule));
      }
    }
  }

  // Persists tree expansion state and the set of user-toggled grouping rules.
  private void saveBreakpointsDialogState() {
    final XBreakpointsDialogState dialogState = new XBreakpointsDialogState();
    saveTreeState(dialogState);
    final List<XBreakpointGroupingRule> rulesEnabled = ContainerUtil.filter(myRulesEnabled, new Condition<XBreakpointGroupingRule>() {
      @Override
      public boolean value(XBreakpointGroupingRule rule) {
        return !rule.isAlwaysEnabled();
      }
    });

    dialogState.setSelectedGroupingRules(new HashSet<>(ContainerUtil.map(rulesEnabled, new Function<XBreakpointGroupingRule, String>() {
      @Override
      public String fun(XBreakpointGroupingRule rule) {
        return rule.getId();
      }
    })));
    getBreakpointManager().setBreakpointsDialogSettings(dialogState);
  }

  private void saveTreeState(XBreakpointsDialogState state) {
    JTree tree = myTreeController.getTreeView();
    state.setTreeState(TreeState.createOn(tree, (DefaultMutableTreeNode)tree.getModel().getRoot()));
  }

  @Override
  protected void dispose() {
    // Order matters: flush the in-progress edit, detach listeners, persist
    // dialog state, then dispose items before the superclass teardown.
    saveCurrentItem();
    Disposer.dispose(myListenerDisposable);
    saveBreakpointsDialogState();
    disposeItems();
    super.dispose();
  }

  private void disposeItems() {
    for (BreakpointItem item : myBreakpointItems) {
      item.dispose();
    }
  }

  @Nullable
  @Override
  protected String getHelpId() {
    return "reference.dialogs.breakpoints";
  }

  // Saves pending edits of the item currently shown in the detail view.
  private void saveCurrentItem() {
    ItemWrapper item = myDetailController.getSelectedItem();
    if (item instanceof BreakpointItem) {
      ((BreakpointItem)item).saveState();
    }
  }

  /** "+"-popup action that creates a breakpoint of one specific type and selects it. */
  private class AddXBreakpointAction extends AnAction implements DumbAware {

    private final XBreakpointType<?, ?> myType;

    public AddXBreakpointAction(XBreakpointType<?, ?> type) {
      myType = type;
      getTemplatePresentation().setIcon(type.getEnabledIcon());
      getTemplatePresentation().setText(type.getTitle());
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      saveCurrentItem();
      XBreakpoint<?> breakpoint = myType.addBreakpoint(myProject, null);
      if (breakpoint != null) {
        selectBreakpoint(breakpoint);
      }
    }
  }

  // Selects the tree item wrapping the given breakpoint; returns whether found.
  // Note: matches by identity (==), not equals.
  private boolean selectBreakpoint(Object breakpoint) {
    if (breakpoint != null) {
      for (BreakpointItem item : myBreakpointItems) {
        if (item.getBreakpoint() == breakpoint) {
          myTreeController.selectBreakpointItem(item, null);
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Moves the selected breakpoints to a named group.  A null group name means
   * "no group"; the no-arg constructor prompts for a new group name.
   */
  private class MoveToGroupAction extends AnAction {
    private final String myGroup;
    private final boolean myNewGroup;

    private MoveToGroupAction(String group) {
      super(group == null ? "<no group>" : group);
      myGroup = group;
      myNewGroup = false;
    }

    private MoveToGroupAction() {
      super("Create new...");
      myNewGroup = true;
      myGroup = null;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      String groupName = myGroup;
      if (myNewGroup) {
        groupName = Messages.showInputDialog("New group name", "New Group", AllIcons.Nodes.NewFolder);
        if (groupName == null) {
          return; // user cancelled the input dialog
        }
      }
      for (BreakpointItem item : myTreeController.getSelectedBreakpoints(true)) {
        Object breakpoint = item.getBreakpoint();
        if (breakpoint instanceof XBreakpointBase) {
          ((XBreakpointBase)breakpoint).setGroup(groupName);
        }
      }
      myTreeController.rebuildTree(myBreakpointItems);
    }
  }

  /** Toggles whether the given custom group is the default one for new breakpoints. */
  private class SetAsDefaultGroupAction extends AnAction {
    private final String myName;

    private SetAsDefaultGroupAction(XBreakpointCustomGroup group) {
      super(group.isDefault() ? "Unset as default" : "Set as default");
      // null name clears the default group.
      myName = group.isDefault() ? null : group.getName();
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      getBreakpointManager().setDefaultGroup(myName);
      myTreeController.rebuildTree(myBreakpointItems);
    }
  }

  /** Prompts for and stores a user description on a single breakpoint. */
  private class EditDescriptionAction extends AnAction {
    private final XBreakpointBase myBreakpoint;

    private EditDescriptionAction(XBreakpointBase breakpoint) {
      super("Edit description");
      myBreakpoint = breakpoint;
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      String description = Messages.showInputDialog("", "Edit Description", null, myBreakpoint.getUserDescription(), null);
      if (description == null) {
        return; // user cancelled the input dialog
      }
      myBreakpoint.setUserDescription(description);
      myTreeController.rebuildTree(myBreakpointItems);
    }
  }
}
/* Copyright (c) 2008-2013, Avian Contributors Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. There is NO WARRANTY for this software. See license.txt for details. */ package java.util; import java.lang.reflect.Array; public class Arrays { private Arrays() { } public static String toString(Object[] a) { return asList(a).toString(); } public static String toString(boolean[] a) { if (a == null) { return "null"; } else { StringBuilder sb = new StringBuilder(); sb.append("["); for (int i = 0; i < a.length; ++i) { sb.append(String.valueOf(a[i])); if (i + 1 != a.length) { sb.append(", "); } } sb.append("]"); return sb.toString(); } } public static String toString(byte[] a) { if (a == null) { return "null"; } else { StringBuilder sb = new StringBuilder(); sb.append("["); for (int i = 0; i < a.length; ++i) { sb.append(String.valueOf(a[i])); if (i + 1 != a.length) { sb.append(", "); } } sb.append("]"); return sb.toString(); } } public static String toString(short[] a) { if (a == null) { return "null"; } else { StringBuilder sb = new StringBuilder(); sb.append("["); for (int i = 0; i < a.length; ++i) { sb.append(String.valueOf(a[i])); if (i + 1 != a.length) { sb.append(", "); } } sb.append("]"); return sb.toString(); } } public static String toString(int[] a) { if (a == null) { return "null"; } else { StringBuilder sb = new StringBuilder(); sb.append("["); for (int i = 0; i < a.length; ++i) { sb.append(String.valueOf(a[i])); if (i + 1 != a.length) { sb.append(", "); } } sb.append("]"); return sb.toString(); } } public static String toString(long[] a) { if (a == null) { return "null"; } else { StringBuilder sb = new StringBuilder(); sb.append("["); for (int i = 0; i < a.length; ++i) { sb.append(String.valueOf(a[i])); if (i + 1 != a.length) { sb.append(", "); } } sb.append("]"); return sb.toString(); } } public static 
String toString(float[] a) { if (a == null) { return "null"; } else { StringBuilder sb = new StringBuilder(); sb.append("["); for (int i = 0; i < a.length; ++i) { sb.append(String.valueOf(a[i])); if (i + 1 != a.length) { sb.append(", "); } } sb.append("]"); return sb.toString(); } } public static String toString(double[] a) { if (a == null) { return "null"; } else { StringBuilder sb = new StringBuilder(); sb.append("["); for (int i = 0; i < a.length; ++i) { sb.append(String.valueOf(a[i])); if (i + 1 != a.length) { sb.append(", "); } } sb.append("]"); return sb.toString(); } } private static boolean equal(Object a, Object b) { return (a == null && b == null) || (a != null && a.equals(b)); } public static void sort(Object[] array) { sort(array, new Comparator() { public int compare(Object a, Object b) { return ((Comparable) a).compareTo(b); } }); } private final static int SORT_SIZE_THRESHOLD = 16; public static <T> void sort(T[] array, Comparator<? super T> comparator) { introSort(array, comparator, 0, array.length, array.length); insertionSort(array, comparator); } private static <T > void introSort(T[] array, Comparator<? super T> comparator, int begin, int end, int limit) { while (end - begin > SORT_SIZE_THRESHOLD) { if (limit == 0) { heapSort(array, comparator, begin, end); return; } limit >>= 1; // median of three T a = array[begin]; T b = array[begin + (end - begin) / 2 + 1]; T c = array[end - 1]; T median; if (comparator.compare(a, b) < 0) { median = comparator.compare(b, c) < 0 ? b : (comparator.compare(a, c) < 0 ? c : a); } else { median = comparator.compare(b, c) > 0 ? b : (comparator.compare(a, c) > 0 ? 
c : a); } // partition int pivot, i = begin, j = end; for (;;) { while (comparator.compare(array[i], median) < 0) { ++i; } --j; while (comparator.compare(median, array[j]) < 0) { --j; } if (i >= j) { pivot = i; break; } T swap = array[i]; array[i] = array[j]; array[j] = swap; ++i; } introSort(array, comparator, pivot, end, limit); end = pivot; } } private static <T> void heapSort(T[] array, Comparator<? super T> comparator, int begin, int end) { int count = end - begin; for (int i = count / 2 - 1; i >= 0; --i) { siftDown(array, comparator, i, count, begin); } for (int i = count - 1; i > 0; --i) { // swap begin and begin + i T swap = array[begin + i]; array[begin + i] = array[begin]; array[begin] = swap; siftDown(array, comparator, 0, i, begin); } } private static <T> void siftDown(T[] array, Comparator<? super T> comparator, int i, int count, int offset) { T value = array[offset + i]; while (i < count / 2) { int child = 2 * i + 1; if (child + 1 < count && comparator.compare(array[child], array[child + 1]) < 0) { ++child; } if (comparator.compare(value, array[child]) >= 0) { break; } array[offset + i] = array[offset + child]; i = child; } array[offset + i] = value; } private static <T> void insertionSort(T[] array, Comparator<? super T> comparator) { for (int j = 1; j < array.length; ++j) { T t = array[j]; int i = j - 1; while (i >= 0 && comparator.compare(array[i], t) > 0) { array[i + 1] = array[i]; i = i - 1; } array[i + 1] = t; } } public static int hashCode(Object[] array) { if(array == null) { return 9023; } int hc = 823347; for(Object o : array) { hc += o != null ? 
o.hashCode() : 54267; hc *= 3; } return hc; } public static boolean equals(Object[] a, Object[] b) { if(a == b) { return true; } if(a == null || b == null) { return false; } if(a.length != b.length) { return false; } for(int i = 0; i < a.length; i++) { if(!equal(a[i], b[i])) { return false; } } return true; } public static boolean equals(byte[] a, byte[] b) { if(a == b) { return true; } if(a == null || b == null) { return false; } if(a.length != b.length) { return false; } for(int i = 0; i < a.length; i++) { if(a[i] != b[i]) { return false; } } return true; } public static <T> List<T> asList(final T ... array) { return new AbstractList<T>() { public int size() { return array.length; } public void add(int index, T element) { throw new UnsupportedOperationException(); } public int indexOf(Object element) { for (int i = 0; i < array.length; ++i) { if (equal(element, array[i])) { return i; } } return -1; } public int lastIndexOf(Object element) { for (int i = array.length - 1; i >= 0; --i) { if (equal(element, array[i])) { return i; } } return -1; } public T get(int index) { return array[index]; } public T set(int index, T value) { throw new UnsupportedOperationException(); } public T remove(int index) { throw new UnsupportedOperationException(); } public ListIterator<T> listIterator(int index) { return new Collections.ArrayListIterator(this, index); } }; } public static void fill(int[] array, int value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static void fill(char[] array, char value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static void fill(short[] array, short value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static void fill(byte[] array, byte value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static void fill(boolean[] array, boolean value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static void fill(long[] array, long value) { for (int 
i=0;i<array.length;i++) { array[i] = value; } } public static void fill(float[] array, float value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static void fill(double[] array, double value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static <T> void fill(T[] array, T value) { for (int i=0;i<array.length;i++) { array[i] = value; } } public static boolean[] copyOf(boolean[] array, int newLength) { boolean[] result = new boolean[newLength]; int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static byte[] copyOf(byte[] array, int newLength) { byte[] result = new byte[newLength]; int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static char[] copyOf(char[] array, int newLength) { char[] result = new char[newLength]; int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static double[] copyOf(double[] array, int newLength) { double[] result = new double[newLength]; int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static float[] copyOf(float[] array, int newLength) { float[] result = new float[newLength]; int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static int[] copyOf(int[] array, int newLength) { int[] result = new int[newLength]; int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static long[] copyOf(long[] array, int newLength) { long[] result = new long[newLength]; int length = array.length > newLength ? 
newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static short[] copyOf(short[] array, int newLength) { short[] result = new short[newLength]; int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static <T> T[] copyOf(T[] array, int newLength) { Class<?> clazz = array.getClass().getComponentType(); T[] result = (T[])Array.newInstance(clazz, newLength); int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } public static <T, U> T[] copyOf(U[] array, int newLength, Class<? extends T[]> newType) { T[] result = (T[])Array.newInstance(newType.getComponentType(), newLength); int length = array.length > newLength ? newLength : array.length; System.arraycopy(array, 0, result, 0, length); return result; } }
package ru.stqa.pft.addressbook.model; import com.google.gson.annotations.Expose; import com.thoughtworks.xstream.annotations.XStreamAlias; import com.thoughtworks.xstream.annotations.XStreamOmitField; import org.hibernate.annotations.Type; import javax.persistence.*; import java.io.File; import java.util.HashSet; import java.util.Set; @XStreamAlias("contact") @Entity @Table(name = "addressbook") public class ContactData { @Id @Column(name = "id") @XStreamOmitField private int id = Integer.MAX_VALUE; @Expose @Column(name = "firstname") private String firstname; @Expose @Column(name = "lastname") private String lastname; @Expose @Column(name = "address") @Type(type = "text") private String address; @Expose @Column(name = "home") @Type(type = "text") private String homePhone; @Expose @Column(name = "mobile") @Type(type = "text") private String mobilePhone; @Expose @Column(name = "work") @Type(type = "text") private String workPhone; @Expose @Column(name = "email") @Type(type = "text") private String email; @Expose @Column(name = "email2") @Type(type = "text") private String email2; @Expose @Column(name = "email3") @Type(type = "text") private String email3; @Transient private String allPhones; @Transient private String allEmail; @Override public String toString() { return "ContactData{" + "id=" + id + ", firstname='" + firstname + '\'' + ", lastname='" + lastname + '\'' + '}'; } @Column(name = "photo") @Type(type = "text") private String photo; @ManyToMany (fetch = FetchType.EAGER ) @JoinTable(name = "address_in_groups", joinColumns = @JoinColumn(name = "id"), inverseJoinColumns = @JoinColumn(name = "group_id")) private Set<GroupData> groups = new HashSet<GroupData>(); public File getPhoto() { return new File(photo); } public ContactData withPhoto (File photo) { this.photo = photo.getPath(); return this; } public String getAllEmail() { return allEmail; } public ContactData withAllEmail(String allEmail) { this.allEmail = allEmail; return this; } public void 
getEmail(String email) { this.email = email; } public String getEmail2() { return email2; } public ContactData withEmail2(String email2) { this.email2 = email2; return this; } public String getEmail3() { return email3; } public ContactData withEmail3(String email3) { this.email3 = email3; return this; } public String getAllPhones() { return allPhones; } public ContactData withAllPhones(String allPhones) { this.allPhones = allPhones; return this; } public String getFirstname() { return firstname; } public int getId() { return id; } public ContactData withId(int id) { this.id = id; return this; } public ContactData withFirstname(String firstname) { this.firstname = firstname; return this; } public ContactData withLastname(String lastname) { this.lastname = lastname; return this; } public ContactData withAddress(String address) { this.address = address; return this; } public ContactData withWorkPhone(String workPhone) { this.workPhone = workPhone; return this; } public ContactData withEmail(String email) { this.email = email; return this; } public ContactData withHomePhone(String homePhone) { this.homePhone = homePhone; return this; } public ContactData withMobilePhone(String mobilePhone) { this.mobilePhone = mobilePhone; return this; } public String getLastname() { return lastname; } public String getAddress() { return address; } public String getMobilePhone() { return mobilePhone; } public String getWorkPhone() { return workPhone; } public String getHomePhone() { return homePhone; } public String getEmail() { return email; } public Groups getGroups() { return new Groups(groups); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ContactData that = (ContactData) o; if (id != that.id) return false; if (firstname != null ? !firstname.equals(that.firstname) : that.firstname != null) return false; return lastname != null ? 
lastname.equals(that.lastname) : that.lastname == null; } @Override public int hashCode() { int result = id; result = 31 * result + (firstname != null ? firstname.hashCode() : 0); result = 31 * result + (lastname != null ? lastname.hashCode() : 0); return result; } public ContactData inGroup(GroupData group) { groups.add(group); return this; } }
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package libcore.org.json;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;

/**
 * Exercises JSONTokener.nextValue() against the org.json parser's lenient
 * grammar: case-insensitive literals, single-quoted and unquoted strings,
 * comments, octal/hex numbers, and permissive array/object separators.
 */
public class ParsingTest extends TestCase {

    // Empty input has no value at all and must be rejected.
    public void testParsingNoObjects() { try { new JSONTokener("").nextValue(); fail(); } catch (JSONException e) { } }

    // Literals are matched case-insensitively ("NULL", "False", "truE").
    public void testParsingLiterals() throws JSONException { assertParsed(Boolean.TRUE, "true"); assertParsed(Boolean.FALSE, "false"); assertParsed(JSONObject.NULL, "null"); assertParsed(JSONObject.NULL, "NULL"); assertParsed(Boolean.FALSE, "False"); assertParsed(Boolean.TRUE, "truE"); }

    // Double-quoted strings, including \n and \u0020 escapes.
    public void testParsingQuotedStrings() throws JSONException { assertParsed("abc", "\"abc\""); assertParsed("123", "\"123\""); assertParsed("foo\nbar", "\"foo\\nbar\""); assertParsed("foo bar", "\"foo\\u0020bar\""); assertParsed("\"{}[]/\\:,=;#", "\"\\\"{}[]/\\\\:,=;#\""); }

    // Lenient extension: single quotes work like double quotes.
    public void testParsingSingleQuotedStrings() throws JSONException { assertParsed("abc", "'abc'"); assertParsed("123", "'123'"); assertParsed("foo\nbar", "'foo\\nbar'"); assertParsed("foo bar", "'foo\\u0020bar'"); assertParsed("\"{}[]/\\:,=;#", "'\\\"{}[]/\\\\:,=;#'"); }

    // Tokens that fail numeric parsing fall back to plain strings.
    public void testParsingUnquotedStrings() throws JSONException { assertParsed("abc", "abc"); assertParsed("123abc", "123abc"); assertParsed("123e0x", "123e0x"); assertParsed("123e", "123e"); assertParsed("123ee21", "123ee21"); assertParsed("0xFFFFFFFFFFFFFFFFF", "0xFFFFFFFFFFFFFFFFF"); }

    /**
     * Unfortunately the original implementation attempts to figure out what
     * Java number type best suits an input value.
     */
    public void testParsingNumbersThatAreBestRepresentedAsLongs() throws JSONException { assertParsed(9223372036854775807L, "9223372036854775807"); assertParsed(9223372036854775806L, "9223372036854775806"); assertParsed(-9223372036854775808L, "-9223372036854775808"); assertParsed(-9223372036854775807L, "-9223372036854775807"); }

    public void testParsingNumbersThatAreBestRepresentedAsIntegers() throws JSONException { assertParsed(0, "0"); assertParsed(5, "5"); assertParsed(-2147483648, "-2147483648"); assertParsed(2147483647, "2147483647"); }

    // "-0" collapses to Integer zero, not a negative-zero double.
    public void testParsingNegativeZero() throws JSONException { assertParsed(0, "-0"); }

    // A decimal point forces a Double even when the value is integral.
    public void testParsingIntegersWithAdditionalPrecisionYieldDoubles() throws JSONException { assertParsed(1d, "1.00"); assertParsed(1d, "1.0"); assertParsed(0d, "0.0"); assertParsed(-0d, "-0.0"); }

    // Values outside long range, plus extreme double magnitudes.
    public void testParsingNumbersThatAreBestRepresentedAsDoubles() throws JSONException { assertParsed(9.223372036854776E18, "9223372036854775808"); assertParsed(-9.223372036854776E18, "-9223372036854775809"); assertParsed(1.7976931348623157E308, "1.7976931348623157e308"); assertParsed(2.2250738585072014E-308, "2.2250738585072014E-308"); assertParsed(4.9E-324, "4.9E-324"); assertParsed(4.9E-324, "4.9e-324"); }

    // Leading zero means base 8 ("010" == 8) in the lenient grammar.
    public void testParsingOctalNumbers() throws JSONException { assertParsed(5, "05"); assertParsed(8, "010"); assertParsed(1046, "02026"); }

    // 0x prefix means base 16.
    public void testParsingHexNumbers() throws JSONException { assertParsed(5, "0x5"); assertParsed(16, "0x10"); assertParsed(8230, "0x2026"); assertParsed(180150010, "0xABCDEFA"); assertParsed(2077093803, "0x7BCDEFAB"); }

    public void testParsingLargeHexValues() throws JSONException { assertParsed(Integer.MAX_VALUE, "0x7FFFFFFF"); String message = "Hex values are parsed as Strings if their signed " + "value is greater than Integer.MAX_VALUE."; assertParsed(message, 0x80000000L, "0x80000000"); }

    // Both //, # and /* */ comment styles are skipped, as is surrounding whitespace.
    public void testParsingWithCommentsAndWhitespace() throws JSONException { assertParsed("baz", "  //  foo bar \n baz"); assertParsed("baz", "  //  foo bar \r baz"); assertParsed("baz", "  //  foo bar \r\n baz"); assertParsed("baz", "  # foo bar \n baz"); assertParsed("baz", "  # foo bar \r baz"); assertParsed("baz", "  # foo bar \r\n baz"); assertParsed(5, "  /* foo bar \n baz */ 5"); assertParsed(5, "  /* foo bar \n baz */ 5 // quux"); assertParsed(5, "  5   "); assertParsed(5, "  5  \r\n\t "); assertParsed(5, "\r\n\t   5 "); }

    // Semicolon separators and elisions ("[,]") are tolerated; elided slots become null.
    public void testParsingArrays() throws JSONException { assertParsed(array(), "[]"); assertParsed(array(5, 6, true), "[5,6,true]"); assertParsed(array(5, 6, array()), "[5,6,[]]"); assertParsed(array(5, 6, 7), "[5;6;7]"); assertParsed(array(5, 6, 7), "[5 , 6 \t; \r\n 7\n]"); assertParsed(array(5, 6, 7, null), "[5,6,7,]"); assertParsed(array(null, null), "[,]"); assertParsed(array(5, null, null, null, 5), "[5,,,,5]"); assertParsed(array(null, 5), "[,5]"); assertParsed(array(null, null, null), "[,,]"); assertParsed(array(null, null, null, 5), "[,,,5]"); }

    // Unquoted keys and the =, => and ; separators are all accepted.
    public void testParsingObjects() throws JSONException { assertParsed(object("foo", 5), "{\"foo\": 5}"); assertParsed(object("foo", 5), "{foo: 5}"); assertParsed(object("foo", 5, "bar", "baz"), "{\"foo\": 5, \"bar\": \"baz\"}"); assertParsed(object("foo", 5, "bar", "baz"), "{\"foo\": 5; \"bar\": \"baz\"}"); assertParsed(object("foo", 5, "bar", "baz"), "{\"foo\"= 5; \"bar\"= \"baz\"}"); assertParsed(object("foo", 5, "bar", "baz"), "{\"foo\"=> 5; \"bar\"=> \"baz\"}"); assertParsed(object("foo", object(), "bar", array()), "{\"foo\"=> {}; \"bar\"=> []}"); assertParsed(object("foo", object("foo", array(5, 6))), "{\"foo\": {\"foo\": [5, 6]}}"); assertParsed(object("foo", object("foo", array(5, 6))), "{\"foo\":\n\t{\t \"foo\":[5,\r6]}}"); }

    // Every truncation point of an object literal must raise JSONException.
    public void testSyntaxProblemUnterminatedObject() { assertParseFail("{"); assertParseFail("{\"foo\""); assertParseFail("{\"foo\":"); assertParseFail("{\"foo\":bar"); assertParseFail("{\"foo\":bar,"); assertParseFail("{\"foo\":bar,\"baz\""); assertParseFail("{\"foo\":bar,\"baz\":"); assertParseFail("{\"foo\":bar,\"baz\":true"); assertParseFail("{\"foo\":bar,\"baz\":true,"); }

    public void testSyntaxProblemEmptyString() { assertParseFail(""); }

    public void testSyntaxProblemUnterminatedArray() { assertParseFail("["); assertParseFail("[,"); assertParseFail("[,,"); assertParseFail("[true"); assertParseFail("[true,"); assertParseFail("[true,,"); }

    // Missing/invalid keys or values inside braces must be rejected.
    public void testSyntaxProblemMalformedObject() { assertParseFail("{:}"); assertParseFail("{\"key\":}"); assertParseFail("{:true}"); assertParseFail("{\"key\":true:}"); assertParseFail("{null:true}"); assertParseFail("{true:true}"); assertParseFail("{0xFF:true}"); }

    // Asserts parsing fails with JSONException; a StackOverflowError is reported
    // as a distinct failure (guards against unbounded recursion on bad input).
    private void assertParseFail(String malformedJson) { try { new JSONTokener(malformedJson).nextValue(); fail("Successfully parsed: \"" + malformedJson + "\""); } catch (JSONException e) { } catch (StackOverflowError e) { fail("Stack overflowed on input: \"" + malformedJson + "\""); } }

    // Convenience builder for an expected JSONArray.
    private JSONArray array(Object... elements) { return new JSONArray(Arrays.asList(elements)); }

    // Convenience builder for an expected JSONObject from alternating key/value args.
    private JSONObject object(Object... keyValuePairs) throws JSONException { JSONObject result = new JSONObject(); for (int i = 0; i < keyValuePairs.length; i+=2) { result.put((String) keyValuePairs[i], keyValuePairs[i+1]); } return result; }

    // Parses json and compares against expected after canonicalizing both sides.
    private void assertParsed(String message, Object expected, String json) throws JSONException { Object actual = new JSONTokener(json).nextValue(); actual = canonicalize(actual); expected = canonicalize(expected); assertEquals("For input \"" + json + "\" " + message, expected, actual); }

    private void assertParsed(Object expected, String json) throws JSONException { assertParsed("", expected, json); }

    /**
     * Since they don't implement equals or hashCode properly, this recursively
     * replaces JSONObjects with an equivalent HashMap, and JSONArrays with the
     * equivalent ArrayList.
     */
    private Object canonicalize(Object input) throws JSONException { if (input instanceof JSONArray) { JSONArray array = (JSONArray) input; List<Object> result = new ArrayList<Object>(); for (int i = 0; i < array.length(); i++) { result.add(canonicalize(array.opt(i))); } return result; } else if (input instanceof JSONObject) { JSONObject object = (JSONObject) input; Map<String, Object> result = new HashMap<String, Object>(); for (Iterator<?> i = object.keys(); i.hasNext(); ) { String key = (String) i.next(); result.put(key, canonicalize(object.get(key))); } return result; } else if (input == null || input.equals(JSONObject.NULL)) { return JSONObject.NULL; } else { return input; } }
}
//
//   Copyright 2018  SenX S.A.S.
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//

package io.warp10.script;

import io.warp10.script.WarpScriptStack.Macro;
import io.warp10.script.functions.INCLUDE;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

// NOTE(review): sun.net.www.* is a JDK-internal API; instanceof FileURLConnection in
// find() will break on JDKs that hide internal packages — consider checking the URL
// protocol ("file") instead. Left unchanged here.
import sun.net.www.protocol.file.FileURLConnection;

import com.google.common.base.Charsets;

/**
 * Macro library built by adding macros from various files, loaded from a root directory
 * or from the classpath
 *
 * TODO(hbs): add support for secure script (the keystore is not initialized)
 */
public class WarpScriptMacroLibrary {

  // Cache of loaded macros, keyed by entry name without the .mc2-style extension.
  // NOTE(review): a plain HashMap mutated from find()/addJar(); not synchronized —
  // confirm callers are single-threaded or add locking.
  private static final Map<String,Macro> macros = new HashMap<String, Macro>();

  /** Loads every WarpScript macro found in the given jar file. */
  public static void addJar(String path) throws WarpScriptException {
    addJar(path, null);
  }

  /**
   * Scans the jar at 'path' and registers each entry ending with the WarpScript
   * file extension as a secure macro in {@link #macros}. When 'resource' is
   * non-null only that single entry is loaded.
   *
   * @throws WarpScriptException if the file is missing or reading/executing fails.
   */
  private static void addJar(String path, String resource) throws WarpScriptException {
    //
    // Extract basename of path
    //

    File f = new File(path);

    if (!f.exists() || !f.isFile()) {
      throw new WarpScriptException("File not found " + f.getAbsolutePath());
    }

    JarFile jar = null;

    try {
      String basename = f.getName();
      jar = new JarFile(f);

      Enumeration<JarEntry> entries = jar.entries();

      while(entries.hasMoreElements()) {
        JarEntry entry = entries.nextElement();

        if (entry.isDirectory()) {
          continue;
        }

        String name = entry.getName();

        // Only WarpScript sources are considered.
        if (!name.endsWith(WarpScriptMacroRepository.WARPSCRIPT_FILE_EXTENSION)) {
          continue;
        }

        // When loading a single resource, skip everything else.
        if (null != resource && !resource.equals(name)) {
          continue;
        }

        // Macro key = entry name minus the extension.
        name = name.substring(0, name.length() - WarpScriptMacroRepository.WARPSCRIPT_FILE_EXTENSION.length());

        InputStream in = jar.getInputStream(entry);

        Macro macro = loadMacro(jar, in, name);

        //
        // Store resulting macro under 'name'
        //

        // Make macro a secure one
        macro.setSecure(true);

        macros.put(name, macro);
      }
    } catch (IOException ioe) {
      throw new WarpScriptException("Encountered error while loading " + f.getAbsolutePath(), ioe);
    } finally {
      if (null != jar) {
        try { jar.close(); } catch (IOException ioe) {}
      }
    }
  }

  /**
   * Reads WarpScript source from 'in', executes it on a fresh in-memory stack and
   * returns the single Macro the script must leave on top of the stack.
   *
   * @param root a File (directory) or JarFile used to resolve INCLUDE statements;
   *             INCLUDE is only enabled for the duration of this call.
   * @param in   source stream; always closed before returning.
   * @param name macro name, recorded on the stack and on the resulting macro.
   * @throws WarpScriptException if I/O fails, execution fails, or the script does
   *         not leave exactly one Macro on the stack.
   */
  public static Macro loadMacro(Object root, InputStream in, String name) throws WarpScriptException {
    try {
      byte[] buf = new byte[8192];
      StringBuilder sb = new StringBuilder();

      // Slurp the whole stream before executing anything.
      ByteArrayOutputStream out = new ByteArrayOutputStream();

      while(true) {
        int len = in.read(buf);
        if (len < 0) {
          break;
        }
        out.write(buf, 0, len);
      }

      in.close();

      byte[] data = out.toByteArray();

      // Leading space / trailing newline keep the source well-delimited for execMulti.
      sb.setLength(0);
      sb.append(" ");
      sb.append(new String(data, Charsets.UTF_8));
      sb.append("\n");

      MemoryWarpScriptStack stack = new MemoryWarpScriptStack(null, null, new Properties());
      stack.maxLimits();
      stack.setAttribute(WarpScriptStack.ATTRIBUTE_MACRO_NAME, name);

      //
      // Add 'INCLUDE' (a guarded function only usable while 'enabled' is true)
      //

      AtomicBoolean enabled = new AtomicBoolean(true);
      final INCLUDE include = root instanceof File ? new INCLUDE("INCLUDE", (File) root, enabled) : new INCLUDE("INCLUDE", (JarFile) root, enabled);

      stack.define("INCLUDE", new Macro() {
        public boolean isSecure() { return true; }
        public java.util.List<Object> statements() { return new ArrayList<Object>() {{ add(include); }}; }
      } );

      //
      // Execute the code
      //

      stack.execMulti(sb.toString());

      //
      // Disable INCLUDE so the returned macro cannot INCLUDE at call time
      //

      enabled.set(false);

      //
      // Ensure the resulting stack is one level deep and has a macro on top
      //

      if (1 != stack.depth()) {
        throw new WarpScriptException("Stack depth was not 1 after the code execution.");
      }

      if (!(stack.peek() instanceof Macro)) {
        throw new WarpScriptException("No macro was found on top of the stack.");
      }

      Macro macro = (Macro) stack.pop();
      macro.setSecure(true);
      macro.setName(name);

      return macro;
    } catch (IOException ioe) {
      throw new WarpScriptException(ioe);
    } finally {
      // Second close is a no-op on the happy path; guarantees cleanup on errors.
      try { in.close(); } catch (IOException ioe) {}
    }
  }

  /**
   * Returns the macro registered under 'name', lazily loading it from the
   * classpath on first use. Jar-resolved macros are cached in {@link #macros};
   * plain-file classpath macros are reparsed on every call (allows live edits).
   *
   * @return the macro, or null when no matching classpath resource exists.
   */
  public static Macro find(String name) throws WarpScriptException {

    Macro macro = (Macro) macros.get(name);

    //
    // The macro is not (yet) known, we will attempt to load it from the
    // classpath
    //

    if (null == macro) {
      String rsc = name + WarpScriptMacroRepository.WARPSCRIPT_FILE_EXTENSION;
      URL url = WarpScriptMacroLibrary.class.getClassLoader().getResource(rsc);

      if (null != url) {
        try {
          URLConnection conn = url.openConnection();

          if (conn instanceof JarURLConnection) {
            //
            // This case is when the requested macro is in a jar
            //
            final JarURLConnection connection = (JarURLConnection) url.openConnection();
            final URL fileurl = connection.getJarFileURL();
            File f = new File(fileurl.toURI());
            addJar(f.getAbsolutePath(), rsc);
            macro = (Macro) macros.get(name);
          } else if (conn instanceof FileURLConnection) {
            //
            // This case is when the requested macro is in the classpath but not in a jar.
            // In this case we do not cache the parsed macro, allowing for dynamic modification.
            //
            // Root directory = resource URL with the "<name><ext>" suffix stripped.
            String urlstr = url.toString();
            File root = new File(urlstr.substring(0, urlstr.length() - name.length() - WarpScriptMacroRepository.WARPSCRIPT_FILE_EXTENSION.length()));
            macro = loadMacro(root, conn.getInputStream(), name);
          }
        } catch (URISyntaxException use) {
          throw new WarpScriptException("Error while loading '" + name + "'", use);
        } catch (IOException ioe) {
          throw new WarpScriptException("Error while loading '" + name + "'", ioe);
        }
      }
    }

    return macro;
  }
}
package jef.database.jsqlparser.test.select; import jef.database.jsqlparser.expression.BinaryExpression; import jef.database.jsqlparser.expression.Column; import jef.database.jsqlparser.expression.DoubleValue; import jef.database.jsqlparser.expression.Function; import jef.database.jsqlparser.expression.LongValue; import jef.database.jsqlparser.expression.StringValue; import jef.database.jsqlparser.expression.Table; import jef.database.jsqlparser.expression.TimeValue; import jef.database.jsqlparser.expression.TimestampValue; import jef.database.jsqlparser.expression.operators.arithmetic.Multiplication; import jef.database.jsqlparser.expression.operators.relational.EqualsTo; import jef.database.jsqlparser.expression.operators.relational.GreaterThan; import jef.database.jsqlparser.expression.operators.relational.InExpression; import jef.database.jsqlparser.expression.operators.relational.LikeExpression; import jef.database.jsqlparser.parser.ParseException; import jef.database.jsqlparser.statement.select.AllTableColumns; import jef.database.jsqlparser.statement.select.Join; import jef.database.jsqlparser.statement.select.OrderByElement; import jef.database.jsqlparser.statement.select.PlainSelect; import jef.database.jsqlparser.statement.select.Select; import jef.database.jsqlparser.statement.select.SelectExpressionItem; import jef.database.jsqlparser.statement.select.Union; import jef.database.jsqlparser.util.deparser.ExpressionDeParser; import jef.database.jsqlparser.util.deparser.SelectDeParser; import jef.database.jsqlparser.util.deparser.StatementDeParser; import jef.database.jsqlparser.visitor.Statement; import junit.framework.TestCase; import org.junit.Ignore; import org.junit.Test; public class SelectTest extends TestCase { public SelectTest(String arg0) { super(arg0); } public void testLimit() throws ParseException { String statement = "select * from mytable where mytable.col = 9 LIMIT 3, ?1"; Select select = (Select) 
jef.database.DbUtils.parseStatement(statement); assertEquals(3, ((PlainSelect) select.getSelectBody()).getLimit().getOffset()); assertTrue(((PlainSelect) select.getSelectBody()).getLimit().getOffsetJdbcParameter()==null); assertTrue(((PlainSelect) select.getSelectBody()).getLimit().getRowCountJdbcParameter()!=null); assertFalse(((PlainSelect) select.getSelectBody()).getLimit().isLimitAll()); // toString uses standard syntax statement = "select * from mytable where mytable.col = 9 LIMIT ?1 OFFSET 3"; assertEquals(statement, ""+select); statement = "select * from mytable where mytable.col = 9 LIMIT ?1"; select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(0, ((PlainSelect) select.getSelectBody()).getLimit().getRowCount()); assertFalse(((PlainSelect) select.getSelectBody()).getLimit().isLimitAll()); assertEquals(statement, select.toString()); statement = "(select * from mytable WHERE mytable.col = 9 LIMIT 10 OFFSET ?1) UNION " + "(select * from mytable2 WHERE mytable2.col = 9 LIMIT 10, ?1) LIMIT 3, 4"; select = (Select) jef.database.DbUtils.parseStatement(statement); Union union = (Union) select.getSelectBody(); assertEquals(3, union.getLimit().getOffset()); assertEquals(4, union.getLimit().getRowCount()); // toString uses standard syntax statement = "(select * from mytable where mytable.col = 9 LIMIT 10 OFFSET ?1)\n UNION " + "(select * from mytable2 where mytable2.col = 9 LIMIT ?1 OFFSET 10) LIMIT 4 OFFSET 3"; assertEquals(statement, ""+select); statement ="(select * from t1 where t1.c1 = 9 LIMIT 4 OFFSET 1)\n UNION ALL"+ " (select * from t1 where t1.c2 = 9 LIMIT 3 OFFSET 1)\n UNION ALL"+ " (select * from t1 where t1.c1 = 9 LIMIT 10 OFFSET 1) LIMIT 4 OFFSET 3"; select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+select); } @Test public void testLimit2() throws ParseException { String statement = "select * from mytable where mytable.col = 9 LIMIT ?1 , :name"; Select select = (Select) 
jef.database.DbUtils.parseStatement(statement); System.out.println(select); statement = "select * from mytable where mytable.col = 9 LIMIT 1"; select = (Select) jef.database.DbUtils.parseStatement(statement); System.out.println(select); } public void testTop() throws ParseException { String statement = "select TOP 3 * from mytable where mytable.col = 9"; Select select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(3, ((PlainSelect) select.getSelectBody()).getTop().getRowCount()); statement = "select top 5 foo from bar"; select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(5, ((PlainSelect) select.getSelectBody()).getTop().getRowCount()); } public void testSelectItems() throws ParseException { String statement = "select myid AS MYID,mycol,tab.*,schema.tab.*,mytab.mycol2,mytab.mycol,mytab.* from mytable where mytable.col = 9"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("MYID", ((SelectExpressionItem) plainSelect.getSelectItems().get(0)).getAlias()); assertEquals("mycol", ((Column) ((SelectExpressionItem) plainSelect.getSelectItems().get(1)).getExpression()).getColumnName()); assertEquals("tab", ((AllTableColumns) plainSelect.getSelectItems().get(2)).getTable().getName()); assertEquals("schema", ((AllTableColumns) plainSelect.getSelectItems().get(3)).getTable().getSchemaName()); assertEquals("schema.tab", ((AllTableColumns) plainSelect.getSelectItems().get(3)).getTable().toWholeName()); assertEquals( "mytab.mycol2", ((Column) ((SelectExpressionItem) plainSelect.getSelectItems().get(4)).getExpression()).getWholeColumnName()); assertEquals( "mytab.mycol", ((Column) ((SelectExpressionItem) plainSelect.getSelectItems().get(5)).getExpression()).getWholeColumnName()); assertEquals("mytab", ((AllTableColumns) plainSelect.getSelectItems().get(6)).getTable().toWholeName()); assertEquals(statement, ""+plainSelect); statement = "select myid AS 
MYID,(select MAX(ID) AS myid2 from mytable2) AS myalias from mytable where mytable.col = 9"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("myalias", ((SelectExpressionItem) plainSelect.getSelectItems().get(1)).getAlias()); assertEquals(statement, ""+plainSelect); statement = "select (myid + myid2) AS MYID from mytable where mytable.col = 9"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("MYID", ((SelectExpressionItem) plainSelect.getSelectItems().get(0)).getAlias()); assertEquals(statement, ""+plainSelect); } // @Ignore // public void testUnion() throws ParseException { // String statement = // "select * from mytable where mytable.col = 9 UNION " // + "select * from mytable3 where mytable3.col = ?1 UNION " // + "select * from mytable2 LIMIT 3,4"; // // Union union = (Union) ((Select) jef.database.DbUtils.parseStatement(statement))).getSelectBody(); // assertEquals(3, union.getPlainSelects().size()); // assertEquals("mytable", ((Table) ((PlainSelect) union.getPlainSelects().get(0)).getFromItem()).getName()); // assertEquals("mytable3", ((Table) ((PlainSelect) union.getPlainSelects().get(1)).getFromItem()).getName()); // assertEquals("mytable2", ((Table) ((PlainSelect) union.getPlainSelects().get(2)).getFromItem()).getName()); // assertEquals(3, ((PlainSelect) union.getPlainSelects().get(2)).getLimit().getOffset()); // // //use brakets for toString // //use standard limit syntax // String statementToString = // "(select * from mytable where mytable.col = 9) UNION " // + "(select * from mytable3 where mytable3.col = ?1) UNION " // + "(select * from mytable2 LIMIT 4 OFFSET 3)"; // assertEquals(statementToString, ""+union); // } public void testDistinct() throws ParseException { String statement = "select DISTINCT ON (myid) myid,mycol from mytable WHERE mytable.col = 9"; PlainSelect plainSelect = (PlainSelect) ((Select) 
jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals( "myid", ((Column) ((SelectExpressionItem) plainSelect.getDistinct().getOnSelectItems().get(0)).getExpression()).getColumnName()); assertEquals("mycol", ((Column) ((SelectExpressionItem) plainSelect.getSelectItems().get(1)).getExpression()).getColumnName()); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); } public void testFrom() throws ParseException { String statement = "select * from mytable as mytable0, mytable1 alias_tab1, mytable2 as alias_tab2, (select * from mytable3) AS mytable4 where mytable.col = 9"; String statementToString = "select * from mytable mytable0, mytable1 alias_tab1, mytable2 alias_tab2, (select * from mytable3) mytable4 where mytable.col = 9"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(3, plainSelect.getJoins().size()); assertEquals("mytable0", ((Table) plainSelect.getFromItem()).getAlias()); assertEquals("alias_tab1", ((Join) plainSelect.getJoins().get(0)).getRightItem().getAlias()); assertEquals("alias_tab2", ((Join) plainSelect.getJoins().get(1)).getRightItem().getAlias()); assertEquals("mytable4", ((Join) plainSelect.getJoins().get(2)).getRightItem().getAlias()); assertEquals(statementToString.toUpperCase(), plainSelect.toString().toUpperCase()); } public void testJoin() throws ParseException { String statement = "select * from tab1 LEFT outer JOIN tab2 ON tab1.id = tab2.id"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(1, plainSelect.getJoins().size()); assertEquals("tab2", ((Table) ((Join) plainSelect.getJoins().get(0)).getRightItem()).toWholeName()); assertEquals( "tab1.id", ((Column) ((EqualsTo) ((Join) plainSelect.getJoins().get(0)).getOnExpression()).getLeftExpression()).getWholeColumnName()); assertTrue(((Join) plainSelect.getJoins().get(0)).isOuter()); 
assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement = "select * from tab1 LEFT outer JOIN tab2 ON tab1.id = tab2.id INNER JOIN tab3"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(2, plainSelect.getJoins().size()); assertEquals("tab3", ((Table) ((Join) plainSelect.getJoins().get(1)).getRightItem()).toWholeName()); assertFalse(((Join) plainSelect.getJoins().get(1)).isOuter()); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement = "select * from tab1 LEFT outer JOIN tab2 ON tab1.id = tab2.id JOIN tab3"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(2, plainSelect.getJoins().size()); assertEquals("tab3", ((Table) ((Join) plainSelect.getJoins().get(1)).getRightItem()).toWholeName()); assertFalse(((Join) plainSelect.getJoins().get(1)).isOuter()); // implicit INNER statement = "select * from tab1 LEFT outer JOIN tab2 ON tab1.id = tab2.id INNER JOIN tab3"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement= "select * from TA2 LEFT outer JOIN O USING (col1,col2) where D.OasSD = 'asdf' And (kj >= 4 OR l < 'sdf')"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement = "select * from tab1 INNER JOIN tab2 USING (id,id2)"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(1, plainSelect.getJoins().size()); assertEquals("tab2", ((Table) ((Join) plainSelect.getJoins().get(0)).getRightItem()).toWholeName()); assertFalse(((Join) plainSelect.getJoins().get(0)).isOuter()); assertEquals(2, ((Join) 
plainSelect.getJoins().get(0)).getUsingColumns().size()); assertEquals("id2", ((Column) ((Join) plainSelect.getJoins().get(0)).getUsingColumns().get(1)).getWholeColumnName()); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement = "select * from tab1 RIGHT OUTER JOIN tab2 USING (id,id2)"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement = "select * from foo f LEFT INNER JOIN (bar b RIGHT OUTER JOIN baz z ON f.id = z.id) ON f.id = b.id"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); } public void testFunctions() throws ParseException { String statement = "select MAX(id) as max from mytable WHERE mytable.col = 9"; PlainSelect select = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("max", ((SelectExpressionItem) select.getSelectItems().get(0)).getAlias()); assertEquals(statement.toUpperCase(), select.toString().toUpperCase()); statement = "select MAX(id),AVG(pro) as myavg from mytable WHERE mytable.col = 9 GROUP BY pro"; select = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("myavg", ((SelectExpressionItem) select.getSelectItems().get(1)).getAlias()); assertEquals(statement.toUpperCase(), select.toString().toUpperCase()); statement = "select MAX(a,b,c),COUNT(*),D from tab1 GROUP BY D"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); Function fun = (Function) ((SelectExpressionItem) plainSelect.getSelectItems().get(0)).getExpression(); assertEquals("MAX", fun.getName()); assertEquals("b", ((Column)fun.getParameters().getExpressions().get(1)).getWholeColumnName()); assertTrue(((Function) 
((SelectExpressionItem) plainSelect.getSelectItems().get(1)).getExpression()).isAllColumns()); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement = "select {fn MAX(a,b,c)},COUNT(*),D from tab1 GROUP BY D"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); fun = (Function) ((SelectExpressionItem) plainSelect.getSelectItems().get(0)).getExpression(); assertTrue(fun.isEscaped()); assertEquals("MAX", fun.getName()); assertEquals("b", ((Column)fun.getParameters().getExpressions().get(1)).getWholeColumnName()); assertTrue(((Function) ((SelectExpressionItem) plainSelect.getSelectItems().get(1)).getExpression()).isAllColumns()); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); statement = "select ab.MAX(a,b,c),cd.COUNT(*),D from tab1 GROUP BY D"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); fun = (Function) ((SelectExpressionItem) plainSelect.getSelectItems().get(0)).getExpression(); assertEquals("ab.MAX", fun.getName()); assertEquals("b", ((Column)fun.getParameters().getExpressions().get(1)).getWholeColumnName()); fun = (Function) ((SelectExpressionItem) plainSelect.getSelectItems().get(1)).getExpression(); assertEquals("cd.COUNT", fun.getName()); assertTrue(fun.isAllColumns()); assertEquals(statement.toUpperCase(), plainSelect.toString().toUpperCase()); } public void testWhere() throws ParseException { String statement = "select * from tab1 where "; String whereToString = "(a + b + c / d + e * f) * (a / b * (a + b)) > ?1"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement + whereToString)).getSelectBody(); assertTrue(plainSelect.getWhere() instanceof GreaterThan); assertTrue(((GreaterThan) plainSelect.getWhere()).getLeftExpression() instanceof Multiplication); assertEquals(statement+whereToString, ""+plainSelect); ExpressionDeParser expressionDeParser = 
new ExpressionDeParser(); StringBuilder StringBuilder = new StringBuilder(); expressionDeParser.setBuffer(StringBuilder); plainSelect.getWhere().accept(expressionDeParser); assertEquals(whereToString, StringBuilder.toString()); whereToString = "(7 * s + 9 / 3) NOT BETWEEN 3 AND ?1"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement + whereToString)).getSelectBody(); StringBuilder = new StringBuilder(); expressionDeParser.setBuffer(StringBuilder); plainSelect.getWhere().accept(expressionDeParser); assertEquals(whereToString, StringBuilder.toString()); assertEquals(statement+whereToString, ""+plainSelect); whereToString = "a / b NOT IN (?1,'s''adf',234.2)"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement + whereToString)).getSelectBody(); StringBuilder = new StringBuilder(); expressionDeParser.setBuffer(StringBuilder); plainSelect.getWhere().accept(expressionDeParser); assertEquals(whereToString, StringBuilder.toString()); assertEquals(statement+whereToString, ""+plainSelect); whereToString = "NOT 0 = 0"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement + whereToString)).getSelectBody(); String where = " NOT (0 = 0)"; whereToString = "NOT (0 = 0)"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement + whereToString)).getSelectBody(); StringBuilder = new StringBuilder(); expressionDeParser.setBuffer(StringBuilder); plainSelect.getWhere().accept(expressionDeParser); assertEquals(where, StringBuilder.toString()); assertEquals(statement+whereToString, ""+plainSelect); } public void testGroupBy() throws ParseException { String statement = "select * from tab1 where a > 34 group by tab1.b"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(1, plainSelect.getGroupByColumnReferences().size()); assertEquals("tab1.b", ((Column) 
plainSelect.getGroupByColumnReferences().get(0)).getWholeColumnName()); assertEquals(statement, ""+plainSelect); statement = "select * from tab1 where a > 34 group by 2,3"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(2, plainSelect.getGroupByColumnReferences().size()); assertEquals(2, ((LongValue) plainSelect.getGroupByColumnReferences().get(0)).getValue().longValue()); assertEquals(3, ((LongValue) plainSelect.getGroupByColumnReferences().get(1)).getValue().longValue()); assertEquals(statement, ""+plainSelect); } public void testHaving() throws ParseException { String statement = "select MAX(tab1.b) from tab1 where a > 34 group by tab1.b having MAX(tab1.b) > 56"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertTrue(plainSelect.getHaving() instanceof GreaterThan); assertEquals(statement, ""+plainSelect); statement = "select MAX(tab1.b) from tab1 where a > 34 having MAX(tab1.b) IN (56,32,3,?1)"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertTrue(plainSelect.getHaving() instanceof InExpression); assertEquals(statement, ""+plainSelect); } public void testExists() throws ParseException { String statement = "select * from tab1 where"; String where = " EXISTS (select * from tab2)"; statement += where; Statement parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); PlainSelect plainSelect = (PlainSelect) ((Select) parsed).getSelectBody(); ExpressionDeParser expressionDeParser = new ExpressionDeParser(); StringBuilder StringBuilder = new StringBuilder(); expressionDeParser.setBuffer(StringBuilder); SelectDeParser deParser = new SelectDeParser(expressionDeParser, StringBuilder); expressionDeParser.setSelectVisitor(deParser); plainSelect.getWhere().accept(expressionDeParser); assertEquals(where, StringBuilder.toString()); } public 
void testOrderBy() throws ParseException { //TODO: should there be a DESC marker in the OrderByElement class? String statement = "select * from tab1 where a > 34 group by tab1.b order by tab1.a DESC,tab1.b ASC"; String statementToString = "select * from tab1 where a > 34 group by tab1.b order by tab1.a DESC,tab1.b"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(2, plainSelect.getOrderBy().getOrderByElements().size()); assertEquals("tab1.a", ((Column) ((OrderByElement) plainSelect.getOrderBy().getOrderByElements().get(0)).getExpression()).getWholeColumnName()); assertEquals("b", ((Column) ((OrderByElement) plainSelect.getOrderBy().getOrderByElements().get(1)).getExpression()).getColumnName()); assertTrue(((OrderByElement) plainSelect.getOrderBy().getOrderByElements().get(1)).isAsc()); assertFalse(((OrderByElement) plainSelect.getOrderBy().getOrderByElements().get(0)).isAsc()); assertEquals(statementToString, ""+plainSelect); ExpressionDeParser expressionDeParser = new ExpressionDeParser(); StringBuilder StringBuilder = new StringBuilder(); SelectDeParser deParser = new SelectDeParser(expressionDeParser, StringBuilder); expressionDeParser.setSelectVisitor(deParser); expressionDeParser.setBuffer(StringBuilder); plainSelect.accept(deParser); assertEquals(statement, StringBuilder.toString()); statement = "select * from tab1 where a > 34 group by tab1.b order by tab1.a,2"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals(2, plainSelect.getOrderBy().getOrderByElements().size()); assertEquals("a", ((Column) ((OrderByElement) plainSelect.getOrderBy().getOrderByElements().get(0)).getExpression()).getColumnName()); assertEquals(2, ((LongValue) ((OrderByElement) plainSelect.getOrderBy().getOrderByElements().get(1)).getExpression()).getValue().longValue()); assertEquals(statement, ""+plainSelect); } public void testTimestamp() 
throws ParseException { String statement = "select * from tab1 where a > {ts '2004-04-30 04:05:34.56'}"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("2004-04-30 04:05:34.56", ((TimestampValue)((GreaterThan) plainSelect.getWhere()).getRightExpression()).getValue().toString()); assertEquals(statement, ""+plainSelect); } public void testTime() throws ParseException { String statement = "select * from tab1 where a > {t '04:05:34'}"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("04:05:34", (((TimeValue)((GreaterThan) plainSelect.getWhere()).getRightExpression()).getValue()).toString()); assertEquals(statement, ""+plainSelect); } public void testCase() throws ParseException { String statement = "select a,CASE b WHEN 1 THEN 2 END from tab1"; Statement parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a,(CASE WHEN (a > 2) THEN 3 END) AS b from tab1"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a,(CASE WHEN a > 2 THEN 3 ELSE 4 END) AS b from tab1"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a,(CASE b WHEN 1 THEN 2 WHEN 3 THEN 4 ELSE 5 END) from tab1"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a,(CASE " + "WHEN b > 1 THEN 'BBB' " + "WHEN a = 3 THEN 'AAA' " + "END) from tab1"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a,(CASE " + "WHEN b > 1 THEN 'BBB' " + "WHEN a = 3 THEN 'AAA' " + "END) from tab1 " + "where c = (CASE " + "WHEN d <> 3 THEN 5 " + "ELSE 10 " + "END)"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = 
"select a,CASE a " + "WHEN 'b' THEN 'BBB' " + "WHEN 'a' THEN 'AAA' " + "END AS b from tab1"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a from tab1 where CASE b WHEN 1 THEN 2 WHEN 3 THEN 4 ELSE 5 END > 34"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a from tab1 where CASE b WHEN 1 THEN 2 + 3 ELSE 4 END > 34"; parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); statement = "select a,(CASE " + "WHEN (CASE a WHEN 1 THEN 10 ELSE 20 END) > 15 THEN 'BBB' " + // "WHEN (select c from tab2 WHERE d = 2) = 3 THEN 'AAA' " + "END) from tab1"; parsed = jef.database.DbUtils.parseStatement(statement); //System.out.println(""+statement); //System.out.println(""+parsed); assertEquals(statement, ""+parsed); } public void testReplaceAsFunction() throws ParseException { String statement = "select REPLACE(a,'b',c) from tab1"; Statement parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); } public void testLike() throws ParseException { String statement = "select * from tab1 where a LIKE 'test'"; PlainSelect plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("test", (((StringValue)((LikeExpression) plainSelect.getWhere()).getRightExpression()).getValue()).toString()); assertEquals(statement, ""+plainSelect); statement = "select * from tab1 where a LIKE 'test' ESCAPE 'test2'"; plainSelect = (PlainSelect) ((Select) jef.database.DbUtils.parseStatement(statement)).getSelectBody(); assertEquals("test", (((StringValue)((LikeExpression) plainSelect.getWhere()).getRightExpression()).getValue()).toString()); assertEquals("test2", (((LikeExpression) plainSelect.getWhere()).getEscape())); assertEquals(statement, ""+plainSelect); } public void testSelectOrderHaving() throws ParseException { String statement = "select 
units,count(units) AS num from currency group by units having count(units) > 1 order by num"; Statement parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); } public void testDouble() throws ParseException { String statement = "select 1e2, * from mytable WHERE mytable.col = 9"; Select select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(1e2, ((DoubleValue)((SelectExpressionItem)((PlainSelect) select.getSelectBody()).getSelectItems().get(0)).getExpression()).getValue(), 0); statement = "select * from mytable WHERE mytable.col = 1.e2"; select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(1e2, ((DoubleValue)((BinaryExpression)((PlainSelect) select.getSelectBody()).getWhere()).getRightExpression()).getValue(), 0); statement = "select * from mytable WHERE mytable.col = 1.2e2"; select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(1.2e2, ((DoubleValue)((BinaryExpression)((PlainSelect) select.getSelectBody()).getWhere()).getRightExpression()).getValue(), 0); statement = "select * from mytable WHERE mytable.col = 2e2"; select = (Select) jef.database.DbUtils.parseStatement(statement); assertEquals(2e2, ((DoubleValue)((BinaryExpression)((PlainSelect) select.getSelectBody()).getWhere()).getRightExpression()).getValue(), 0); } public void testWith() throws ParseException { String statement = "WITH DINFO (DEPTNO,AVGSALARY,EMPCOUNT) AS " + "(select OTHERS.WORKDEPT,AVG(OTHERS.SALARY),COUNT(*) from EMPLOYEE OTHERS " + "group by OTHERS.WORKDEPT), DINFOMAX AS (select MAX(AVGSALARY) AS AVGMAX from DINFO) " + "select THIS_EMP.EMPNO,THIS_EMP.SALARY,DINFO.AVGSALARY,DINFO.EMPCOUNT,DINFOMAX.AVGMAX " + "from EMPLOYEE THIS_EMP INNER JOIN DINFO INNER JOIN DINFOMAX " + "where THIS_EMP.JOB = 'SALESREP' AND THIS_EMP.WORKDEPT = DINFO.DEPTNO"; Select select = (Select) jef.database.DbUtils.parseStatement(statement); Statement parsed = jef.database.DbUtils.parseStatement(statement); 
assertEquals(statement, ""+parsed); } public void testSelectAliasInQuotes() throws ParseException { String statement = "select mycolumn AS \"My Column Name\" from mytable"; Statement parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); } public void testSelectJoinWithComma() throws ParseException { String statement = "select cb.Genus,cb.Species from Coleccion_de_Briofitas cb, unigeoestados es " + "where es.nombre = \"Tamaulipas\" AND cb.the_geom = es.geom"; Statement parsed = jef.database.DbUtils.parseStatement(statement); assertEquals(statement, ""+parsed); } public void testDeparser() throws ParseException { String statement = "select a.OWNERLASTNAME,a.OWNERFIRSTNAME " +"from ANTIQUEOWNERS a, ANTIQUES b " +"where b.BUYERID = a.OWNERID AND b.ITEM = 'Chair'"; Statement parsed = jef.database.DbUtils.parseStatement(statement); StatementDeParser deParser=new StatementDeParser(new StringBuilder()); parsed.accept(deParser); assertEquals(statement, deParser.getBuffer().toString()); statement = "select count(DISTINCT f + 4) from a"; parsed = jef.database.DbUtils.parseStatement(statement); deParser=new StatementDeParser(new StringBuilder()); parsed.accept(deParser); assertEquals(statement, parsed.toString()); assertEquals(statement, deParser.getBuffer().toString()); statement = "select count(DISTINCT f,g,h) from a"; parsed = jef.database.DbUtils.parseStatement(statement); deParser=new StatementDeParser(new StringBuilder()); parsed.accept(deParser); assertEquals(statement, parsed.toString()); assertEquals(statement, deParser.getBuffer().toString()); } public void testMysqlQuote() throws ParseException { String statement = "select `a.OWNERLASTNAME`,`OWNERFIRSTNAME` " +"from `ANTIQUEOWNERS` a, ANTIQUES b " +"where b.BUYERID = a.OWNERID AND b.ITEM = 'Chair'"; Statement parsed = jef.database.DbUtils.parseStatement(statement); StatementDeParser deParser=new StatementDeParser(new StringBuilder()); parsed.accept(deParser); 
assertEquals(statement, parsed.toString()); assertEquals(statement, deParser.getBuffer().toString()); } public void testConcat() throws ParseException { String statement = "select a || b || c + 4 from t"; Statement parsed = jef.database.DbUtils.parseStatement(statement); StatementDeParser deParser=new StatementDeParser(new StringBuilder()); parsed.accept(deParser); assertEquals(statement, parsed.toString()); assertEquals(statement, deParser.getBuffer().toString()); } // public void testMatches() throws ParseException { // String statement = "select * from team where team.search_column @@ to_tsquery('new & york & yankees')"; // Statement parsed = jef.database.DbUtils.parseStatement(statement); // StatementDeParser deParser=new StatementDeParser(new StringBuilder()); // parsed.accept(deParser); // // assertEquals(statement, parsed.toString()); // assertEquals(statement, deParser.getBuffer().toString()); // } public void testGroupByExpression() throws ParseException { String statement = "select col1,col2,col1 + col2,sum(col8)" + " from table1 " + "group by col1,col2,col1 + col2"; Statement parsed = jef.database.DbUtils.parseStatement(statement); StatementDeParser deParser=new StatementDeParser(new StringBuilder()); parsed.accept(deParser); assertEquals(statement, parsed.toString()); assertEquals(statement, deParser.getBuffer().toString()); } public void testBitwise() throws ParseException { String statement = "select col1 & 32,col2 ^ col1,col1 | col2" + " from table1"; Statement parsed = jef.database.DbUtils.parseStatement(statement); StatementDeParser deParser=new StatementDeParser(new StringBuilder()); parsed.accept(deParser); assertEquals(statement, parsed.toString()); assertEquals(statement, deParser.getBuffer().toString()); } public static void main(String[] args) { //junit.swingui.TestRunner.run(SelectTest.class); } }
/* * Copyright 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.j2cl.integration.casts; import static com.google.j2cl.integration.testing.Asserts.assertThrowsClassCastException; import static com.google.j2cl.integration.testing.Asserts.assertTrue; import java.io.Serializable; import jsinterop.annotations.JsFunction; import jsinterop.annotations.JsPackage; import jsinterop.annotations.JsType; public class Main { public static void main(String... args) { testCasts_basics(); testCasts_generics(); testCasts_typeVariableWithNativeBound(); testCasts_parameterizedNativeType(); testCasts_exceptionMessages(); testCasts_erasureCastOnThrow(); testCasts_erasureCastOnConversion(); testCasts_notOptimizeable(); testArrayCasts_basics(); testArrayCasts_differentDimensions(); testArrayCasts_sameDimensions(); testArrayCasts_erasureCastsOnArrayAccess_fromArrayOfT(); testArrayCasts_erasureCastsOnArrayAccess_fromT(); testArrayCasts_boxedTypes(); testDevirtualizedCasts_object(); testDevirtualizedCasts_number(); testDevirtualizedCasts_comparable(); testDevirtualizedCasts_charSequence(); testDevirtualizedCasts_void(); } public interface Interface {} private static void testCasts_basics() { Object o = null; String s = (String) o; Serializable serializable = new Serializable() {}; Serializable unusedSerializable = (Serializable) serializable; assertThrowsClassCastException( () -> { RuntimeException unused = (RuntimeException) serializable; }); Interface intf = new 
Interface() {}; Interface unusedInterface = (Interface) intf; assertThrowsClassCastException( () -> { Serializable unused = (Serializable) intf; }); } private static void testArrayCasts_basics() { // Cast null to Object[] Object o = (Object[]) null; // Cast null to Object[][] o = (Object[][]) null; // Cast JS "[]" to Object[] o = new Object[] {}; // Actually emits as the JS array literal "[]". o = (Object[]) o; // Cast JS "$Arrays.$init([], Object, 2))" to Object[][] o = new Object[][] {}; o = (Object[][]) o; } private static void testArrayCasts_sameDimensions() { Object o = null; Object[] objects = new Object[0]; String[] strings = new String[0]; CharSequence[] charSequences = new CharSequence[0]; o = (Object[]) objects; o = (Object[]) strings; o = (String[]) strings; o = (CharSequence[]) strings; o = (Object[]) charSequences; o = (CharSequence[]) charSequences; assertThrowsClassCastException( () -> { Object unused = (String[]) objects; }, String[].class); assertThrowsClassCastException( () -> { Object unused = (CharSequence[]) objects; }, CharSequence[].class); assertThrowsClassCastException( () -> { Object unused = (String[]) charSequences; }, String[].class); } private static void testArrayCasts_differentDimensions() { Object object = new Object[10][10]; // These are fine. Object[] object1d = (Object[]) object; Object[][] object2d = (Object[][]) object; // A 2d array cannot be cast to a 3d array. assertThrowsClassCastException( () -> { Object[][][] unused = (Object[][][]) object2d; }, Object[][][].class); // A non-array cannot be cast to an array. assertThrowsClassCastException( () -> { Object[] unused = (Object[]) new Object(); }, Object[].class); } private static void testArrayCasts_erasureCastsOnArrayAccess_fromArrayOfT() { // Array of the right type. 
ArrayContainer<String> stringArrayInArrayContainer = new ArrayContainer<>(new String[1]); String unusedString = stringArrayInArrayContainer.data[0]; int len = stringArrayInArrayContainer.data.length; assertTrue(len == 1); // Array of the wrong type. ArrayContainer<String> objectArrayInArrayContainer = new ArrayContainer<>(new Object[1]); assertThrowsClassCastException( () -> { String unused = objectArrayInArrayContainer.data[0]; }, String[].class); // Make sure access to the length field performs the right cast. The length field // has special handling in CompilationUnitBuider. assertThrowsClassCastException( () -> { int unused = objectArrayInArrayContainer.data.length; }, String[].class); // Not even an array. assertThrowsClassCastException( () -> { ArrayContainer<String> container = new ArrayContainer<>(new Object()); }, Object[].class); } private static class ArrayContainer<T> { ArrayContainer(Object array) { this.data = (T[]) array; } T[] data; } private static void testArrayCasts_erasureCastsOnArrayAccess_fromT() { // Array of the right type. Container<String[]> stringArrayInContainer = new Container<>(new String[1]); String unusedString = stringArrayInContainer.data[0]; int len = stringArrayInContainer.data.length; assertTrue(len == 1); // Array of the wrong type. Container<String[]> objectArrayInContainer = new Container<>(new Object[1]); assertThrowsClassCastException( () -> { String unused = objectArrayInContainer.data[0]; }, String[].class); assertThrowsClassCastException( () -> { int unused = objectArrayInContainer.data.length; }, String[].class); // Not even an array. 
Container<String[]> notAnArrayInContainer = new Container<>(new Object()); assertThrowsClassCastException( () -> { String unused = notAnArrayInContainer.data[0]; }, String[].class); assertThrowsClassCastException( () -> { int unused = notAnArrayInContainer.data.length; }, String[].class); } private static class Container<T> { Container(Object array) { this.data = (T) array; } T data; } private static void testArrayCasts_boxedTypes() { Object b = new Byte((byte) 1); Byte unusedB = (Byte) b; Number unusedN = (Number) b; castToDoubleException(b); castToFloatException(b); castToIntegerException(b); castToLongException(b); castToShortException(b); castToCharacterException(b); castToBooleanException(b); Object d = new Double(1.0); Double unusedD = (Double) d; unusedN = (Number) d; castToByteException(d); castToFloatException(d); castToIntegerException(d); castToLongException(d); castToShortException(d); castToCharacterException(d); castToBooleanException(d); Object f = new Float(1.0f); Float unusedF = (Float) f; unusedN = (Number) f; castToByteException(f); castToDoubleException(f); castToIntegerException(f); castToLongException(f); castToShortException(f); castToCharacterException(f); castToBooleanException(f); Object i = new Integer(1); Integer unusedI = (Integer) i; unusedN = (Number) i; castToByteException(i); castToDoubleException(i); castToFloatException(i); castToLongException(i); castToShortException(i); castToCharacterException(i); castToBooleanException(i); Object l = new Long(1L); Long unusedL = (Long) l; unusedN = (Number) l; castToByteException(l); castToDoubleException(l); castToFloatException(l); castToIntegerException(l); castToShortException(l); castToCharacterException(l); castToBooleanException(l); Object s = new Short((short) 1); Short unusedS = (Short) s; unusedN = (Number) s; castToByteException(s); castToDoubleException(s); castToFloatException(s); castToIntegerException(s); castToLongException(s); castToCharacterException(s); 
castToBooleanException(s); Object c = new Character('a'); Character unusedC = (Character) c; castToByteException(c); castToDoubleException(c); castToFloatException(c); castToIntegerException(c); castToLongException(c); castToShortException(c); castToNumberException(c); castToBooleanException(c); Object bool = new Boolean(true); Boolean unusedBool = (Boolean) bool; castToByteException(bool); castToDoubleException(bool); castToFloatException(bool); castToIntegerException(bool); castToLongException(bool); castToShortException(bool); castToNumberException(bool); castToCharacterException(bool); Object sn = new SubNumber(); unusedN = (Number) sn; } private static void castToByteException(Object o) { assertThrowsClassCastException( () -> { Byte b = (Byte) o; }); } private static void castToDoubleException(Object o) { assertThrowsClassCastException( () -> { Double d = (Double) o; }); } private static void castToFloatException(Object o) { assertThrowsClassCastException( () -> { Float f = (Float) o; }); } private static void castToIntegerException(Object o) { assertThrowsClassCastException( () -> { Integer i = (Integer) o; }); } private static void castToLongException(Object o) { assertThrowsClassCastException( () -> { Long l = (Long) o; }); } private static void castToShortException(Object o) { assertThrowsClassCastException( () -> { Short s = (Short) o; }); } private static void castToCharacterException(Object o) { assertThrowsClassCastException( () -> { Character c = (Character) o; }); } private static void castToBooleanException(Object o) { assertThrowsClassCastException( () -> { Boolean b = (Boolean) o; }); } private static void castToNumberException(Object o) { assertThrowsClassCastException( () -> { Number n = (Number) o; }); } private static class SubNumber extends Number { @Override public int intValue() { return 0; } @Override public long longValue() { return 0; } @Override public float floatValue() { return 0; } @Override public double doubleValue() { return 0; } 
}

// NOTE(review): fragment of a transpiler (J2CL/GWT-style) cast-semantics test class.
// The enclosing class declaration and the method closed by the brace above are
// outside this chunk. The seemingly redundant casts below are deliberate: they
// exercise the generated cast checks for devirtualized/native/generic types.

/** Casts of devirtualized types (String, boxed types, arrays) to Object must succeed. */
private static void testDevirtualizedCasts_object() {
  Object unusedObject = null;
  // All these casts should succeed.
  unusedObject = (Object) "";
  unusedObject = (Object) new Double(0);
  unusedObject = (Object) new Boolean(false);
  unusedObject = (Object) new Object[] {};
}

/** Cast of a devirtualized Double to Number must succeed. */
private static void testDevirtualizedCasts_number() {
  Number unusedNumber = null;
  // This cast should succeed.
  unusedNumber = (Number) new Double(0);
}

/** Casts of devirtualized types to Comparable (raw) must succeed. */
private static void testDevirtualizedCasts_comparable() {
  Comparable<?> unusedComparable = null;
  // All these casts should succeed.
  unusedComparable = (Comparable) "";
  unusedComparable = (Comparable) new Double(0);
  unusedComparable = (Comparable) new Boolean(false);
}

/** Cast of a String to CharSequence must succeed. */
private static void testDevirtualizedCasts_charSequence() {
  CharSequence unusedCharSequence = null;
  // This cast should succeed.
  unusedCharSequence = (CharSequence) "";
}

/** Cast of null to Void must succeed (null is a member of every reference type). */
private static void testDevirtualizedCasts_void() {
  Void unusedVoid = null;
  // This cast should succeed.
  unusedVoid = (Void) null;
}

/**
 * Erased casts to type variables: a cast to a bounded variable checks the bound at
 * runtime; a cast to an unbounded variable is a no-op (Object).
 */
@SuppressWarnings({"unused", "unchecked"})
private static <T, E extends Number> void testCasts_generics() {
  Object o = new Integer(1);
  E e = (E) o; // cast to type variable with bound, casting Integer instance to Number
  T t = (T) o; // cast to type variable without bound, casting Integer instance to Object
  assertThrowsClassCastException(
      () -> {
        Object error = new Error();
        E unused = (E) error; // casting Error instance to Number, exception.
      });
  class Pameterized<T, E extends Number> {}
  Object c = new Pameterized<Number, Number>();
  // Type arguments are erased, so only the raw type is checked here.
  Pameterized<Error, Number> cc = (Pameterized<Error, Number>) c; // cast to parameterized type.
  Object[] is = new Integer[1];
  Object[] os = new Object[1];
  // Array casts to T[]/E[] check the erased component type (Number[]/Object[]).
  E[] es = (E[]) is;
  T[] ts = (T[]) is;
  assertThrowsClassCastException(
      () -> {
        E[] ees = (E[]) os; // Object[] is not a Number[]
      });
  T[] tts = (T[]) os; // T erases to Object, so Object[] passes
}

// Native JS type mapped onto the browser-global Map; used as a generic bound below.
@JsType(isNative = true, namespace = JsPackage.GLOBAL, name = "Map")
private static class NativeMap<K, V> {}

/** Casts to a type variable whose erased bound is a native JS type. */
@SuppressWarnings({"rawtypes", "unchecked"})
private static <T extends NativeMap<?, ?>> void testCasts_typeVariableWithNativeBound() {
  {
    Object o = new Object[] {new Object()};
    T[] unusedArray = (T[]) o; // cast to T[].
    assertThrowsClassCastException(
        () -> {
          T unused = (T) o; // an array is not a NativeMap
        });
  }
  {
    Object o = new NativeMap();
    T unused = (T) o;
  }
}

/** Casts involving parameterized native types and arrays thereof. */
@SuppressWarnings({"rawtypes", "unchecked"})
private static void testCasts_parameterizedNativeType() {
  Object a = new NativeMap<String, Object>();
  NativeMap e = (NativeMap) a;
  assertTrue(e == a);
  NativeMap<String, Object> f = (NativeMap<String, Object>) a;
  assertTrue(f == a);
  assertTrue(a instanceof NativeMap);

  Object os = new NativeMap[] {e};
  NativeMap[] g = (NativeMap[]) os;
  assertTrue(g[0] == e);
  NativeMap<String, Object>[] h = (NativeMap<String, Object>[]) os;
  assertTrue(h[0] == e);
  assertTrue(os instanceof NativeMap[]);
}

// Plain marker classes for failure-message checks.
private static class Foo {}

private static class Bar {}

// Native type whose JS name ("String") should appear in the exception message.
@JsType(isNative = true, namespace = JsPackage.GLOBAL, name = "String")
private static class Baz {}

@JsFunction
interface Qux {
  String m(String s);
}

/** Verifies the target-type names reported by ClassCastException messages. */
@SuppressWarnings("unused")
private static void testCasts_exceptionMessages() {
  Object object = new Foo();
  assertThrowsClassCastException(
      () -> {
        Bar bar = (Bar) object;
      },
      Bar.class);
  assertThrowsClassCastException(
      () -> {
        Bar[] bars = (Bar[]) object;
      },
      Bar[].class);
  assertThrowsClassCastException(
      () -> {
        String string = (String) object;
      },
      String.class);
  assertThrowsClassCastException(
      () -> {
        Void aVoid = (Void) object;
      },
      Void.class);
  assertThrowsClassCastException(
      () -> {
        Baz baz = (Baz) object;
      },
      "String");
  assertThrowsClassCastException(
      () -> {
        Qux qux = (Qux) object;
      },
      "<native function>");
}

/** The erasure check inserted at a throw site must reject a non-T value. */
private static void testCasts_erasureCastOnThrow() {
  assertThrowsClassCastException(
      () -> {
        throw returnObjectAsT(new RuntimeException());
      },
      RuntimeException.class);
}

/** The erasure check inserted at an unboxing conversion must reject a non-T value. */
private static void testCasts_erasureCastOnConversion() {
  assertThrowsClassCastException(
      () -> {
        int i = (int) returnObjectAsT(new Integer(1));
      },
      Integer.class);
}

// Intentionally returns an Object as T; the (unchecked) cast succeeds here and the
// failure surfaces at the caller's erasure-cast site.
private static <T> T returnObjectAsT(T unused) {
  return (T) new Object();
}

// Mutable holder whose reset() flips f between Foo and Bar, so f's type cannot be
// assumed stable across calls.
private static class Holder {
  public Object f = null;

  public Holder reset() {
    f = f instanceof Foo ? new Bar() : new Foo();
    return this;
  }
}

private static Object staticObject = new Foo();

// Touching StaticClass runs its clinit, which reassigns staticObject — defeating
// any instanceof-then-cast optimization in the test below.
private static class StaticClass {
  static Holder a = new Holder();

  static {
    staticObject = new Object();
  }
}

/**
 * Casts that must NOT be optimized away: the instanceof result is invalidated by a
 * side effect (class initialization / reset()) between the check and the cast.
 */
public static void testCasts_notOptimizeable() {
  assertThrowsClassCastException(
      () -> {
        if (staticObject instanceof Foo) {
          // Referencing StaticClass initializes it, replacing staticObject.
          StaticClass.a.f = (Foo) staticObject;
        }
      });

  Holder h = new Holder();
  assertThrowsClassCastException(
      () -> {
        if (h.reset().f instanceof Foo) {
          // Second reset() flips f to Bar before the cast.
          Foo foo = (Foo) h.reset().f;
        }
      });
}
}
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.test.history;

import java.util.List;

import org.camunda.bpm.engine.ProcessEngineConfiguration;
import org.camunda.bpm.engine.history.HistoricIdentityLinkLog;
import org.camunda.bpm.engine.history.HistoricIdentityLinkLogQuery;
import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.task.IdentityLink;
import org.camunda.bpm.engine.test.Deployment;
import org.camunda.bpm.engine.test.RequiredHistoryLevel;

/**
 * Verifies that historic identity link log entries are created for candidate
 * users/groups, assignees and candidate starters declared directly in BPMN XML,
 * and that a deployment's tenant id is propagated to those log entries.
 *
 * <p>Review note: all {@code assertEquals} calls now use the JUnit
 * {@code (expected, actual)} argument order; several were reversed, which
 * produced misleading failure messages.
 */
@RequiredHistoryLevel(ProcessEngineConfiguration.HISTORY_FULL)
public class HistoricIdentityLinkLogTestByXml extends PluggableProcessEngineTestCase {

  // Process definition keys as declared inside the deployed BPMN resources
  // (the "Canidate" spelling matches the ids used in the XML files).
  private static final String PROCESS_DEFINITION_KEY_CANDIDATE_USER = "oneTaskProcessForHistoricIdentityLinkWithCanidateUser";
  private static final String PROCESS_DEFINITION_KEY_CANDIDATE_GROUP = "oneTaskProcessForHistoricIdentityLinkWithCanidateGroup";
  private static final String PROCESS_DEFINITION_KEY_ASSIGNEE = "oneTaskProcessForHistoricIdentityLinkWithAssignee";
  private static final String PROCESS_DEFINITION_KEY_CANDIDATE_STARTER_USER = "oneTaskProcessForHistoricIdentityLinkWithCanidateStarterUsers";
  private static final String PROCESS_DEFINITION_KEY_CANDIDATE_STARTER_GROUP = "oneTaskProcessForHistoricIdentityLinkWithCanidateStarterGroups";

  // Identity link values expected from the XML declarations.
  private static final String XML_USER = "demo";
  private static final String XML_GROUP = "demoGroups";
  private static final String XML_ASSIGNEE = "assignee";

  protected static final String TENANT_ONE = "tenant1";

  // Classpath resources deployed manually by the tenant-propagation tests.
  protected static final String CANDIDATE_STARTER_USER = "org/camunda/bpm/engine/test/api/repository/ProcessDefinitionCandidateTest.testCandidateStarterUser.bpmn20.xml";
  protected static final String CANDIDATE_STARTER_USERS = "org/camunda/bpm/engine/test/api/repository/ProcessDefinitionCandidateTest.testCandidateStarterUsers.bpmn20.xml";
  protected static final String CANDIDATE_STARTER_GROUP = "org/camunda/bpm/engine/test/api/repository/ProcessDefinitionCandidateTest.testCandidateStarterGroup.bpmn20.xml";
  protected static final String CANDIDATE_STARTER_GROUPS = "org/camunda/bpm/engine/test/api/repository/ProcessDefinitionCandidateTest.testCandidateStarterGroups.bpmn20.xml";

  @Deployment(resources = { "org/camunda/bpm/engine/test/api/runtime/OneTaskProcessWithCandidateUser.bpmn20.xml" })
  public void testShouldAddTaskCandidateforAddIdentityLinkUsingXml() {
    // Pre test: no identity link logs exist yet.
    List<HistoricIdentityLinkLog> historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(0, historicIdentityLinks.size());

    // given
    startProcessInstance(PROCESS_DEFINITION_KEY_CANDIDATE_USER);

    historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicIdentityLinks.size());

    // query test
    HistoricIdentityLinkLogQuery query = historyService.createHistoricIdentityLinkLogQuery();
    assertEquals(1, query.userId(XML_USER).count());
  }

  @Deployment(resources = { "org/camunda/bpm/engine/test/api/runtime/OneTaskProcessWithTaskAssignee.bpmn20.xml" })
  public void testShouldAddTaskAssigneeforAddIdentityLinkUsingXml() {
    // Pre test: no identity link logs exist yet.
    List<HistoricIdentityLinkLog> historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(0, historicIdentityLinks.size());

    // given
    startProcessInstance(PROCESS_DEFINITION_KEY_ASSIGNEE);

    historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicIdentityLinks.size());

    // query test
    HistoricIdentityLinkLogQuery query = historyService.createHistoricIdentityLinkLogQuery();
    assertEquals(1, query.userId(XML_ASSIGNEE).count());
  }

  @Deployment(resources = { "org/camunda/bpm/engine/test/api/runtime/OneTaskProcessWithCandidateGroups.bpmn20.xml" })
  public void testShouldAddTaskCandidateGroupforAddIdentityLinkUsingXml() {
    // Pre test: no identity link logs exist yet.
    List<HistoricIdentityLinkLog> historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(0, historicIdentityLinks.size());

    // given
    startProcessInstance(PROCESS_DEFINITION_KEY_CANDIDATE_GROUP);

    historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicIdentityLinks.size());

    // query test
    HistoricIdentityLinkLogQuery query = historyService.createHistoricIdentityLinkLogQuery();
    assertEquals(1, query.groupId(XML_GROUP).count());
  }

  @Deployment(resources = { "org/camunda/bpm/engine/test/api/runtime/OneTaskProcessWithCandidateStarterUsers.bpmn20.xml" })
  public void testShouldAddProcessCandidateStarterUserforAddIdentityLinkUsingXml() {
    // Pre test - the historic identity link is added as part of the deployment.
    List<HistoricIdentityLinkLog> historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicIdentityLinks.size());

    // given
    ProcessDefinition latestProcessDef = repositoryService.createProcessDefinitionQuery()
        .processDefinitionKey(PROCESS_DEFINITION_KEY_CANDIDATE_STARTER_USER)
        .singleResult();
    assertNotNull(latestProcessDef);

    List<IdentityLink> links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
    assertEquals(1, links.size());

    // reading the links must not create additional log entries
    historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicIdentityLinks.size());

    // query test
    HistoricIdentityLinkLogQuery query = historyService.createHistoricIdentityLinkLogQuery();
    assertEquals(1, query.userId(XML_USER).count());
  }

  @Deployment(resources = { "org/camunda/bpm/engine/test/api/runtime/OneTaskProcessWithCandidateStarterGroups.bpmn20.xml" })
  public void testShouldAddProcessCandidateStarterGroupforAddIdentityLinkUsingXml() {
    // Pre test - the historic identity link is added as part of the deployment.
    List<HistoricIdentityLinkLog> historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicIdentityLinks.size());

    // given
    ProcessDefinition latestProcessDef = repositoryService.createProcessDefinitionQuery()
        .processDefinitionKey(PROCESS_DEFINITION_KEY_CANDIDATE_STARTER_GROUP)
        .singleResult();
    assertNotNull(latestProcessDef);

    List<IdentityLink> links = repositoryService.getIdentityLinksForProcessDefinition(latestProcessDef.getId());
    assertEquals(1, links.size());

    // reading the links must not create additional log entries
    historicIdentityLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicIdentityLinks.size());

    // query test
    HistoricIdentityLinkLogQuery query = historyService.createHistoricIdentityLinkLogQuery();
    assertEquals(1, query.groupId(XML_GROUP).count());
  }

  public void testPropagateTenantIdToCandidateStarterUser() {
    // when
    org.camunda.bpm.engine.repository.Deployment deployment = repositoryService.createDeployment()
        .addClasspathResource(CANDIDATE_STARTER_USER)
        .tenantId(TENANT_ONE)
        .deploy();

    // then
    List<HistoricIdentityLinkLog> historicLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicLinks.size());

    HistoricIdentityLinkLog historicLink = historicLinks.get(0);
    assertNotNull(historicLink.getTenantId());
    assertEquals(TENANT_ONE, historicLink.getTenantId());

    // cleanup (cascade delete removes the history as well)
    repositoryService.deleteDeployment(deployment.getId(), true);
  }

  public void testPropagateTenantIdToCandidateStarterUsers() {
    // when
    org.camunda.bpm.engine.repository.Deployment deployment = repositoryService.createDeployment()
        .addClasspathResource(CANDIDATE_STARTER_USERS)
        .tenantId(TENANT_ONE)
        .deploy();

    // then
    List<HistoricIdentityLinkLog> historicLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(3, historicLinks.size());

    for (HistoricIdentityLinkLog historicLink : historicLinks) {
      assertNotNull(historicLink.getTenantId());
      assertEquals(TENANT_ONE, historicLink.getTenantId());
    }

    // cleanup (cascade delete removes the history as well)
    repositoryService.deleteDeployment(deployment.getId(), true);
  }

  public void testPropagateTenantIdToCandidateStarterGroup() {
    // when
    org.camunda.bpm.engine.repository.Deployment deployment = repositoryService.createDeployment()
        .addClasspathResource(CANDIDATE_STARTER_GROUP)
        .tenantId(TENANT_ONE)
        .deploy();

    // then
    List<HistoricIdentityLinkLog> historicLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(1, historicLinks.size());

    HistoricIdentityLinkLog historicLink = historicLinks.get(0);
    assertNotNull(historicLink.getTenantId());
    assertEquals(TENANT_ONE, historicLink.getTenantId());

    // cleanup (cascade delete removes the history as well)
    repositoryService.deleteDeployment(deployment.getId(), true);
  }

  public void testPropagateTenantIdToCandidateStarterGroups() {
    // when
    org.camunda.bpm.engine.repository.Deployment deployment = repositoryService.createDeployment()
        .addClasspathResource(CANDIDATE_STARTER_GROUPS)
        .tenantId(TENANT_ONE)
        .deploy();

    // then
    List<HistoricIdentityLinkLog> historicLinks = historyService.createHistoricIdentityLinkLogQuery().list();
    assertEquals(3, historicLinks.size());

    for (HistoricIdentityLinkLog historicLink : historicLinks) {
      assertNotNull(historicLink.getTenantId());
      assertEquals(TENANT_ONE, historicLink.getTenantId());
    }

    // cleanup (cascade delete removes the history as well)
    repositoryService.deleteDeployment(deployment.getId(), true);
  }

  /** Starts a process instance for the given process definition key. */
  protected ProcessInstance startProcessInstance(String key) {
    return runtimeService.startProcessInstanceByKey(key);
  }
}
/* * Copyright (C) 2008 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.client.android.encode; import android.app.Activity; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.provider.ContactsContract; import android.telephony.PhoneNumberUtils; import android.util.Log; import com.google.zxing.BarcodeFormat; import com.google.zxing.EncodeHintType; import com.google.zxing.MultiFormatWriter; import com.google.zxing.Result; import com.google.zxing.WriterException; import com.google.zxing.client.android.Contents; import com.google.zxing.client.android.Intents; import com.progost.remotify.R; import com.google.zxing.client.result.AddressBookParsedResult; import com.google.zxing.client.result.ParsedResult; import com.google.zxing.client.result.ResultParser; import com.google.zxing.common.BitMatrix; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumMap; import java.util.Map; /** * This class does the work of decoding the user's request and extracting all the data * to be encoded in a barcode. 
* * @author dswitkin@google.com (Daniel Switkin) */ final class QRCodeEncoder { private static final String TAG = QRCodeEncoder.class.getSimpleName(); private static final int WHITE = 0xFFFFFFFF; private static final int BLACK = 0xFF000000; private final Activity activity; private String contents; private String displayContents; private String title; private BarcodeFormat format; private final int dimension; private final boolean useVCard; QRCodeEncoder(Activity activity, Intent intent, int dimension, boolean useVCard) throws WriterException { this.activity = activity; this.dimension = dimension; this.useVCard = useVCard; String action = intent.getAction(); if (action.equals(Intents.Encode.ACTION)) { encodeContentsFromZXingIntent(intent); } else if (action.equals(Intent.ACTION_SEND)) { encodeContentsFromShareIntent(intent); } } String getContents() { return contents; } String getDisplayContents() { return displayContents; } String getTitle() { return title; } boolean isUseVCard() { return useVCard; } // It would be nice if the string encoding lived in the core ZXing library, // but we use platform specific code like PhoneNumberUtils, so it can't. private boolean encodeContentsFromZXingIntent(Intent intent) { // Default to QR_CODE if no format given. 
String formatString = intent.getStringExtra(Intents.Encode.FORMAT); format = null; if (formatString != null) { try { format = BarcodeFormat.valueOf(formatString); } catch (IllegalArgumentException iae) { // Ignore it then } } if (format == null || format == BarcodeFormat.QR_CODE) { String type = intent.getStringExtra(Intents.Encode.TYPE); if (type == null || type.length() == 0) { return false; } this.format = BarcodeFormat.QR_CODE; encodeQRCodeContents(intent, type); } else { String data = intent.getStringExtra(Intents.Encode.DATA); if (data != null && data.length() > 0) { contents = data; displayContents = data; title = activity.getString(R.string.contents_text); } } return contents != null && contents.length() > 0; } // Handles send intents from multitude of Android applications private void encodeContentsFromShareIntent(Intent intent) throws WriterException { // Check if this is a plain text encoding, or contact if (intent.hasExtra(Intent.EXTRA_STREAM)) { encodeFromStreamExtra(intent); } else { encodeFromTextExtras(intent); } } private void encodeFromTextExtras(Intent intent) throws WriterException { // Notice: Google Maps shares both URL and details in one text, bummer! String theContents = ContactEncoder.trim(intent.getStringExtra(Intent.EXTRA_TEXT)); if (theContents == null) { theContents = ContactEncoder.trim(intent.getStringExtra("android.intent.extra.HTML_TEXT")); // Intent.EXTRA_HTML_TEXT if (theContents == null) { theContents = ContactEncoder.trim(intent.getStringExtra(Intent.EXTRA_SUBJECT)); if (theContents == null) { String[] emails = intent.getStringArrayExtra(Intent.EXTRA_EMAIL); if (emails != null) { theContents = ContactEncoder.trim(emails[0]); } else { theContents = "?"; } } } } // Trim text to avoid URL breaking. if (theContents == null || theContents.length() == 0) { throw new WriterException("Empty EXTRA_TEXT"); } contents = theContents; // We only do QR code. 
format = BarcodeFormat.QR_CODE; if (intent.hasExtra(Intent.EXTRA_SUBJECT)) { displayContents = intent.getStringExtra(Intent.EXTRA_SUBJECT); } else if (intent.hasExtra(Intent.EXTRA_TITLE)) { displayContents = intent.getStringExtra(Intent.EXTRA_TITLE); } else { displayContents = contents; } title = activity.getString(R.string.contents_text); } // Handles send intents from the Contacts app, retrieving a contact as a VCARD. private void encodeFromStreamExtra(Intent intent) throws WriterException { format = BarcodeFormat.QR_CODE; Bundle bundle = intent.getExtras(); if (bundle == null) { throw new WriterException("No extras"); } Uri uri = bundle.getParcelable(Intent.EXTRA_STREAM); if (uri == null) { throw new WriterException("No EXTRA_STREAM"); } byte[] vcard; String vcardString; try { InputStream stream = activity.getContentResolver().openInputStream(uri); ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] buffer = new byte[2048]; int bytesRead; while ((bytesRead = stream.read(buffer)) > 0) { baos.write(buffer, 0, bytesRead); } vcard = baos.toByteArray(); vcardString = new String(vcard, 0, vcard.length, "UTF-8"); } catch (IOException ioe) { throw new WriterException(ioe); } Log.d(TAG, "Encoding share intent content:"); Log.d(TAG, vcardString); Result result = new Result(vcardString, vcard, null, BarcodeFormat.QR_CODE); ParsedResult parsedResult = ResultParser.parseResult(result); if (!(parsedResult instanceof AddressBookParsedResult)) { throw new WriterException("Result was not an address"); } encodeQRCodeContents((AddressBookParsedResult) parsedResult); if (contents == null || contents.length() == 0) { throw new WriterException("No content to encode"); } } private void encodeQRCodeContents(Intent intent, String type) { if (type.equals(Contents.Type.TEXT)) { String data = intent.getStringExtra(Intents.Encode.DATA); if (data != null && data.length() > 0) { contents = data; displayContents = data; title = activity.getString(R.string.contents_text); } } else 
if (type.equals(Contents.Type.EMAIL)) { String data = ContactEncoder.trim(intent.getStringExtra(Intents.Encode.DATA)); if (data != null) { contents = "mailto:" + data; displayContents = data; title = activity.getString(R.string.contents_email); } } else if (type.equals(Contents.Type.PHONE)) { String data = ContactEncoder.trim(intent.getStringExtra(Intents.Encode.DATA)); if (data != null) { contents = "tel:" + data; displayContents = PhoneNumberUtils.formatNumber(data); title = activity.getString(R.string.contents_phone); } } else if (type.equals(Contents.Type.SMS)) { String data = ContactEncoder.trim(intent.getStringExtra(Intents.Encode.DATA)); if (data != null) { contents = "sms:" + data; displayContents = PhoneNumberUtils.formatNumber(data); title = activity.getString(R.string.contents_sms); } } else if (type.equals(Contents.Type.CONTACT)) { Bundle bundle = intent.getBundleExtra(Intents.Encode.DATA); if (bundle != null) { String name = bundle.getString(ContactsContract.Intents.Insert.NAME); String organization = bundle.getString(ContactsContract.Intents.Insert.COMPANY); String address = bundle.getString(ContactsContract.Intents.Insert.POSTAL); Collection<String> phones = new ArrayList<String>(Contents.PHONE_KEYS.length); for (int x = 0; x < Contents.PHONE_KEYS.length; x++) { phones.add(bundle.getString(Contents.PHONE_KEYS[x])); } Collection<String> emails = new ArrayList<String>(Contents.EMAIL_KEYS.length); for (int x = 0; x < Contents.EMAIL_KEYS.length; x++) { emails.add(bundle.getString(Contents.EMAIL_KEYS[x])); } String url = bundle.getString(Contents.URL_KEY); Collection<String> urls = url == null ? null : Collections.singletonList(url); String note = bundle.getString(Contents.NOTE_KEY); ContactEncoder mecardEncoder = useVCard ? 
new VCardContactEncoder() : new MECARDContactEncoder(); String[] encoded = mecardEncoder.encode(Collections.singleton(name), organization, Collections.singleton(address), phones, emails, urls, note); // Make sure we've encoded at least one field. if (encoded[1].length() > 0) { contents = encoded[0]; displayContents = encoded[1]; title = activity.getString(R.string.contents_contact); } } } else if (type.equals(Contents.Type.LOCATION)) { Bundle bundle = intent.getBundleExtra(Intents.Encode.DATA); if (bundle != null) { // These must use Bundle.getFloat(), not getDouble(), it's part of the API. float latitude = bundle.getFloat("LAT", Float.MAX_VALUE); float longitude = bundle.getFloat("LONG", Float.MAX_VALUE); if (latitude != Float.MAX_VALUE && longitude != Float.MAX_VALUE) { contents = "geo:" + latitude + ',' + longitude; displayContents = latitude + "," + longitude; title = activity.getString(R.string.contents_location); } } } } private void encodeQRCodeContents(AddressBookParsedResult contact) { ContactEncoder encoder = useVCard ? new VCardContactEncoder() : new MECARDContactEncoder(); String[] encoded = encoder.encode(toIterable(contact.getNames()), contact.getOrg(), toIterable(contact.getAddresses()), toIterable(contact.getPhoneNumbers()), toIterable(contact.getEmails()), toIterable(contact.getURLs()), null); // Make sure we've encoded at least one field. if (encoded[1].length() > 0) { contents = encoded[0]; displayContents = encoded[1]; title = activity.getString(R.string.contents_contact); } } private static Iterable<String> toIterable(String[] values) { return values == null ? 
null : Arrays.asList(values); } Bitmap encodeAsBitmap() throws WriterException { String contentsToEncode = contents; if (contentsToEncode == null) { return null; } Map<EncodeHintType,Object> hints = null; String encoding = guessAppropriateEncoding(contentsToEncode); if (encoding != null) { hints = new EnumMap<EncodeHintType,Object>(EncodeHintType.class); hints.put(EncodeHintType.CHARACTER_SET, encoding); } BitMatrix result; try { result = new MultiFormatWriter().encode(contentsToEncode, format, dimension, dimension, hints); } catch (IllegalArgumentException iae) { // Unsupported format return null; } int width = result.getWidth(); int height = result.getHeight(); int[] pixels = new int[width * height]; for (int y = 0; y < height; y++) { int offset = y * width; for (int x = 0; x < width; x++) { pixels[offset + x] = result.get(x, y) ? BLACK : WHITE; } } Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); bitmap.setPixels(pixels, 0, width, 0, 0, width, height); return bitmap; } private static String guessAppropriateEncoding(CharSequence contents) { // Very crude at the moment for (int i = 0; i < contents.length(); i++) { if (contents.charAt(i) > 0xFF) { return "UTF-8"; } } return null; } }
package org.apache.ibatis.reflection;

import org.apache.ibatis.reflection.invoker.GetFieldInvoker;
import org.apache.ibatis.reflection.invoker.Invoker;
import org.apache.ibatis.reflection.invoker.MethodInvoker;
import org.apache.ibatis.reflection.invoker.SetFieldInvoker;
import org.apache.ibatis.reflection.property.PropertyNamer;

import java.lang.reflect.*;
import java.util.*;

/**
 * This class represents a cached set of class definition information that
 * allows for easy mapping between property names and getter/setter methods.
 */
public class Reflector {

  private static boolean classCacheEnabled = true;
  private static final String[] EMPTY_STRING_ARRAY = new String[0];
  // Shared cache, guarded by synchronizing on the class being reflected in forClass().
  private static final Map<Class, Reflector> REFLECTOR_MAP = Collections.synchronizedMap(new HashMap<Class, Reflector>());

  private Class type;
  private String[] readablePropertyNames = EMPTY_STRING_ARRAY;
  private String[] writeablePropertyNames = EMPTY_STRING_ARRAY;
  private Map<String, Invoker> setMethods = new HashMap<String, Invoker>();
  private Map<String, Invoker> getMethods = new HashMap<String, Invoker>();
  private Map<String, Class> setTypes = new HashMap<String, Class>();
  private Map<String, Class> getTypes = new HashMap<String, Class>();
  private Constructor defaultConstructor;
  // Maps UPPERCASED property name -> actual property name, for case-insensitive lookup.
  private Map<String, String> caseInsensitivePropertyMap = new HashMap<String, String>();

  private Reflector(Class clazz) {
    type = clazz;
    addDefaultConstructor(clazz);
    addGetMethods(clazz);
    addSetMethods(clazz);
    addFields(clazz);
    readablePropertyNames = getMethods.keySet().toArray(new String[getMethods.keySet().size()]);
    writeablePropertyNames = setMethods.keySet().toArray(new String[setMethods.keySet().size()]);
    for (String propName : readablePropertyNames) {
      caseInsensitivePropertyMap.put(propName.toUpperCase(Locale.ENGLISH), propName);
    }
    for (String propName : writeablePropertyNames) {
      caseInsensitivePropertyMap.put(propName.toUpperCase(Locale.ENGLISH), propName);
    }
  }

  // Records the no-arg constructor, if one exists and is (or can be made) accessible.
  private void addDefaultConstructor(Class clazz) {
    Constructor[] consts = clazz.getDeclaredConstructors();
    for (Constructor constructor : consts) {
      if (constructor.getParameterTypes().length == 0) {
        if (canAccessPrivateMethods()) {
          try {
            constructor.setAccessible(true);
          } catch (Exception e) {
            // Ignored. This is only a final precaution, nothing we can do.
          }
        }
        if (constructor.isAccessible()) {
          this.defaultConstructor = constructor;
        }
      }
    }
  }

  // Registers zero-arg getX()/isX() methods as readable properties.
  private void addGetMethods(Class cls) {
    Method[] methods = getClassMethods(cls);
    for (Method method : methods) {
      String name = method.getName();
      if (name.startsWith("get") && name.length() > 3) {
        if (method.getParameterTypes().length == 0) {
          name = PropertyNamer.methodToProperty(name);
          addGetMethod(name, method);
        }
      } else if (name.startsWith("is") && name.length() > 2) {
        if (method.getParameterTypes().length == 0) {
          name = PropertyNamer.methodToProperty(name);
          addGetMethod(name, method);
        }
      }
    }
  }

  private void addGetMethod(String name, Method method) {
    if (isValidPropertyName(name)) {
      getMethods.put(name, new MethodInvoker(method));
      getTypes.put(name, method.getReturnType());
    }
  }

  // Registers one-arg setX() methods, deferring overload resolution to
  // resolveSetterConflicts().
  private void addSetMethods(Class cls) {
    Map<String, List<Method>> conflictingSetters = new HashMap<String, List<Method>>();
    Method[] methods = getClassMethods(cls);
    for (Method method : methods) {
      String name = method.getName();
      if (name.startsWith("set") && name.length() > 3) {
        if (method.getParameterTypes().length == 1) {
          name = PropertyNamer.methodToProperty(name);
          addSetterConflict(conflictingSetters, name, method);
        }
      }
    }
    resolveSetterConflicts(conflictingSetters);
  }

  private void addSetterConflict(Map<String, List<Method>> conflictingSetters, String name, Method method) {
    List<Method> list = conflictingSetters.get(name);
    if (list == null) {
      list = new ArrayList<Method>();
      conflictingSetters.put(name, list);
    }
    list.add(method);
  }

  // When a property has several one-arg setters, picks the one whose parameter type
  // matches the getter's return type; anything else is ambiguous and rejected.
  private void resolveSetterConflicts(Map<String, List<Method>> conflictingSetters) {
    for (String propName : conflictingSetters.keySet()) {
      List<Method> setters = conflictingSetters.get(propName);
      Method firstMethod = setters.get(0);
      if (setters.size() == 1) {
        addSetMethod(propName, firstMethod);
      } else {
        Class expectedType = getTypes.get(propName);
        if (expectedType == null) {
          throw new ReflectionException("Illegal overloaded setter method with ambiguous type for property "
              + propName + " in class " + firstMethod.getDeclaringClass() + ".  This breaks the JavaBeans "
              + "specification and can cause unpredicatble results.");
        } else {
          Iterator<Method> methods = setters.iterator();
          Method setter = null;
          while (methods.hasNext()) {
            Method method = methods.next();
            if (method.getParameterTypes().length == 1
                && expectedType.equals(method.getParameterTypes()[0])) {
              setter = method;
              break;
            }
          }
          if (setter == null) {
            throw new ReflectionException("Illegal overloaded setter method with ambiguous type for property "
                + propName + " in class " + firstMethod.getDeclaringClass() + ".  This breaks the JavaBeans "
                + "specification and can cause unpredicatble results.");
          }
          addSetMethod(propName, setter);
        }
      }
    }
  }

  private void addSetMethod(String name, Method method) {
    if (isValidPropertyName(name)) {
      setMethods.put(name, new MethodInvoker(method));
      setTypes.put(name, method.getParameterTypes()[0]);
    }
  }

  // Falls back to direct field access for properties without getters/setters,
  // walking up the superclass chain.
  private void addFields(Class clazz) {
    Field[] fields = clazz.getDeclaredFields();
    for (Field field : fields) {
      if (canAccessPrivateMethods()) {
        try {
          field.setAccessible(true);
        } catch (Exception e) {
          // Ignored. This is only a final precaution, nothing we can do.
        }
      }
      if (field.isAccessible()) {
        if (!setMethods.containsKey(field.getName())) {
          // Final fields cannot be written, so no setter fallback for them.
          if (!Modifier.isFinal(field.getModifiers())) {
            addSetField(field);
          }
        }
        if (!getMethods.containsKey(field.getName())) {
          addGetField(field);
        }
      }
    }
    if (clazz.getSuperclass() != null) {
      addFields(clazz.getSuperclass());
    }
  }

  private void addSetField(Field field) {
    if (isValidPropertyName(field.getName())) {
      setMethods.put(field.getName(), new SetFieldInvoker(field));
      setTypes.put(field.getName(), field.getType());
    }
  }

  private void addGetField(Field field) {
    if (isValidPropertyName(field.getName())) {
      getMethods.put(field.getName(), new GetFieldInvoker(field));
      getTypes.put(field.getName(), field.getType());
    }
  }

  // Excludes synthetic ($-prefixed), serialization, and Object.getClass() pseudo-properties.
  private boolean isValidPropertyName(String name) {
    return !(name.startsWith("$") || "serialVersionUID".equals(name) || "class".equals(name));
  }

  /**
   * This method returns an array containing all methods
   * declared in this class and any superclass.
   * We use this method, instead of the simpler Class.getMethods(),
   * because we want to look for private methods as well.
   *
   * @param cls The class
   * @return An array containing all methods in this class
   */
  private Method[] getClassMethods(Class cls) {
    HashMap<String, Method> uniqueMethods = new HashMap<String, Method>();
    Class currentClass = cls;
    while (currentClass != null) {
      addUniqueMethods(uniqueMethods, currentClass.getDeclaredMethods());

      // we also need to look for interface methods -
      // because the class may be abstract
      Class[] interfaces = currentClass.getInterfaces();
      for (Class anInterface : interfaces) {
        addUniqueMethods(uniqueMethods, anInterface.getMethods());
      }

      currentClass = currentClass.getSuperclass();
    }

    Collection<Method> methods = uniqueMethods.values();
    return methods.toArray(new Method[methods.size()]);
  }

  private void addUniqueMethods(HashMap<String, Method> uniqueMethods, Method[] methods) {
    for (Method currentMethod : methods) {
      if (!currentMethod.isBridge()) {
        String signature = getSignature(currentMethod);
        // check to see if the method is already known
        // if it is known, then an extended class must have
        // overridden a method
        if (!uniqueMethods.containsKey(signature)) {
          if (canAccessPrivateMethods()) {
            try {
              currentMethod.setAccessible(true);
            } catch (Exception e) {
              // Ignored. This is only a final precaution, nothing we can do.
            }
          }
          uniqueMethods.put(signature, currentMethod);
        }
      }
    }
  }

  // Builds a "name:param1,param2" key used to de-duplicate overridden methods.
  // StringBuilder replaces StringBuffer: this is method-local, so no synchronization needed.
  private String getSignature(Method method) {
    StringBuilder sb = new StringBuilder();
    sb.append(method.getName());
    Class[] parameters = method.getParameterTypes();
    for (int i = 0; i < parameters.length; i++) {
      if (i == 0) {
        sb.append(':');
      } else {
        sb.append(',');
      }
      sb.append(parameters[i].getName());
    }
    return sb.toString();
  }

  // True unless a SecurityManager forbids suppressAccessChecks.
  private static boolean canAccessPrivateMethods() {
    try {
      SecurityManager securityManager = System.getSecurityManager();
      if (null != securityManager) {
        securityManager.checkPermission(new ReflectPermission("suppressAccessChecks"));
      }
    } catch (SecurityException e) {
      return false;
    }
    return true;
  }

  /**
   * Gets the name of the class the instance provides information for
   *
   * @return The class name
   */
  public Class getType() {
    return type;
  }

  public Constructor getDefaultConstructor() {
    if (defaultConstructor != null) {
      return defaultConstructor;
    } else {
      throw new ReflectionException("There is no default constructor for " + type);
    }
  }

  public Invoker getSetInvoker(String propertyName) {
    Invoker method = setMethods.get(propertyName);
    if (method == null) {
      throw new ReflectionException("There is no setter for property named '" + propertyName + "' in '" + type + "'");
    }
    return method;
  }

  public Invoker getGetInvoker(String propertyName) {
    Invoker method = getMethods.get(propertyName);
    if (method == null) {
      throw new ReflectionException("There is no getter for property named '" + propertyName + "' in '" + type + "'");
    }
    return method;
  }

  /**
   * Gets the type for a property setter
   *
   * @param propertyName - the name of the property
   * @return The Class of the property setter
   */
  public Class getSetterType(String propertyName) {
    Class clazz = setTypes.get(propertyName);
    if (clazz == null) {
      throw new ReflectionException("There is no setter for property named '" + propertyName + "' in '" + type + "'");
    }
    return clazz;
  }

  /**
   * Gets the type for a property getter
   *
   * @param propertyName - the name of the property
   * @return The Class of the property getter
   */
  public Class getGetterType(String propertyName) {
    Class clazz = getTypes.get(propertyName);
    if (clazz == null) {
      throw new ReflectionException("There is no getter for property named '" + propertyName + "' in '" + type + "'");
    }
    return clazz;
  }

  /**
   * Gets an array of the readable properties for an object
   *
   * @return The array
   */
  public String[] getGetablePropertyNames() {
    return readablePropertyNames;
  }

  /**
   * Gets an array of the writeable properties for an object
   *
   * @return The array
   */
  public String[] getSetablePropertyNames() {
    return writeablePropertyNames;
  }

  /**
   * Check to see if a class has a writeable property by name
   *
   * @param propertyName - the name of the property to check
   * @return True if the object has a writeable property by the name
   */
  public boolean hasSetter(String propertyName) {
    return setMethods.containsKey(propertyName);
  }

  /**
   * Check to see if a class has a readable property by name
   *
   * @param propertyName - the name of the property to check
   * @return True if the object has a readable property by the name
   */
  public boolean hasGetter(String propertyName) {
    return getMethods.containsKey(propertyName);
  }

  public String findPropertyName(String name) {
    return caseInsensitivePropertyMap.get(name.toUpperCase(Locale.ENGLISH));
  }

  /**
   * Gets an instance of ClassInfo for the specified class.
   *
   * @param clazz The class for which to lookup the method cache.
   * @return The method cache for the class
   */
  public static Reflector forClass(Class clazz) {
    if (classCacheEnabled) {
      // Synchronizing on the Class object keeps construction for a given class single-threaded.
      synchronized (clazz) {
        Reflector cached = REFLECTOR_MAP.get(clazz);
        if (cached == null) {
          cached = new Reflector(clazz);
          REFLECTOR_MAP.put(clazz, cached);
        }
        return cached;
      }
    } else {
      return new Reflector(clazz);
    }
  }

  public static void setClassCacheEnabled(boolean classCacheEnabled) {
    Reflector.classCacheEnabled = classCacheEnabled;
  }

  public static boolean isClassCacheEnabled() {
    return classCacheEnabled;
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.sql.tree;

import javax.annotation.Nullable;

/**
 * Base visitor over the SQL AST node hierarchy.
 *
 * <p>Every {@code visitX} hook defaults to delegating to the hook for the node's
 * supertype (e.g. {@code visitDoubleLiteral} -> {@code visitLiteral} ->
 * {@code visitExpression} -> {@code visitNode}), so a subclass may override at any
 * level of the hierarchy and still receive all more-specific nodes. The root hook,
 * {@link #visitNode}, returns {@code null}.
 *
 * @param <R> result type produced by each visit
 * @param <C> caller-supplied context threaded through the traversal (may be null)
 */
public abstract class AstVisitor<R, C>
{
    /** Entry point: double-dispatches through {@code node.accept}, which calls back the matching visitX hook. */
    public R process(Node node, @Nullable C context)
    {
        return node.accept(this, context);
    }

    /** Root of the delegation chain; default result for any unhandled node. */
    protected R visitNode(Node node, C context) { return null; }

    // ---- Expression hierarchy -> visitExpression -> visitNode ----

    protected R visitExpression(Expression node, C context) { return visitNode(node, context); }

    protected R visitCurrentTime(CurrentTime node, C context) { return visitExpression(node, context); }

    protected R visitExtract(Extract node, C context) { return visitExpression(node, context); }

    protected R visitArithmeticBinary(ArithmeticBinaryExpression node, C context) { return visitExpression(node, context); }

    protected R visitBetweenPredicate(BetweenPredicate node, C context) { return visitExpression(node, context); }

    protected R visitCoalesceExpression(CoalesceExpression node, C context) { return visitExpression(node, context); }

    protected R visitComparisonExpression(ComparisonExpression node, C context) { return visitExpression(node, context); }

    // ---- Literals -> visitLiteral -> visitExpression ----

    protected R visitLiteral(Literal node, C context) { return visitExpression(node, context); }

    protected R visitDoubleLiteral(DoubleLiteral node, C context) { return visitLiteral(node, context); }

    // ---- Statements -> visitStatement -> visitNode ----

    protected R visitStatement(Statement node, C context) { return visitNode(node, context); }

    protected R visitQuery(Query node, C context) { return visitStatement(node, context); }

    protected R visitExplain(Explain node, C context) { return visitStatement(node, context); }

    protected R visitShowTables(ShowTables node, C context) { return visitStatement(node, context); }

    protected R visitShowSchemas(ShowSchemas node, C context) { return visitStatement(node, context); }

    protected R visitShowCatalogs(ShowCatalogs node, C context) { return visitStatement(node, context); }

    protected R visitShowColumns(ShowColumns node, C context) { return visitStatement(node, context); }

    protected R visitShowPartitions(ShowPartitions node, C context) { return visitStatement(node, context); }

    protected R visitShowFunctions(ShowFunctions node, C context) { return visitStatement(node, context); }

    protected R visitUse(Use node, C context) { return visitStatement(node, context); }

    protected R visitShowSession(ShowSession node, C context) { return visitStatement(node, context); }

    protected R visitSetSession(SetSession node, C context) { return visitStatement(node, context); }

    // NOTE(review): declared public while every sibling hook is protected — looks like an
    // oversight, but narrowing to protected would break any external caller, so it is left as-is.
    public R visitResetSession(ResetSession node, C context) { return visitStatement(node, context); }

    protected R visitGenericLiteral(GenericLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitTimeLiteral(TimeLiteral node, C context) { return visitLiteral(node, context); }

    // ---- Query structure helpers -> visitNode ----

    protected R visitExplainOption(ExplainOption node, C context) { return visitNode(node, context); }

    protected R visitWith(With node, C context) { return visitNode(node, context); }

    protected R visitApproximate(Approximate node, C context) { return visitNode(node, context); }

    protected R visitWithQuery(WithQuery node, C context) { return visitNode(node, context); }

    protected R visitSelect(Select node, C context) { return visitNode(node, context); }

    // ---- Relations -> visitRelation -> visitNode; query bodies -> visitQueryBody -> visitRelation ----

    protected R visitRelation(Relation node, C context) { return visitNode(node, context); }

    protected R visitQueryBody(QueryBody node, C context) { return visitRelation(node, context); }

    protected R visitQuerySpecification(QuerySpecification node, C context) { return visitQueryBody(node, context); }

    protected R visitSetOperation(SetOperation node, C context) { return visitQueryBody(node, context); }

    protected R visitUnion(Union node, C context) { return visitSetOperation(node, context); }

    protected R visitIntersect(Intersect node, C context) { return visitSetOperation(node, context); }

    protected R visitExcept(Except node, C context) { return visitSetOperation(node, context); }

    protected R visitTimestampLiteral(TimestampLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitWhenClause(WhenClause node, C context) { return visitExpression(node, context); }

    protected R visitIntervalLiteral(IntervalLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitInPredicate(InPredicate node, C context) { return visitExpression(node, context); }

    protected R visitFunctionCall(FunctionCall node, C context) { return visitExpression(node, context); }

    protected R visitLambdaExpression(LambdaExpression node, C context) { return visitExpression(node, context); }

    protected R visitSimpleCaseExpression(SimpleCaseExpression node, C context) { return visitExpression(node, context); }

    protected R visitStringLiteral(StringLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitBinaryLiteral(BinaryLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitBooleanLiteral(BooleanLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitInListExpression(InListExpression node, C context) { return visitExpression(node, context); }

    protected R visitQualifiedNameReference(QualifiedNameReference node, C context) { return visitExpression(node, context); }

    protected R visitDereferenceExpression(DereferenceExpression node, C context) { return visitExpression(node, context); }

    protected R visitNullIfExpression(NullIfExpression node, C context) { return visitExpression(node, context); }

    protected R visitIfExpression(IfExpression node, C context) { return visitExpression(node, context); }

    protected R visitNullLiteral(NullLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitArithmeticUnary(ArithmeticUnaryExpression node, C context) { return visitExpression(node, context); }

    protected R visitNotExpression(NotExpression node, C context) { return visitExpression(node, context); }

    // ---- Select items -> visitSelectItem -> visitNode ----

    protected R visitSelectItem(SelectItem node, C context) { return visitNode(node, context); }

    protected R visitSingleColumn(SingleColumn node, C context) { return visitSelectItem(node, context); }

    protected R visitAllColumns(AllColumns node, C context) { return visitSelectItem(node, context); }

    protected R visitSearchedCaseExpression(SearchedCaseExpression node, C context) { return visitExpression(node, context); }

    protected R visitLikePredicate(LikePredicate node, C context) { return visitExpression(node, context); }

    protected R visitIsNotNullPredicate(IsNotNullPredicate node, C context) { return visitExpression(node, context); }

    protected R visitIsNullPredicate(IsNullPredicate node, C context) { return visitExpression(node, context); }

    protected R visitArrayConstructor(ArrayConstructor node, C context) { return visitExpression(node, context); }

    protected R visitSubscriptExpression(SubscriptExpression node, C context) { return visitExpression(node, context); }

    protected R visitLongLiteral(LongLiteral node, C context) { return visitLiteral(node, context); }

    protected R visitLogicalBinaryExpression(LogicalBinaryExpression node, C context) { return visitExpression(node, context); }

    protected R visitSubqueryExpression(SubqueryExpression node, C context) { return visitExpression(node, context); }

    protected R visitSortItem(SortItem node, C context) { return visitNode(node, context); }

    protected R visitTable(Table node, C context) { return visitQueryBody(node, context); }

    protected R visitUnnest(Unnest node, C context) { return visitRelation(node, context); }

    protected R visitValues(Values node, C context) { return visitQueryBody(node, context); }

    protected R visitRow(Row node, C context) { return visitNode(node, context); }

    protected R visitTableSubquery(TableSubquery node, C context) { return visitQueryBody(node, context); }

    protected R visitAliasedRelation(AliasedRelation node, C context) { return visitRelation(node, context); }

    protected R visitSampledRelation(SampledRelation node, C context) { return visitRelation(node, context); }

    protected R visitJoin(Join node, C context) { return visitRelation(node, context); }

    protected R visitExists(ExistsPredicate node, C context) { return visitExpression(node, context); }

    protected R visitCast(Cast node, C context) { return visitExpression(node, context); }

    protected R visitInputReference(InputReference node, C context) { return visitExpression(node, context); }

    // ---- Window / DDL / DML and transaction statements ----

    protected R visitWindow(Window node, C context) { return visitNode(node, context); }

    protected R visitWindowFrame(WindowFrame node, C context) { return visitNode(node, context); }

    protected R visitFrameBound(FrameBound node, C context) { return visitNode(node, context); }

    protected R visitCallArgument(CallArgument node, C context) { return visitNode(node, context); }

    protected R visitTableElement(TableElement node, C context) { return visitNode(node, context); }

    protected R visitCreateTable(CreateTable node, C context) { return visitStatement(node, context); }

    protected R visitCreateTableAsSelect(CreateTableAsSelect node, C context) { return visitStatement(node, context); }

    protected R visitDropTable(DropTable node, C context) { return visitStatement(node, context); }

    protected R visitRenameTable(RenameTable node, C context) { return visitStatement(node, context); }

    protected R visitRenameColumn(RenameColumn node, C context) { return visitStatement(node, context); }

    protected R visitAddColumn(AddColumn node, C context) { return visitStatement(node, context); }

    protected R visitCreateView(CreateView node, C context) { return visitStatement(node, context); }

    protected R visitDropView(DropView node, C context) { return visitStatement(node, context); }

    // NOTE(review): Insert and Call delegate to visitNode rather than visitStatement,
    // unlike the other statement hooks — confirm whether that asymmetry is intentional.
    protected R visitInsert(Insert node, C context) { return visitNode(node, context); }

    protected R visitCall(Call node, C context) { return visitNode(node, context); }

    protected R visitDelete(Delete node, C context) { return visitStatement(node, context); }

    protected R visitStartTransaction(StartTransaction node, C context) { return visitStatement(node, context); }

    protected R visitTransactionMode(TransactionMode node, C context) { return visitNode(node, context); }

    // NOTE(review): hook name says "IsolationLevel" but the parameter type is Isolation;
    // renaming would break subclass overrides, so the mismatch is only flagged here.
    protected R visitIsolationLevel(Isolation node, C context) { return visitTransactionMode(node, context); }

    protected R visitTransactionAccessMode(TransactionAccessMode node, C context) { return visitTransactionMode(node, context); }

    protected R visitCommit(Commit node, C context) { return visitStatement(node, context); }

    protected R visitRollback(Rollback node, C context) { return visitStatement(node, context); }
}
/*
 * Copyright (c) 2016-2021 VMware Inc. or its affiliates, All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package reactor.core.publisher;

import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.function.BooleanSupplier;
import java.util.function.Supplier;

import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.util.annotation.Nullable;
import reactor.util.context.Context;

/**
 * Buffers a certain number of subsequent elements and emits the buffers.
 *
 * <p>Three strategies are chosen in {@link #subscribeOrReturn} based on
 * {@code size} vs {@code skip}: exact (size == skip), skipping with gaps
 * (skip &gt; size) and overlapping buffers (skip &lt; size).
 *
 * @param <T> the source value type
 * @param <C> the buffer collection type
 *
 * @see <a href="https://github.com/reactor/reactive-streams-commons">Reactive-Streams-Commons</a>
 */
final class FluxBuffer<T, C extends Collection<? super T>> extends InternalFluxOperator<T, C> {

	// number of elements per emitted buffer
	final int size;

	// distance (in source elements) between the starts of two consecutive buffers
	final int skip;

	// produces a fresh, empty collection for each new buffer
	final Supplier<C> bufferSupplier;

	/** Convenience constructor for exact, non-overlapping buffers (skip == size). */
	FluxBuffer(Flux<? extends T> source, int size, Supplier<C> bufferSupplier) {
		this(source, size, size, bufferSupplier);
	}

	/**
	 * @throws IllegalArgumentException if {@code size} or {@code skip} is not strictly positive
	 * @throws NullPointerException if {@code bufferSupplier} is null
	 */
	FluxBuffer(Flux<? extends T> source, int size, int skip, Supplier<C> bufferSupplier) {
		super(source);
		if (size <= 0) {
			throw new IllegalArgumentException("size > 0 required but it was " + size);
		}

		if (skip <= 0) {
			throw new IllegalArgumentException("skip > 0 required but it was " + skip);
		}

		this.size = size;
		this.skip = skip;
		this.bufferSupplier = Objects.requireNonNull(bufferSupplier, "bufferSupplier");
	}

	/** Selects the subscriber implementation matching the size/skip relationship. */
	@Override
	public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super C> actual) {
		if (size == skip) {
			return new BufferExactSubscriber<>(actual, size, bufferSupplier);
		}
		else if (skip > size) {
			return new BufferSkipSubscriber<>(actual, size, skip, bufferSupplier);
		}
		else {
			return new BufferOverlappingSubscriber<>(actual, size, skip, bufferSupplier);
		}
	}

	@Override
	public Object scanUnsafe(Attr key) {
		if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
		return super.scanUnsafe(key);
	}

	/**
	 * Subscriber for the exact case (size == skip): every element lands in exactly
	 * one buffer, and a buffer is emitted as soon as it reaches {@code size} elements.
	 */
	static final class BufferExactSubscriber<T, C extends Collection<? super T>>
			implements InnerOperator<T, C> {

		final CoreSubscriber<? super C> actual;

		final Supplier<C> bufferSupplier;

		final int size;

		// buffer currently being filled; null between buffers
		C buffer;

		Subscription s;

		boolean done;

		BufferExactSubscriber(CoreSubscriber<? super C> actual, int size, Supplier<C> bufferSupplier) {
			this.actual = actual;
			this.size = size;
			this.bufferSupplier = bufferSupplier;
		}

		@Override
		public void request(long n) {
			if (Operators.validate(n)) {
				// each downstream buffer consumes `size` upstream elements
				s.request(Operators.multiplyCap(n, size));
			}
		}

		@Override
		public void cancel() {
			s.cancel();
			// let the discard hook see elements trapped in the partial buffer
			Operators.onDiscardMultiple(buffer, actual.currentContext());
		}

		@Override
		public void onSubscribe(Subscription s) {
			if (Operators.validate(this.s, s)) {
				this.s = s;

				actual.onSubscribe(this);
			}
		}

		@Override
		public void onNext(T t) {
			if (done) {
				Operators.onNextDropped(t, actual.currentContext());
				return;
			}

			C b = buffer;
			if (b == null) {
				try {
					b = Objects.requireNonNull(bufferSupplier.get(),
							"The bufferSupplier returned a null buffer");
				}
				catch (Throwable e) {
					Context ctx = actual.currentContext();
					onError(Operators.onOperatorError(s, e, t, ctx));
					Operators.onDiscard(t, ctx); //this is in no buffer
					return;
				}
				buffer = b;
			}

			b.add(t);

			if (b.size() == size) {
				buffer = null;
				actual.onNext(b);
			}
		}

		@Override
		public void onError(Throwable t) {
			if (done) {
				Operators.onErrorDropped(t, actual.currentContext());
				return;
			}
			done = true;
			actual.onError(t);
			// discard whatever was buffered but never emitted
			Operators.onDiscardMultiple(buffer, actual.currentContext());
		}

		@Override
		public void onComplete() {
			if (done) {
				return;
			}
			done = true;

			// flush the trailing partial buffer, if any
			C b = buffer;

			if (b != null && !b.isEmpty()) {
				actual.onNext(b);
			}
			actual.onComplete();
		}

		@Override
		public CoreSubscriber<? super C> actual() {
			return actual;
		}

		@Override
		@Nullable
		public Object scanUnsafe(Attr key) {
			if (key == Attr.PARENT) return s;
			if (key == Attr.TERMINATED) return done;
			if (key == Attr.BUFFERED) {
				C b = buffer;
				return b != null ? b.size() : 0;
			}
			if (key == Attr.CAPACITY) return size;
			if (key == Attr.PREFETCH) return size;
			if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;

			return InnerOperator.super.scanUnsafe(key);
		}
	}

	/**
	 * Subscriber for the skipping case (skip &gt; size): a new buffer is opened every
	 * {@code skip} elements, filled for {@code size} elements, and elements in the
	 * gap between buffers are discarded.
	 */
	static final class BufferSkipSubscriber<T, C extends Collection<? super T>>
			implements InnerOperator<T, C> {

		final CoreSubscriber<? super C> actual;
		final Context ctx;

		final Supplier<C> bufferSupplier;

		final int size;

		final int skip;

		// buffer currently being filled; null while inside a gap
		C buffer;

		Subscription s;

		boolean done;

		// count of source elements seen so far; buffer boundaries at index % skip == 0
		long index;

		// guards the one-time "first request" fast path below
		volatile int wip;
		@SuppressWarnings("rawtypes")
		static final AtomicIntegerFieldUpdater<BufferSkipSubscriber> WIP =
				AtomicIntegerFieldUpdater.newUpdater(BufferSkipSubscriber.class, "wip");

		BufferSkipSubscriber(CoreSubscriber<? super C> actual,
				int size,
				int skip,
				Supplier<C> bufferSupplier) {
			this.actual = actual;
			this.ctx = actual.currentContext();
			this.size = size;
			this.skip = skip;
			this.bufferSupplier = bufferSupplier;
		}

		@Override
		public void request(long n) {
			if (!Operators.validate(n)) {
				return;
			}

			if (wip == 0 && WIP.compareAndSet(this, 0, 1)) {
				// the very first request needs no leading gap:
				// n full buffers
				long u = Operators.multiplyCap(n, size);
				// + (n - 1) gaps
				long v = Operators.multiplyCap(skip - size, n - 1);
				s.request(Operators.addCap(u, v));
			}
			else {
				// n full buffer + gap
				s.request(Operators.multiplyCap(skip, n));
			}
		}

		@Override
		public void cancel() {
			s.cancel();
			Operators.onDiscardMultiple(buffer, this.ctx);
		}

		@Override
		public void onSubscribe(Subscription s) {
			if (Operators.validate(this.s, s)) {
				this.s = s;

				actual.onSubscribe(this);
			}
		}

		@Override
		public void onNext(T t) {
			if (done) {
				Operators.onNextDropped(t, this.ctx);
				return;
			}

			C b = buffer;

			long i = index;

			// a new buffer opens every `skip` elements
			if (i % skip == 0L) {
				try {
					b = Objects.requireNonNull(bufferSupplier.get(),
							"The bufferSupplier returned a null buffer");
				}
				catch (Throwable e) {
					onError(Operators.onOperatorError(s, e, t, this.ctx));
					Operators.onDiscard(t, this.ctx); //t hasn't got a chance to end up in any buffer
					return;
				}
				buffer = b;
			}

			if (b != null) {
				b.add(t);
				if (b.size() == size) {
					buffer = null;
					actual.onNext(b);
				}
			}
			else {
				//dropping an element that falls inside a gap
				Operators.onDiscard(t, this.ctx);
			}
			index = i + 1;
		}

		@Override
		public void onError(Throwable t) {
			if (done) {
				Operators.onErrorDropped(t, this.ctx);
				return;
			}

			done = true;
			C b = buffer;
			buffer = null;

			actual.onError(t);
			Operators.onDiscardMultiple(b, this.ctx);
		}

		@Override
		public void onComplete() {
			if (done) {
				return;
			}

			done = true;
			// flush the trailing partial buffer, if any
			C b = buffer;
			buffer = null;

			if (b != null) {
				actual.onNext(b);
			}

			actual.onComplete();
		}

		@Override
		public CoreSubscriber<? super C> actual() {
			return actual;
		}

		@Override
		@Nullable
		public Object scanUnsafe(Attr key) {
			if (key == Attr.PARENT) return s;
			if (key == Attr.TERMINATED) return done;
			if (key == Attr.CAPACITY) return size;
			if (key == Attr.BUFFERED) {
				C b = buffer;
				return b != null ? b.size() : 0;
			}
			if (key == Attr.PREFETCH) return size;
			if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;

			return InnerOperator.super.scanUnsafe(key);
		}
	}

	/**
	 * Subscriber for the overlapping case (skip &lt; size): a new buffer is opened
	 * every {@code skip} elements while older ones are still filling, so each
	 * element is added to all currently-open buffers. Extends {@link ArrayDeque}
	 * to hold the open buffers; the {@link BooleanSupplier} view exposes the
	 * cancelled flag to {@code DrainUtils} for the post-complete drain protocol.
	 */
	static final class BufferOverlappingSubscriber<T, C extends Collection<? super T>>
			extends ArrayDeque<C>
			implements BooleanSupplier, InnerOperator<T, C> {

		final CoreSubscriber<? super C> actual;

		final Supplier<C> bufferSupplier;

		final int size;

		final int skip;

		Subscription s;

		boolean done;

		// count of source elements seen; new buffer opened when index % skip == 0
		long index;

		volatile boolean cancelled;

		// buffers emitted before completion, to reconcile REQUESTED in onComplete
		long produced;

		// one-time flag distinguishing the first request (needs a full leading buffer)
		volatile int once;
		@SuppressWarnings("rawtypes")
		static final AtomicIntegerFieldUpdater<BufferOverlappingSubscriber> ONCE =
				AtomicIntegerFieldUpdater.newUpdater(BufferOverlappingSubscriber.class, "once");

		volatile long requested;
		@SuppressWarnings("rawtypes")
		static final AtomicLongFieldUpdater<BufferOverlappingSubscriber> REQUESTED =
				AtomicLongFieldUpdater.newUpdater(BufferOverlappingSubscriber.class, "requested");

		BufferOverlappingSubscriber(CoreSubscriber<? super C> actual,
				int size,
				int skip,
				Supplier<C> bufferSupplier) {
			this.actual = actual;
			this.size = size;
			this.skip = skip;
			this.bufferSupplier = bufferSupplier;
		}

		/** Exposes the cancelled flag for DrainUtils. */
		@Override
		public boolean getAsBoolean() {
			return cancelled;
		}

		@Override
		public void request(long n) {

			if (!Operators.validate(n)) {
				return;
			}

			// if already in the post-complete drain phase, the request is satisfied
			// from the queued buffers and must not reach upstream
			if (DrainUtils.postCompleteRequest(n, actual, this, REQUESTED, this, this)) {
				return;
			}

			if (once == 0 && ONCE.compareAndSet(this, 0, 1)) {
				// (n - 1) skips
				long u = Operators.multiplyCap(skip, n - 1);

				// + 1 full buffer
				long r = Operators.addCap(size, u);
				s.request(r);
			}
			else {
				// n skips
				long r = Operators.multiplyCap(skip, n);
				s.request(r);
			}
		}

		@Override
		public void cancel() {
			cancelled = true;
			s.cancel();
			clear();
		}

		@Override
		public void onSubscribe(Subscription s) {
			if (Operators.validate(this.s, s)) {
				this.s = s;

				actual.onSubscribe(this);
			}
		}

		@Override
		public void onNext(T t) {
			if (done) {
				Operators.onNextDropped(t, actual.currentContext());
				return;
			}

			long i = index;

			if (i % skip == 0L) {
				C b;

				try {
					b = Objects.requireNonNull(bufferSupplier.get(),
							"The bufferSupplier returned a null buffer");
				}
				catch (Throwable e) {
					Context ctx = actual.currentContext();
					onError(Operators.onOperatorError(s, e, t, ctx));
					Operators.onDiscard(t, ctx); //didn't get a chance to be added to a buffer
					return;
				}

				offer(b);
			}

			// the oldest buffer completes first; emit it before adding t to the rest
			C b = peek();

			if (b != null && b.size() + 1 == size) {
				poll();

				b.add(t);

				actual.onNext(b);

				produced++;
			}

			// every still-open buffer receives the element
			for (C b0 : this) {
				b0.add(t);
			}

			index = i + 1;
		}

		@Override
		public void onError(Throwable t) {
			if (done) {
				Operators.onErrorDropped(t, actual.currentContext());
				return;
			}

			done = true;
			clear();

			actual.onError(t);
		}

		@Override
		public void clear() {
			// discard hook must see every element still held in open buffers
			Context ctx = actual.currentContext();
			for(C b: this) {
				Operators.onDiscardMultiple(b, ctx);
			}
			super.clear();
		}

		@Override
		public void onComplete() {
			if (done) {
				return;
			}

			done = true;
			long p = produced;
			if (p != 0L) {
				Operators.produced(REQUESTED,this, p);
			}
			// drain the remaining (partial) buffers respecting downstream demand
			DrainUtils.postComplete(actual, this, REQUESTED, this, this);
		}

		@Override
		public CoreSubscriber<? super C> actual() {
			return actual;
		}

		@Override
		@Nullable
		public Object scanUnsafe(Attr key) {
			if (key == Attr.PARENT) return s;
			if (key == Attr.TERMINATED) return done;
			if (key == Attr.CANCELLED) return cancelled;
			if (key == Attr.CAPACITY) return size() * size;
			if (key == Attr.BUFFERED) return stream().mapToInt(Collection::size).sum();
			if (key == Attr.PREFETCH) return Integer.MAX_VALUE;
			if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;
			if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;

			return InnerOperator.super.scanUnsafe(key);
		}
	}
}
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package org.unitime.timetable.action; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.Vector; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessages; import org.apache.struts.util.MessageResources; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.unitime.commons.Debug; import org.unitime.commons.web.WebTable; import org.unitime.localization.impl.Localization; import org.unitime.timetable.defaults.ApplicationProperty; import org.unitime.timetable.defaults.CommonValues; import org.unitime.timetable.defaults.UserProperty; import org.unitime.timetable.form.InstructorEditForm; import org.unitime.timetable.gwt.resources.GwtConstants; import org.unitime.timetable.interfaces.ExternalUidLookup.UserInfo; import org.unitime.timetable.model.Assignment; 
import org.unitime.timetable.model.ClassInstructor; import org.unitime.timetable.model.Class_; import org.unitime.timetable.model.Department; import org.unitime.timetable.model.DepartmentalInstructor; import org.unitime.timetable.model.Event; import org.unitime.timetable.model.Meeting; import org.unitime.timetable.model.Preference; import org.unitime.timetable.model.TimePattern; import org.unitime.timetable.model.TimePref; import org.unitime.timetable.model.Event.MultiMeeting; import org.unitime.timetable.model.comparators.ClassComparator; import org.unitime.timetable.model.comparators.ClassInstructorComparator; import org.unitime.timetable.model.dao.DepartmentalInstructorDAO; import org.unitime.timetable.model.dao.EventDAO; import org.unitime.timetable.security.SessionContext; import org.unitime.timetable.security.rights.Right; import org.unitime.timetable.solver.ClassAssignmentProxy; import org.unitime.timetable.solver.SolverProxy; import org.unitime.timetable.solver.interactive.ClassAssignmentDetails; import org.unitime.timetable.solver.service.AssignmentService; import org.unitime.timetable.solver.service.SolverService; import org.unitime.timetable.util.Constants; import org.unitime.timetable.util.DefaultRoomAvailabilityService; import org.unitime.timetable.util.Formats; import org.unitime.timetable.util.LookupTables; import org.unitime.timetable.util.NameFormat; import org.unitime.timetable.util.RoomAvailability; import org.unitime.timetable.webutil.BackTracker; import org.unitime.timetable.webutil.Navigation; /** * MyEclipse Struts * Creation date: 07-18-2006 * * XDoclet definition: * @struts.action path="/instructorDetail" name="instructorEditForm" input="/user/instructorDetail.jsp" scope="request" * * @author Tomas Muller, Stephanie Schluttenhofer */ @Service("/instructorDetail") public class InstructorDetailAction extends PreferencesAction { public static GwtConstants CONSTANTS = Localization.create(GwtConstants.class); @Autowired SessionContext 
sessionContext; @Autowired AssignmentService<ClassAssignmentProxy> classAssignmentService; @Autowired SolverService<SolverProxy> courseTimetablingSolverService; // --------------------------------------------------------- Instance Variables // --------------------------------------------------------- Methods /** * Method execute * @param mapping * @param form * @param request * @param response * @return ActionForward */ public ActionForward execute( ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { try { // Set common lookup tables super.execute(mapping, form, request, response); InstructorEditForm frm = (InstructorEditForm) form; MessageResources rsc = getResources(request); ActionMessages errors = new ActionMessages(); //Read parameters String instructorId = (request.getParameter("instructorId")==null) ? (request.getAttribute("instructorId")==null) ? null : request.getAttribute("instructorId").toString() : request.getParameter("instructorId"); sessionContext.checkPermission(instructorId, "DepartmentalInstructor", Right.InstructorDetail); String op = frm.getOp(); // boolean timeVertical = CommonValues.VerticalGrid.eq(sessionContext.getUser().getProperty(UserProperty.GridOrientation)); if (request.getParameter("op2")!=null && request.getParameter("op2").length()>0) op = request.getParameter("op2"); //Check op exists if(op==null) throw new Exception ("Null Operation not supported."); // Read instructor id from form if(op.equals(rsc.getMessage("button.editInstructorInfo")) || op.equals(rsc.getMessage("button.editInstructorPref")) || op.equals(rsc.getMessage("button.backToInstructorList")) || op.equals(rsc.getMessage("button.displayPrefs")) || op.equals(rsc.getMessage("button.nextInstructor")) || op.equals(rsc.getMessage("button.previousInstructor")) ) { instructorId = frm.getInstructorId(); }else { frm.reset(mapping, request); } Debug.debug("op: " + op); Debug.debug("instructor: " + instructorId); 
//Check instructor exists if(instructorId==null || instructorId.trim()=="") throw new Exception ("Instructor Info not supplied."); // Cancel - Go back to Instructors List Screen if(op.equals(rsc.getMessage("button.backToInstructorList")) && instructorId!=null && instructorId.trim()!="") { response.sendRedirect( response.encodeURL("instructorList.do")); return null; } // If subpart id is not null - load subpart info DepartmentalInstructorDAO idao = new DepartmentalInstructorDAO(); DepartmentalInstructor inst = idao.get(new Long(instructorId)); //Edit Information - Redirect to info edit screen if(op.equals(rsc.getMessage("button.editInstructorInfo")) && instructorId!=null && instructorId.trim()!="") { sessionContext.checkPermission(instructorId, "DepartmentalInstructor", Right.InstructorEdit); response.sendRedirect( response.encodeURL("instructorInfoEdit.do?instructorId="+instructorId) ); return null; } // Edit Preference - Redirect to prefs edit screen if(op.equals(rsc.getMessage("button.editInstructorPref")) && instructorId!=null && instructorId.trim()!="") { sessionContext.checkPermission(instructorId, "DepartmentalInstructor", Right.InstructorPreferences); response.sendRedirect( response.encodeURL("instructorPrefEdit.do?instructorId="+instructorId) ); return null; } if (op.equals(rsc.getMessage("button.nextInstructor"))) { response.sendRedirect(response.encodeURL("instructorDetail.do?instructorId="+frm.getNextId())); return null; } if (op.equals(rsc.getMessage("button.previousInstructor"))) { response.sendRedirect(response.encodeURL("instructorDetail.do?instructorId="+frm.getPreviousId())); return null; } // Load form attributes that are constant doLoad(request, frm, inst, instructorId); BackTracker.markForBack( request, "instructorDetail.do?instructorId=" + instructorId, "Instructor ("+ (frm.getName()==null?"null":frm.getName().trim()) +")", true, false); //load class assignments Set allClasses = new HashSet(); for (Iterator 
i=DepartmentalInstructor.getAllForInstructor(inst, inst.getDepartment().getSession().getUniqueId()).iterator();i.hasNext();) { DepartmentalInstructor di = (DepartmentalInstructor)i.next(); allClasses.addAll(di.getClasses()); } if (!allClasses.isEmpty()) { boolean hasTimetable = sessionContext.hasPermission(Right.ClassAssignments); WebTable classTable = (hasTimetable? new WebTable( 9, null, new String[] {"Class", "Check Conflicts", "Share", "Limit", "Enrollment", "Manager", "Time", "Date", "Room"}, new String[] {"left", "left","left", "left", "left", "left", "left", "left", "left"}, null ) : new WebTable( 5, null, new String[] {"Class", "Check Conflicts", "Share", "Limit", "Manager"}, new String[] {"left", "left","left", "left", "left"}, null ) ); String backType = request.getParameter("backType"); String backId = request.getParameter("backId"); TreeSet classes = new TreeSet(new ClassInstructorComparator(new ClassComparator(ClassComparator.COMPARE_BY_LABEL))); classes.addAll(allClasses); Vector classIds = new Vector(classes.size()); //Get class assignment information for (Iterator iterInst = classes.iterator(); iterInst.hasNext();) { ClassInstructor ci = (ClassInstructor) iterInst.next(); Class_ c = ci.getClassInstructing(); classIds.add(c.getUniqueId()); String limitString = ""; if (!c.getSchedulingSubpart().getInstrOfferingConfig().isUnlimitedEnrollment().booleanValue()) { if (c.getExpectedCapacity() != null) { limitString = c.getExpectedCapacity().toString(); if (c.getMaxExpectedCapacity() != null && !c.getMaxExpectedCapacity().equals(c.getExpectedCapacity())){ limitString = limitString + "-" + c.getMaxExpectedCapacity().toString(); } } else { limitString = "0"; if (c.getMaxExpectedCapacity() != null && c.getMaxExpectedCapacity().intValue() != 0){ limitString = limitString + "-" + c.getMaxExpectedCapacity().toString(); } } } String enrollmentString = ""; if (c.getEnrollment() != null) { enrollmentString = c.getEnrollment().toString(); } else { enrollmentString = 
"0"; } String managingDept = null; if (c.getManagingDept()!=null) { Department d = c.getManagingDept(); managingDept = d.getManagingDeptAbbv(); } String assignedTime = ""; String assignedDate = ""; String assignedRoom = ""; ClassAssignmentDetails ca = ClassAssignmentDetails.createClassAssignmentDetails(sessionContext, courseTimetablingSolverService.getSolver(), c.getUniqueId(),false); if (ca == null) { try { Assignment a = classAssignmentService.getAssignment().getAssignment(c); if (a.getUniqueId() != null) ca = ClassAssignmentDetails.createClassAssignmentDetailsFromAssignment(sessionContext, a.getUniqueId(), false); } catch (Exception e) {} } if (ca != null) { if (ca.getAssignedTime() != null) { assignedTime = ca.getAssignedTime().toHtml(false, false, true, true); assignedDate = ca.getAssignedTime().getDatePatternHtml(); } if (ca.getAssignedRoom() != null) { for (int i=0;i<ca.getAssignedRoom().length;i++) { if (i>0) assignedRoom += ", "; assignedRoom += ca.getAssignedRoom()[i].toHtml(false,false,true); } } } String onClick = null; if (sessionContext.hasPermission(c, Right.ClassDetail)) { onClick = "onClick=\"document.location='classDetail.do?cid="+c.getUniqueId()+"';\""; } boolean back = "PreferenceGroup".equals(backType) && c.getUniqueId().toString().equals(backId); if (hasTimetable) { classTable.addLine( onClick, new String[] { (back?"<A name=\"back\"></A>":"")+ c.getClassLabel(), (ci.isLead().booleanValue()?"<IMG border='0' alt='true' align='absmiddle' src='images/accept.png'>":""), ci.getPercentShare()+"%", limitString, enrollmentString, managingDept, assignedTime, assignedDate, assignedRoom }, null,null); } else { classTable.addLine( onClick, new String[] { (back?"<A name=\"back\"></A>":"")+ c.getClassLabel(), (ci.isLead().booleanValue()?"<IMG border='0' alt='true' align='absmiddle' src='images/accept.png'>":""), ci.getPercentShare()+"%", limitString, managingDept }, null,null); } } Navigation.set(sessionContext, Navigation.sClassLevel, classIds); String 
tblData = classTable.printTable(); request.setAttribute("classTable", tblData); } if (ApplicationProperty.RoomAvailabilityIncludeInstructors.isTrue() && inst.getExternalUniqueId() != null && !inst.getExternalUniqueId().isEmpty() && RoomAvailability.getInstance() != null && RoomAvailability.getInstance() instanceof DefaultRoomAvailabilityService) { WebTable.setOrder(sessionContext, "instructorUnavailability.ord", request.getParameter("iuord"), 1); WebTable eventTable = new WebTable(5, "Instructor Unavailability", "instructorDetail.do?instructorId=" + frm.getInstructorId() + "&iuord=%%", new String[] {"Event", "Type", "Date", "Time", "Room"}, new String[] {"left", "left", "left", "left", "left"}, null); Formats.Format<Date> dfShort = Formats.getDateFormat(Formats.Pattern.DATE_EVENT_SHORT); Formats.Format<Date> dfLong = Formats.getDateFormat(Formats.Pattern.DATE_EVENT_LONG); org.hibernate.Session hibSession = EventDAO.getInstance().getSession(); Map<Event, Set<Meeting>> unavailabilities = new HashMap<Event, Set<Meeting>>(); for (Meeting meeting: (List<Meeting>)hibSession.createQuery( "select distinct m from Event e inner join e.meetings m left outer join e.additionalContacts c, Session s " + "where e.class in (CourseEvent, SpecialEvent, UnavailableEvent) and m.meetingDate >= s.eventBeginDate and m.meetingDate <= s.eventEndDate " + "and s.uniqueId = :sessionId and (e.mainContact.externalUniqueId = :user or c.externalUniqueId = :user) and m.approvalStatus = 1" ) .setLong("sessionId", sessionContext.getUser().getCurrentAcademicSessionId()) .setString("user", inst.getExternalUniqueId()) .setCacheable(true).list()) { Set<Meeting> meetings = unavailabilities.get(meeting.getEvent()); if (meetings == null) { meetings = new HashSet<Meeting>(); unavailabilities.put(meeting.getEvent(), meetings); } meetings.add(meeting); } for (Event event: new TreeSet<Event>(unavailabilities.keySet())) { for (MultiMeeting m: Event.getMultiMeetings(unavailabilities.get(event))) { String date = 
m.getDays() + " " + (m.getMeetings().size() == 1 ? dfLong.format(m.getMeetings().first().getMeetingDate()) : dfShort.format(m.getMeetings().first().getMeetingDate()) + " - " + dfLong.format(m.getMeetings().last().getMeetingDate())); String time = m.getMeetings().first().startTime() + " - " + m.getMeetings().first().stopTime(); String room = (m.getMeetings().first().getLocation() == null ? "" : m.getMeetings().first().getLocation().getLabelWithHint()); eventTable.addLine( sessionContext.hasPermission(event, Right.EventDetail) ? "onClick=\"showGwtDialog('Event Detail', 'gwt.jsp?page=events&menu=hide#event=" + event.getUniqueId() + "','900','85%');\"" : null, new String[] { event.getEventName(), event.getEventTypeAbbv(), date, time, room}, new Comparable[] { event.getEventName(), event.getEventType(), m.getMeetings().first().getMeetingDate(), m.getMeetings().first().getStartPeriod(), room }); } } if (!eventTable.getLines().isEmpty()) request.setAttribute("eventTable", eventTable.printTable(WebTable.getOrder(sessionContext, "instructorUnavailability.ord"))); } //// Set display distribution to Not Applicable /* request.setAttribute(DistributionPref.DIST_PREF_REQUEST_ATTR, "<FONT color=696969>Distribution Preferences Not Applicable</FONT>"); */ frm.setDisplayPrefs(CommonValues.Yes.eq(sessionContext.getUser().getProperty(UserProperty.DispInstructorPrefs))); if (op.equals(rsc.getMessage("button.displayPrefs")) || "true".equals(request.getParameter("showPrefs"))) { frm.setDisplayPrefs(true); sessionContext.getUser().setProperty(UserProperty.DispInstructorPrefs, CommonValues.Yes.value()); } if (op.equals(rsc.getMessage("button.hidePrefs")) || "false".equals(request.getParameter("showPrefs"))) { frm.setDisplayPrefs(false); sessionContext.getUser().setProperty(UserProperty.DispInstructorPrefs, CommonValues.No.value()); } if (frm.isDisplayPrefs()) { // Initialize Preferences for initial load Set timePatterns = new HashSet(); frm.setAvailableTimePatterns(null); initPrefs(frm, 
inst, null, false); timePatterns.add(new TimePattern(new Long(-1))); //timePatterns.addAll(TimePattern.findApplicable(request,30,false)); // Process Preferences Action processPrefAction(request, frm, errors); // Generate Time Pattern Grids //super.generateTimePatternGrids(request, frm, inst, timePatterns, "init", timeVertical, false, null); for (Preference pref: inst.getPreferences()) { if (pref instanceof TimePref) { frm.setAvailability(((TimePref)pref).getPreference()); break; } } LookupTables.setupRooms(request, inst); // Room Prefs LookupTables.setupBldgs(request, inst); // Building Prefs LookupTables.setupRoomFeatures(request, inst); // Preference Levels LookupTables.setupRoomGroups(request, inst); // Room Groups } DepartmentalInstructor previous = inst.getPreviousDepartmentalInstructor(sessionContext, Right.InstructorDetail); frm.setPreviousId(previous==null?null:previous.getUniqueId().toString()); DepartmentalInstructor next = inst.getNextDepartmentalInstructor(sessionContext, Right.InstructorDetail); frm.setNextId(next==null?null:next.getUniqueId().toString()); return mapping.findForward("showInstructorDetail"); } catch (Exception e) { Debug.error(e); throw e; } } /** * Loads the non-editable instructor info into the form * @param request * @param frm * @param inst * @param instructorId */ private void doLoad(HttpServletRequest request, InstructorEditForm frm, DepartmentalInstructor inst, String instructorId) { // populate form frm.setInstructorId(instructorId); NameFormat nameFormat = NameFormat.fromReference(sessionContext.getUser().getProperty(UserProperty.NameFormat)); frm.setName(nameFormat.format(inst)); frm.setEmail(inst.getEmail()); String puid = inst.getExternalUniqueId(); if (puid != null) { frm.setPuId(puid); } if (inst.getPositionType() != null) { frm.setPosType(inst.getPositionType().getLabel().trim()); } if (inst.getCareerAcct() != null) { frm.setCareerAcct(inst.getCareerAcct().trim()); } else if (DepartmentalInstructor.canLookupInstructor()) 
{ try { UserInfo user = DepartmentalInstructor.lookupInstructor(puid); if (user != null && user.getUserName() != null) frm.setCareerAcct(user.getUserName()); } catch (Exception e) {} } if (inst.getNote() != null) { frm.setNote(inst.getNote().trim()); } request.getSession().setAttribute(Constants.DEPT_ID_ATTR_NAME, inst.getDepartment().getUniqueId().toString()); // Check column ordering - default to name String orderStr = request.getParameter("order"); int cols = 2; int order = 1; if (orderStr != null && orderStr.trim().length() != 0) { try { order = Integer.parseInt(orderStr); if (Math.abs(order) > cols) order = 1; } catch (Exception e) { order = 1; } } frm.setIgnoreDist(inst.isIgnoreToFar()==null?false:inst.isIgnoreToFar().booleanValue()); } }
/*
 * This file is part of the DITA Open Toolkit project.
 *
 * Copyright 2007 IBM Corporation
 *
 * See the accompanying LICENSE file for applicable license.
 */
package org.dita.dost.reader;

import org.dita.dost.exception.DITAOTException;
import org.dita.dost.log.MessageUtils;
import org.dita.dost.module.ChunkModule.ChunkFilenameGenerator;
import org.dita.dost.module.ChunkModule.ChunkFilenameGeneratorFactory;
import org.dita.dost.module.reader.TempFileNameScheme;
import org.dita.dost.util.DitaClass;
import org.dita.dost.util.Job;
import org.dita.dost.util.Job.FileInfo;
import org.dita.dost.util.XMLSerializer;
import org.dita.dost.writer.AbstractDomFilter;
import org.dita.dost.writer.ChunkTopicParser;
import org.dita.dost.writer.SeparateChunkTopicParser;
import org.w3c.dom.*;
import org.xml.sax.SAXException;

import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.*;
import java.util.stream.Collectors;

import static java.util.Collections.unmodifiableSet;
import static org.apache.commons.io.FilenameUtils.getBaseName;
import static org.dita.dost.chunk.ChunkModule.getDitaVersion;
import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.FileUtils.getFragment;
import static org.dita.dost.util.FileUtils.replaceExtension;
import static org.dita.dost.util.StringUtils.join;
import static org.dita.dost.util.StringUtils.split;
import static org.dita.dost.util.URLUtils.*;
import static org.dita.dost.util.XMLUtils.*;

/**
 * ChunkMapReader class, read and filter ditamap file for chunking.
 *
 * <p>Walks the map DOM, interprets {@code @chunk} tokens
 * (to-content, to-navigation, by-topic, by-document, select-*) and records the
 * resulting file rewrites in {@link #getChangeTable()} and
 * {@link #getConflicTable()} for later stages of the chunk module.</p>
 */
// TODO rename this because this is not a reader, it's a filter
public final class ChunkMapReader extends AbstractDomFilter {

    public static final String FILE_NAME_STUB_DITAMAP = "stub.ditamap";
    public static final String FILE_EXTENSION_CHUNK = ".chunk";
    public static final String ATTR_XTRF_VALUE_GENERATED = "generated_by_chunk";

    public static final String CHUNK_SELECT_BRANCH = "select-branch";
    public static final String CHUNK_SELECT_TOPIC = "select-topic";
    public static final String CHUNK_SELECT_DOCUMENT = "select-document";
    private static final String CHUNK_BY_DOCUMENT = "by-document";
    private static final String CHUNK_BY_TOPIC = "by-topic";
    public static final String CHUNK_TO_CONTENT = "to-content";
    public static final String CHUNK_TO_NAVIGATION = "to-navigation";
    public static final String CHUNK_PREFIX = "Chunk";

    private TempFileNameScheme tempFileNameScheme;
    /** Root chunk value forced from the outside (overrides the map's own @chunk), or null. */
    private Collection<String> rootChunkOverride;
    /** by-* token taken from the root element, used when a topicref has no by-* token of its own. */
    private String defaultChunkByToken;
    // ChunkTopicParser assumes keys and values are chimera paths, i.e. systems paths with fragments.
    private final LinkedHashMap<URI, URI> changeTable = new LinkedHashMap<>(128);
    private final Map<URI, URI> conflictTable = new HashMap<>(128);
    private boolean supportToNavigation;

    // Processing instructions copied verbatim from the input map into the output document
    private ProcessingInstruction workdir = null;
    private ProcessingInstruction workdirUrl = null;
    private ProcessingInstruction path2proj = null;
    private ProcessingInstruction path2projUrl = null;
    private ProcessingInstruction path2rootmapUrl = null;

    private final ChunkFilenameGenerator chunkFilenameGenerator = ChunkFilenameGeneratorFactory.newInstance();

    @Override
    public void setJob(final Job job) {
        super.setJob(job);
        try {
            // NOTE(review): Class.newInstance() is deprecated; kept for compatibility with
            // the original checked-exception handling.
            tempFileNameScheme = (TempFileNameScheme) Class.forName(job.getProperty("temp-file-name-scheme")).newInstance();
        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        tempFileNameScheme.setBaseDir(job.getInputDir());
    }

    /** Force the root chunk value, overriding the map's own {@code @chunk} attribute. */
    public void setRootChunkOverride(final String chunkValue) {
        rootChunkOverride = split(chunkValue);
    }

    /**
     * Absolute URI to file being processed
     */
    private URI currentFile;

    /**
     * read input file.
     *
     * @param inputFile filename
     */
    @Override
    public void read(final File inputFile) throws DITAOTException {
        this.currentFile = inputFile.toURI();
        super.read(inputFile);
    }

    /**
     * Filter entry point: chunk processing applies only to DITA 1.x maps;
     * DITA 2.0+ (or version-less) documents pass through unchanged.
     */
    @Override
    public Document process(final Document doc) {
        final Float ditaVersion = getDitaVersion(doc.getDocumentElement());
        if (ditaVersion == null || ditaVersion >= 2.0f) {
            return doc;
        }
        readLinks(doc);
        readProcessingInstructions(doc);
        final Element root = doc.getDocumentElement();
        if (rootChunkOverride != null) {
            final String c = join(rootChunkOverride, " ");
            logger.debug("Use override root chunk \"" + c + "\"");
            root.setAttribute(ATTRIBUTE_NAME_CHUNK, c);
        }
        final Collection<String> rootChunk = split(root.getAttribute(ATTRIBUTE_NAME_CHUNK));
        defaultChunkByToken = getChunkByToken(rootChunk, "by-", CHUNK_BY_DOCUMENT);
        if (rootChunk.contains(CHUNK_TO_CONTENT)) {
            chunkMap(root);
        } else {
            for (final Element currentElem : getChildElements(root)) {
                if (MAP_RELTABLE.matches(currentElem)) {
                    updateReltable(currentElem);
                } else if (MAP_TOPICREF.matches(currentElem)) {
                    processTopicref(currentElem);
                }
            }
        }

        return buildOutputDocument(root);
    }

    private final Set<URI> chunkTopicSet = new HashSet<>();

    /**
     * @return absolute temporary files
     */
    public Set<URI> getChunkTopicSet() {
        return unmodifiableSet(chunkTopicSet);
    }

    private void readLinks(final Document doc) {
        final Element root = doc.getDocumentElement();
        readLinks(root, false, false);
    }

    /**
     * Collect hrefs (and copy-to targets) that participate in chunking.
     *
     * @param chunk    true when an ancestor (or this element) carries @chunk
     * @param disabled true when chunking is disabled for this branch
     *                 (to-navigation, non-submap topicgroup, or reltable)
     */
    private void readLinks(final Element elem, final boolean chunk, final boolean disabled) {
        final boolean c = chunk || elem.getAttributeNode(ATTRIBUTE_NAME_CHUNK) != null;
        final boolean d = disabled
                || elem.getAttribute(ATTRIBUTE_NAME_CHUNK).contains(CHUNK_TO_NAVIGATION)
                || (MAPGROUP_D_TOPICGROUP.matches(elem) && !SUBMAP.matches(elem))
                || MAP_RELTABLE.matches(elem);
        final Attr href = elem.getAttributeNode(ATTRIBUTE_NAME_HREF);
        if (href != null) {
            final URI filename = stripFragment(currentFile.resolve(href.getValue()));
            if (c && !d) {
                chunkTopicSet.add(filename);
                final Attr copyTo = elem.getAttributeNode(ATTRIBUTE_NAME_COPY_TO);
                if (copyTo != null) {
                    final URI copyToFile = stripFragment(currentFile.resolve(copyTo.getValue()));
                    chunkTopicSet.add(copyToFile);
                }
            }
        }
        for (final Element topicref : getChildElements(elem, MAP_TOPICREF)) {
            readLinks(topicref, c, d);
        }
    }

    /**
     * Return the first token in {@code chunkValue} that starts with
     * {@code category} (e.g. "by-"), or {@code defaultToken} when none matches.
     */
    public static String getChunkByToken(final Collection<String> chunkValue, final String category, final String defaultToken) {
        if (chunkValue.isEmpty()) {
            return defaultToken;
        }
        for (final String token : chunkValue) {
            if (token.startsWith(category)) {
                return token;
            }
        }
        return defaultToken;
    }

    /**
     * Process map when "to-content" is specified on map element.
     * <p>
     * TODO: Instead of reclassing map element to be a topicref, add a topicref
     * into the map root and move all map content into that topicref.
     */
    private void chunkMap(final Element root) {
        // create the reference to the new file on root element.
        String newFilename = replaceExtension(new File(currentFile).getName(), FILE_EXTENSION_DITA);
        URI newFile = currentFile.resolve(newFilename);
        if (job.getStore().exists(newFile)) {
            final URI oldFile = newFile;
            newFilename = chunkFilenameGenerator.generateFilename(CHUNK_PREFIX, FILE_EXTENSION_DITA);
            newFile = currentFile.resolve(newFilename);
            // Mark up the possible name changing, in case that references might be updated.
            conflictTable.put(newFile, oldFile.normalize());
        }
        changeTable.put(newFile, newFile);

        // change the class attribute to "topicref"
        final String origCls = root.getAttribute(ATTRIBUTE_NAME_CLASS);
        root.setAttribute(ATTRIBUTE_NAME_CLASS, origCls + MAP_TOPICREF.matcher);
        root.setAttribute(ATTRIBUTE_NAME_HREF, toURI(newFilename).toString());

        createTopicStump(newFile);

        // process chunk
        processTopicref(root);

        // restore original root element
        if (origCls != null) {
            root.setAttribute(ATTRIBUTE_NAME_CLASS, origCls);
        }
        root.removeAttribute(ATTRIBUTE_NAME_HREF);
    }

    /**
     * Create the new topic stump.
     */
    private void createTopicStump(final URI newFile) {
        try (final OutputStream newFileWriter = job.getStore().getOutputStream(newFile)) {
            final XMLStreamWriter o = XMLOutputFactory.newInstance().createXMLStreamWriter(newFileWriter, UTF8);
            o.writeStartDocument();
            o.writeProcessingInstruction(PI_WORKDIR_TARGET, UNIX_SEPARATOR + new File(newFile.resolve(".")).getAbsolutePath());
            o.writeProcessingInstruction(PI_WORKDIR_TARGET_URI, newFile.resolve(".").toString());
            o.writeStartElement(ELEMENT_NAME_DITA);
            o.writeEndElement();
            o.writeEndDocument();
            o.close();
            newFileWriter.flush();
        } catch (final RuntimeException e) {
            throw e;
        } catch (final Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Read processing metadata from processing instructions.
     */
    private void readProcessingInstructions(final Document doc) {
        final NodeList docNodes = doc.getChildNodes();
        for (int i = 0; i < docNodes.getLength(); i++) {
            final Node node = docNodes.item(i);
            if (node.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE) {
                final ProcessingInstruction pi = (ProcessingInstruction) node;
                switch (pi.getNodeName()) {
                    case PI_WORKDIR_TARGET:
                        workdir = pi;
                        break;
                    case PI_WORKDIR_TARGET_URI:
                        workdirUrl = pi;
                        break;
                    case PI_PATH2PROJ_TARGET:
                        path2proj = pi;
                        break;
                    case PI_PATH2PROJ_TARGET_URI:
                        path2projUrl = pi;
                        break;
                    case PI_PATH2ROOTMAP_TARGET_URI:
                        path2rootmapUrl = pi;
                        break;
                }
            }
        }
    }

    private void outputMapFile(final URI file, final Document doc) {
        try {
            job.getStore().writeDocument(doc, file);
        } catch (final IOException e) {
            logger.error("Failed to serialize map: " + e.getMessage(), e);
        }
    }

    /** Build the output map document: copy retained PIs, then the (rewritten) root. */
    private Document buildOutputDocument(final Element root) {
        final Document doc = getDocumentBuilder().newDocument();
        if (workdir != null) {
            doc.appendChild(doc.importNode(workdir, true));
        }
        if (workdirUrl != null) {
            doc.appendChild(doc.importNode(workdirUrl, true));
        }
        if (path2proj != null) {
            doc.appendChild(doc.importNode(path2proj, true));
        }
        if (path2projUrl != null) {
            doc.appendChild(doc.importNode(path2projUrl, true));
        }
        if (path2rootmapUrl != null) {
            doc.appendChild(doc.importNode(path2rootmapUrl, true));
        }
        doc.appendChild(doc.importNode(root, true));
        return doc;
    }

    /**
     * Dispatch a topicref according to its (cascaded) {@code @chunk} tokens.
     * Topicrefs generated by a previous chunk pass (xtrf marker) are skipped.
     */
    private void processTopicref(final Element topicref) {
        final String xtrf = getValue(topicref, ATTRIBUTE_NAME_XTRF);
        if (xtrf != null && xtrf.contains(ATTR_XTRF_VALUE_GENERATED)) {
            return;
        }

        final Collection<String> chunk = split(getValue(topicref, ATTRIBUTE_NAME_CHUNK));

        final URI href = toURI(getValue(topicref, ATTRIBUTE_NAME_HREF));
        final URI copyTo = toURI(getValue(topicref, ATTRIBUTE_NAME_COPY_TO));
        final String scope = getCascadeValue(topicref, ATTRIBUTE_NAME_SCOPE);
        final String chunkByToken = getChunkByToken(chunk, "by-", defaultChunkByToken);

        if (ATTR_SCOPE_VALUE_EXTERNAL.equals(scope)
                || (href != null && !job.getStore().exists(currentFile.resolve(href.toString())))
                || (chunk.isEmpty() && href == null)) {
            // external, missing, or nothing to do here: recurse only
            processChildTopicref(topicref);
        } else if (chunk.contains(CHUNK_TO_CONTENT)) {
            if (href != null || copyTo != null || topicref.hasChildNodes()) {
                if (chunk.contains(CHUNK_BY_TOPIC)) {
                    logger.warn(MessageUtils.getMessage("DOTJ064W").setLocation(topicref).toString());
                }
                if (href == null) {
                    generateStumpTopic(topicref);
                }
                processCombineChunk(topicref);
            }
        } else if (chunk.contains(CHUNK_TO_NAVIGATION) && supportToNavigation) {
            processChildTopicref(topicref);
            processNavitation(topicref);
        } else if (chunkByToken.equals(CHUNK_BY_TOPIC)) {
            if (href != null) {
                processSeparateChunk(topicref);
            }
            processChildTopicref(topicref);
        } else { // chunkByToken.equals(CHUNK_BY_DOCUMENT)
            URI currentPath = null;
            if (copyTo != null) {
                currentPath = currentFile.resolve(copyTo);
            } else if (href != null) {
                currentPath = currentFile.resolve(href);
            }
            if (currentPath != null) {
                changeTable.remove(currentPath);
                final String processingRole = getCascadeValue(topicref, ATTRIBUTE_NAME_PROCESSING_ROLE);
                if (!ATTR_PROCESSING_ROLE_VALUE_RESOURCE_ONLY.equals(processingRole)) {
                    changeTable.put(currentPath, currentPath);
                }
            }
            processChildTopicref(topicref);
        }
    }

    /**
     * Create new map and refer to it with navref.
     */
    private void processNavitation(final Element topicref) {
        // create new map's root element
        final Element root = (Element) topicref.getOwnerDocument().getDocumentElement().cloneNode(false);
        // create navref element
        final Element navref = topicref.getOwnerDocument().createElement(MAP_NAVREF.localName);
        final String newMapFile = chunkFilenameGenerator.generateFilename("MAPCHUNK", FILE_EXTENSION_DITAMAP);
        navref.setAttribute(ATTRIBUTE_NAME_MAPREF, newMapFile);
        navref.setAttribute(ATTRIBUTE_NAME_CLASS, MAP_NAVREF.toString());
        // replace topicref with navref
        topicref.getParentNode().replaceChild(navref, topicref);
        root.appendChild(topicref);
        // generate new file
        final URI navmap = currentFile.resolve(newMapFile);
        changeTable.put(stripFragment(navmap), stripFragment(navmap));
        outputMapFile(navmap, buildOutputDocument(root));
    }

    /**
     * Generate file name.
     *
     * @return generated file name
     */
    private String generateFilename() {
        return chunkFilenameGenerator.generateFilename(CHUNK_PREFIX, FILE_EXTENSION_DITA);
    }

    /**
     * Generate stump topic for to-content content.
     *
     * @param topicref topicref without href to generate stump topic for
     */
    private void generateStumpTopic(final Element topicref) {
        final URI result = getResultFile(topicref);
        final URI temp = tempFileNameScheme.generateTempFileName(result);
        final URI absTemp = job.tempDir.toURI().resolve(temp);

        final String name = getBaseName(new File(result).getName());
        String navtitle = getChildElementValueOfTopicmeta(topicref, TOPIC_NAVTITLE);
        if (navtitle == null) {
            navtitle = getValue(topicref, ATTRIBUTE_NAME_NAVTITLE);
        }
        String shortDesc = getChildElementValueOfTopicmeta(topicref, TOPIC_SHORTDESC);
        if (shortDesc == null) {
            shortDesc = getChildElementValueOfTopicmeta(topicref, MAP_SHORTDESC);
        }

        writeChunk(absTemp, name, navtitle, shortDesc);

        // update current element's @href value
        final URI relativePath = getRelativePath(currentFile.resolve(FILE_NAME_STUB_DITAMAP), absTemp);
        topicref.setAttribute(ATTRIBUTE_NAME_HREF, relativePath.toString());
        if (MAPGROUP_D_TOPICGROUP.matches(topicref)) {
            topicref.setAttribute(ATTRIBUTE_NAME_CLASS, MAP_TOPICREF.toString());
        }

        // FIX: removed unused local `relativeToBase` (computed via getRelativePath but never read)
        final FileInfo fi = new FileInfo.Builder()
                .uri(temp)
                .result(result)
                .format(ATTR_FORMAT_VALUE_DITA)
                .build();
        job.add(fi);
    }

    /**
     * Serialize a stub topic (or an untitled {@code <dita>} stub when neither
     * title nor shortdesc is available) to the given output file.
     */
    private void writeChunk(final URI outputFileName, String id, String title, String shortDesc) {
        try (final OutputStream output = job.getStore().getOutputStream(outputFileName)) {
            final XMLSerializer serializer = XMLSerializer.newInstance(output);
            serializer.writeStartDocument();
            if (title == null && shortDesc == null) {
                // topicgroup with no title, no shortdesc, just need a non titled stub
                serializer.writeStartElement(ELEMENT_NAME_DITA);
                serializer.writeAttribute(DITA_NAMESPACE, ATTRIBUTE_PREFIX_DITAARCHVERSION + ":" + ATTRIBUTE_NAME_DITAARCHVERSION, "1.3");
                serializer.writeEndElement(); // dita
            } else {
                serializer.writeStartElement(TOPIC_TOPIC.localName);
                serializer.writeAttribute(DITA_NAMESPACE, ATTRIBUTE_PREFIX_DITAARCHVERSION + ":" + ATTRIBUTE_NAME_DITAARCHVERSION, "1.3");
                serializer.writeAttribute(ATTRIBUTE_NAME_ID, id);
                serializer.writeAttribute(ATTRIBUTE_NAME_CLASS, TOPIC_TOPIC.toString());
                serializer.writeAttribute(ATTRIBUTE_NAME_DOMAINS, "");
                serializer.writeAttribute(ATTRIBUTE_NAME_SPECIALIZATIONS, "");
                serializer.writeStartElement(TOPIC_TITLE.localName);
                serializer.writeAttribute(ATTRIBUTE_NAME_CLASS, TOPIC_TITLE.toString());
                if (title != null) {
                    serializer.writeCharacters(title);
                }
                serializer.writeEndElement(); // title
                if (shortDesc != null) {
                    serializer.writeStartElement(TOPIC_SHORTDESC.localName);
                    serializer.writeAttribute(ATTRIBUTE_NAME_CLASS, TOPIC_SHORTDESC.toString());
                    serializer.writeCharacters(shortDesc);
                    serializer.writeEndElement(); // shortdesc
                }
                serializer.writeEndElement(); // topic
            }
            serializer.writeEndDocument();
            serializer.close();
        } catch (final IOException | SAXException e) {
            logger.error("Failed to write generated chunk: " + e.getMessage(), e);
        }
    }

    /**
     * Resolve the result file for a stump topic: copy-to wins, then the
     * topicref id, otherwise a generated name not colliding with known results.
     */
    private URI getResultFile(final Element topicref) {
        final FileInfo curr = job.getFileInfo(currentFile);
        final URI copyTo = toURI(getValue(topicref, ATTRIBUTE_NAME_COPY_TO));
        final String id = getValue(topicref, ATTRIBUTE_NAME_ID);

        URI outputFileName;
        if (copyTo != null) {
            outputFileName = curr.result.resolve(copyTo);
        } else if (id != null) {
            outputFileName = curr.result.resolve(id + FILE_EXTENSION_DITA);
        } else {
            final Set<URI> results = job.getFileInfo().stream().map(fi -> fi.result).collect(Collectors.toSet());
            do {
                outputFileName = curr.result.resolve(generateFilename());
            } while (results.contains(outputFileName));
        }
        return outputFileName;
    }

    /**
     * get topicmeta's child(e.g navtitle, shortdesc) tag's value(text-only).
     *
     * @param element input element
     * @return text value
     */
    private String getChildElementValueOfTopicmeta(final Element element, final DitaClass classValue) {
        if (element.hasChildNodes()) {
            final Element topicMeta = getElementNode(element, MAP_TOPICMETA);
            if (topicMeta != null) {
                final Element elem = getElementNode(topicMeta, classValue);
                if (elem != null) {
                    return getText(elem);
                }
            }
        }
        return null;
    }

    /** Recurse into child topicrefs that have not already been consumed by a chunk. */
    private void processChildTopicref(final Element node) {
        final List<Element> children = getChildElements(node, MAP_TOPICREF);
        for (final Element currentElem : children) {
            final URI href = toURI(getValue(currentElem, ATTRIBUTE_NAME_HREF));
            final String xtrf = currentElem.getAttribute(ATTRIBUTE_NAME_XTRF);
            if (href == null) {
                processTopicref(currentElem);
            } else if (!ATTR_XTRF_VALUE_GENERATED.equals(xtrf)
                    && !currentFile.resolve(href).equals(changeTable.get(currentFile.resolve(href)))) {
                processTopicref(currentElem);
            }
        }
    }

    /** Split a by-topic topicref into separate chunk files. */
    private void processSeparateChunk(final Element topicref) {
        final SeparateChunkTopicParser chunkParser = new SeparateChunkTopicParser();
        chunkParser.setLogger(logger);
        chunkParser.setJob(job);
        chunkParser.setup(changeTable, conflictTable, topicref, chunkFilenameGenerator);
        chunkParser.write(currentFile);
    }

    /** Combine a to-content branch into a single chunk file. */
    private void processCombineChunk(final Element topicref) {
        final ChunkTopicParser chunkParser = new ChunkTopicParser();
        chunkParser.setLogger(logger);
        chunkParser.setJob(job);
        createChildTopicrefStubs(getChildElements(topicref, MAP_TOPICREF));
        chunkParser.setup(changeTable, conflictTable, topicref, chunkFilenameGenerator);
        chunkParser.write(currentFile);
    }

    /** Before combining topics in a branch, ensure any descendant topicref with @chunk and no @href has a stub */
    private void createChildTopicrefStubs(final List<Element> topicrefs) {
        if (!topicrefs.isEmpty()) {
            for (final Element currentElem : topicrefs) {
                final String href = getValue(currentElem, ATTRIBUTE_NAME_HREF);
                final String chunk = getValue(currentElem, ATTRIBUTE_NAME_CHUNK);
                if (href == null && chunk != null) {
                    generateStumpTopic(currentElem);
                }
                createChildTopicrefStubs(getChildElements(currentElem, MAP_TOPICREF));
            }
        }
    }

    /** Rewrite reltable hrefs that were moved by chunking to be relative to the stub map. */
    private void updateReltable(final Element elem) {
        final String href = elem.getAttribute(ATTRIBUTE_NAME_HREF);
        if (href.length() != 0) {
            if (changeTable.containsKey(currentFile.resolve(href))) {
                URI res = getRelativePath(currentFile.resolve(FILE_NAME_STUB_DITAMAP), currentFile.resolve(href));
                final String fragment = getFragment(href);
                if (fragment != null) {
                    res = setFragment(res, fragment);
                }
                elem.setAttribute(ATTRIBUTE_NAME_HREF, res.toString());
            }
        }
        final NodeList children = elem.getChildNodes();
        for (int i = 0; i < children.getLength(); i++) {
            final Node current = children.item(i);
            if (current.getNodeType() == Node.ELEMENT_NODE) {
                final Element currentElem = (Element) current;
                final String cls = currentElem.getAttribute(ATTRIBUTE_NAME_CLASS);
                if (MAP_TOPICREF.matches(cls)) {
                    // FIXME: What should happen here?
                }
            }
        }
    }

    /**
     * Get changed files table.
     *
     * @return map of changed files, absolute temporary files
     */
    public Map<URI, URI> getChangeTable() {
        for (final Map.Entry<URI, URI> e : changeTable.entrySet()) {
            assert e.getKey().isAbsolute();
            assert e.getValue().isAbsolute();
        }
        return Collections.unmodifiableMap(changeTable);
    }

    /**
     * get conflict table.
     * <p>NOTE(review): method name has a typo ("Conflic"); kept as-is because it is public API.</p>
     *
     * @return conflict table, absolute temporary files
     */
    public Map<URI, URI> getConflicTable() {
        for (final Map.Entry<URI, URI> e : conflictTable.entrySet()) {
            assert e.getKey().isAbsolute();
            assert e.getValue().isAbsolute();
        }
        return conflictTable;
    }

    /**
     * Support chunk token to-navigation.
     *
     * @param supportToNavigation flag to enable to-navigation support
     */
    public void supportToNavigation(final boolean supportToNavigation) {
        this.supportToNavigation = supportToNavigation;
    }
}
package natlab.backends.Fortran.codegen_readable;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import natlab.backends.Fortran.codegen_readable.FortranAST_readable.Subprogram;
import natlab.options.Mc2ForOptions;
import natlab.tame.BasicTamerTool;
import natlab.tame.callgraph.StaticFunction;
import natlab.tame.tamerplus.analysis.AnalysisEngine;
import natlab.tame.tamerplus.transformation.TransformationEngine;
import natlab.tame.valueanalysis.ValueAnalysis;
import natlab.tame.valueanalysis.ValueFlowMap;
import natlab.tame.valueanalysis.aggrvalue.AggrValue;
import natlab.tame.valueanalysis.basicmatrix.BasicMatrixValue;
import natlab.toolkits.filehandling.GenericFile;
import natlab.toolkits.path.FileEnvironment;
import ast.ASTNode;
import ast.Function;

/**
 * Entry points for the readable Fortran back end: runs the Tamer value analysis
 * and TamerPlus transformations on a MATLAB program, generates a Fortran AST per
 * user function, pretty-prints it, and writes one .f95 file per function.
 */
public class Main_readable {

    static boolean Debug = false;

    /**
     * This main method is just for testing; it doesn't follow the convention when
     * passing a file to a program. Replace "fileDir" and "entryPointFile" below with
     * your real testing file directory and entry function. You can pass the type info
     * of the input argument to the program; currently the type info is composed like
     * double&3*3&REAL.
     */
    public static void main(String[] args) {
        String fileDir = "/home/aaron/Dropbox/benchmarks/testload/";
        String entryPointFile = "test_load";
        GenericFile gFile = GenericFile.create(fileDir + entryPointFile + ".m");
        FileEnvironment env = new FileEnvironment(gFile); // get path environment obj
        BasicTamerTool tool = new BasicTamerTool();
        ValueAnalysis<AggrValue<BasicMatrixValue>> analysis = tool.analyze(args, env);
        int size = analysis.getNodeList().size();
        Set<String> visitedFunctions = new HashSet<String>();
        // Preprocess to get all the names of the user-defined functions in the program.
        Set<String> userDefinedFunctions = new HashSet<String>();
        for (int i = 0; i < size; i++) {
            String functionName = analysis.getNodeList().get(i).getFunction().getName();
            if (!functionName.equals(entryPointFile)) {
                userDefinedFunctions.add(functionName);
            }
        }
        /*
         * Run TamerPlus analysis first, then use the AST from TamerPlus to
         * generate the Fortran AST and let that AST pretty-print itself.
         */
        for (int i = 0; i < size; i++) {
            // The node list may contain the same function several times; process each once.
            String functionName = analysis.getNodeList().get(i).getFunction().getName();
            if (!visitedFunctions.contains(functionName)) {
                visitedFunctions.add(functionName);
                // Type inference result for this function.
                ValueFlowMap<AggrValue<BasicMatrixValue>> currentOutSet =
                        analysis.getNodeList().get(i).getAnalysis().getCurrentOutSet();
                // TamerPlus analysis.
                StaticFunction function = analysis.getNodeList().get(i).getFunction();
                TransformationEngine transformationEngine = TransformationEngine
                        .forAST(function.getAst());
                AnalysisEngine analysisEngine = transformationEngine
                        .getAnalysisEngine();
                @SuppressWarnings("rawtypes")
                ASTNode fTree = transformationEngine
                        .getTIRToMcSAFIRWithoutTemp().getTransformedTree();
                Set<String> remainingVars = analysisEngine
                        .getTemporaryVariablesRemovalAnalysis().getRemainingVariablesNames();
                System.err.println("\ntamer plus analysis result: \n" + fTree.getPrettyPrinted() + "\n");
                if (Debug) System.err.println("remaining variables: \n" + remainingVars);
                // Fortran code generation (last flag true == nocheck: no run-time ABC code).
                Subprogram subprogram = FortranCodeASTGenerator.generateFortran(
                        (Function) fTree, currentOutSet, remainingVars, entryPointFile,
                        userDefinedFunctions, analysisEngine, true); // nocheck
                StringBuffer sb = new StringBuffer();
                String currentFunction = subprogram.getProgramTitle().getProgramName();
                String subprogramType = subprogram.getProgramTitle().getProgramType();
                if (subprogramType.equals("SUBROUTINE")) {
                    // Subroutines are wrapped in a module so callers can USE them.
                    sb.append("MODULE mod_" + currentFunction + "\n\nCONTAINS\n\n");
                    subprogram.pp(sb);
                    sb.append("\nEND MODULE");
                } else {
                    subprogram.pp(sb);
                }
                String output = renameCaseInsensitiveVariables(sb.toString(), remainingVars);
                System.err.println("pretty print the generated Fortran code:");
                System.out.println(output);
                // Write the generated Fortran code to files.
                try {
                    BufferedWriter out = new BufferedWriter(
                            new FileWriter(fileDir + currentFunction + ".f95"));
                    out.write(output);
                    out.flush();
                    out.close();
                } catch (IOException e) {
                    System.err.println(e);
                }
            } else {
                // already visited, do nothing.
            }
        }
    }

    /**
     * Command-line entry point: analyzes the file environment derived from the
     * options, then generates and writes one .f95 file per user function.
     *
     * @param options parsed Mc2For command-line options (arguments, nocheck, paths)
     */
    public static void compile(Mc2ForOptions options) {
        FileEnvironment fileEnvironment = new FileEnvironment(options); // get path/files
        // Arguments - TODO for now just parse them as inputs.
        String args = "double&1*1"; // start with the default
        if (options.arguments() != null && options.arguments().length() > 0) {
            args = options.arguments();
        }
        // TODO now it's for testing...
        String[] argsList = {args};
        BasicTamerTool tool = new BasicTamerTool();
        ValueAnalysis<AggrValue<BasicMatrixValue>> analysis = tool.analyze(argsList, fileEnvironment);
        int size = analysis.getNodeList().size();
        Set<String> visitedFunctions = new HashSet<String>();
        // Preprocess to get all the names of the user-defined functions in the program.
        Set<String> userDefinedFunctions = new HashSet<String>();
        for (int i = 0; i < size; i++) {
            String functionName = analysis.getNodeList().get(i).getFunction().getName();
            if (!functionName.equals(fileEnvironment.getMainFile().getName().replace(".m", ""))) {
                userDefinedFunctions.add(functionName);
            }
        }
        /*
         * Run TamerPlus analysis first, then use the AST from TamerPlus to
         * generate the Fortran AST and let that AST pretty-print itself.
         */
        for (int i = 0; i < size; i++) {
            // The node list may contain the same function several times; process each once.
            String functionName = analysis.getNodeList().get(i).getFunction().getName();
            if (!visitedFunctions.contains(functionName)) {
                visitedFunctions.add(functionName);
                // Type inference result for this function.
                ValueFlowMap<AggrValue<BasicMatrixValue>> currentOutSet =
                        analysis.getNodeList().get(i).getAnalysis().getCurrentOutSet();
                // TamerPlus analysis.
                StaticFunction function = analysis.getNodeList().get(i).getFunction();
                TransformationEngine transformationEngine = TransformationEngine
                        .forAST(function.getAst());
                AnalysisEngine analysisEngine = transformationEngine
                        .getAnalysisEngine();
                @SuppressWarnings("rawtypes")
                ASTNode fTree = transformationEngine
                        .getTIRToMcSAFIRWithoutTemp().getTransformedTree();
                Set<String> remainingVars = analysisEngine
                        .getTemporaryVariablesRemovalAnalysis().getRemainingVariablesNames();
                System.err.println("\ntamer plus analysis result: \n" + fTree.getPrettyPrinted() + "\n");
                if (Debug) System.err.println("remaining variables: \n" + remainingVars);
                // Fortran code generation; honor the user's nocheck option.
                Subprogram subprogram = FortranCodeASTGenerator.generateFortran(
                        (Function) fTree, currentOutSet, remainingVars,
                        fileEnvironment.getMainFile().getName().replace(".m", ""),
                        userDefinedFunctions, analysisEngine, options.nocheck());
                StringBuffer sb = new StringBuffer();
                String currentFunction = subprogram.getProgramTitle().getProgramName();
                String subprogramType = subprogram.getProgramTitle().getProgramType();
                if (subprogramType.equals("SUBROUTINE")) {
                    // Subroutines are wrapped in a module so callers can USE them.
                    sb.append("MODULE mod_" + currentFunction + "\n\nCONTAINS\n\n");
                    subprogram.pp(sb);
                    sb.append("\nEND MODULE");
                } else {
                    subprogram.pp(sb);
                }
                String output = renameCaseInsensitiveVariables(sb.toString(), remainingVars);
                if (options.nocheck()) {
                    System.err.println("***without run-time ABC code***");
                } else {
                    System.err.println("***with run-time ABC code***");
                }
                System.err.println("pretty print the generated Fortran code:");
                System.out.println(output);
                // Write the transformed result to files.
                try {
                    BufferedWriter out = new BufferedWriter(new FileWriter(
                            fileEnvironment.getPwd().getPath() + "/" + function.getName() + ".f95"));
                    out.write(output);
                    out.flush();
                    out.close();
                } catch (IOException e) {
                    System.err.println(e);
                }
            } else {
                // already visited, do nothing.
            }
        }
    }

    /**
     * Since variable names in MATLAB are case-sensitive while in Fortran they are
     * case-insensitive, rename every variable whose name is case-insensitively
     * equivalent to another variable by appending a "_rn&lt;j&gt;" suffix (the first
     * variant of each group keeps its name).
     *
     * Extracted from main/compile to remove duplication; also fixes a bug where the
     * map was keyed by the lower-cased name but looked up with the original name,
     * causing an NPE for mixed-case variables.
     *
     * @param output generated Fortran source text
     * @param remainingVars variable names surviving temporary-variable removal
     * @return output with clashing names disambiguated
     */
    private static String renameCaseInsensitiveVariables(String output, Set<String> remainingVars) {
        Map<String, ArrayList<String>> eqNameVars = new HashMap<String, ArrayList<String>>();
        for (String name : remainingVars) {
            for (String iterateVar : remainingVars) {
                if (!name.equals(iterateVar)
                        && name.toLowerCase().equals(iterateVar.toLowerCase())) {
                    String key = name.toLowerCase();
                    // BUGFIX: look up by the lower-cased key the map is keyed with,
                    // not by the original-case name (which could be absent -> NPE).
                    ArrayList<String> valueList = eqNameVars.get(key);
                    if (valueList == null) {
                        valueList = new ArrayList<String>();
                        eqNameVars.put(key, valueList);
                    }
                    if (!valueList.contains(name)) {
                        valueList.add(name);
                    }
                }
            }
        }
        if (Debug) System.out.println("variables are "
                + "case-insensitively equivalent:" + eqNameVars);
        for (String key : eqNameVars.keySet()) {
            ArrayList<String> variants = eqNameVars.get(key);
            // j == 0 keeps its original name; later variants get a unique suffix.
            for (int j = 1; j < variants.size(); j++) {
                String tempVar = variants.get(j);
                output = output.replaceAll("\\b" + tempVar + "\\b", tempVar + "_rn" + j);
            }
        }
        return output;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.reef.runtime.common.evaluator.task;

import com.google.protobuf.ByteString;
import org.apache.reef.driver.task.TaskConfigurationOptions;
import org.apache.reef.evaluator.context.parameters.ContextIdentifier;
import org.apache.reef.proto.ReefServiceProtos;
import org.apache.reef.runtime.common.evaluator.HeartBeatManager;
import org.apache.reef.runtime.common.utils.ExceptionCodec;
import org.apache.reef.tang.annotations.Parameter;
import org.apache.reef.task.TaskMessage;
import org.apache.reef.task.TaskMessageSource;
import org.apache.reef.util.Optional;

import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Represents the various states a Task could be in, enforces the legal
 * transitions between them, and reports state changes to the Driver via
 * heartbeats.
 */
public final class TaskStatus {
  private static final Logger LOG = Logger.getLogger(TaskStatus.class.getName());

  private final String taskId;
  private final String contextId;
  private final HeartBeatManager heartBeatManager;
  // Sources polled for messages to piggyback on RUNNING heartbeats.
  private final Set<TaskMessageSource> evaluatorMessageSources;
  private final ExceptionCodec exceptionCodec;
  // At most one of lastException / result may be present (see check()).
  private Optional<Throwable> lastException = Optional.empty();
  private Optional<byte[]> result = Optional.empty();
  private State state = State.PRE_INIT;

  @Inject
  TaskStatus(@Parameter(TaskConfigurationOptions.Identifier.class) final String taskId,
             @Parameter(ContextIdentifier.class) final String contextId,
             @Parameter(TaskConfigurationOptions.TaskMessageSources.class)
             final Set<TaskMessageSource> evaluatorMessageSources,
             final HeartBeatManager heartBeatManager,
             final ExceptionCodec exceptionCodec) {
    this.taskId = taskId;
    this.contextId = contextId;
    this.heartBeatManager = heartBeatManager;
    this.evaluatorMessageSources = evaluatorMessageSources;
    this.exceptionCodec = exceptionCodec;
  }

  /**
   * Checks whether a state transition is allowed by the Task lifecycle.
   *
   * @param from the current state (null is treated as "before PRE_INIT";
   *             NOTE(review): state is initialized to PRE_INIT, so from==null
   *             appears unreachable from this class — confirm)
   * @param to the proposed next state
   * @return true, if the state transition from state 'from' to state 'to' is legal.
   */
  private static boolean isLegal(final State from, final State to) {
    if (from == null) {
      return to == State.INIT;
    }
    switch (from) {
    case PRE_INIT:
      switch (to) {
      case INIT:
        return true;
      default:
        return false;
      }
    case INIT:
      switch (to) {
      case RUNNING:
      case FAILED:
      case KILLED:
      case DONE:
        return true;
      default:
        return false;
      }
    case RUNNING:
      switch (to) {
      case CLOSE_REQUESTED:
      case SUSPEND_REQUESTED:
      case FAILED:
      case KILLED:
      case DONE:
        return true;
      default:
        return false;
      }
    case CLOSE_REQUESTED:
      switch (to) {
      case FAILED:
      case KILLED:
      case DONE:
        return true;
      default:
        return false;
      }
    case SUSPEND_REQUESTED:
      switch (to) {
      case FAILED:
      case KILLED:
      case SUSPENDED:
        return true;
      default:
        return false;
      }
    case FAILED:
    case DONE:
    case KILLED:
      // Terminal states: no further transitions.
      return false;
    default:
      return false;
    }
  }

  public String getTaskId() {
    return this.taskId;
  }

  /**
   * Serializes the current status for a heartbeat. Exactly one of the
   * result, the last exception, or (while RUNNING) pending task messages
   * is attached to the proto.
   */
  ReefServiceProtos.TaskStatusProto toProto() {
    this.check();
    final ReefServiceProtos.TaskStatusProto.Builder resultBuilder =
        ReefServiceProtos.TaskStatusProto.newBuilder()
            .setContextId(this.contextId)
            .setTaskId(this.taskId)
            .setState(this.getProtoState());

    if (this.result.isPresent()) {
      resultBuilder.setResult(ByteString.copyFrom(this.result.get()));
    } else if (this.lastException.isPresent()) {
      // Encode the exception into the result field for the Driver to decode.
      final byte[] error = this.exceptionCodec.toBytes(this.lastException.get());
      resultBuilder.setResult(ByteString.copyFrom(error));
    } else if (this.state == State.RUNNING) {
      for (final TaskMessage taskMessage : this.getMessages()) {
        resultBuilder.addTaskMessage(ReefServiceProtos.TaskStatusProto.TaskMessageProto.newBuilder()
            .setSourceId(taskMessage.getMessageSourceID())
            .setMessage(ByteString.copyFrom(taskMessage.get()))
            .build());
      }
    }

    return resultBuilder.build();
  }

  /** Invariant check: a Task cannot have both a result and an exception. */
  private void check() {
    if (this.result.isPresent() && this.lastException.isPresent()) {
      throw new RuntimeException("Found both an exception and a result. This is unsupported.");
    }
  }

  /** Maps the internal state to the coarser-grained wire-protocol state. */
  private ReefServiceProtos.State getProtoState() {
    switch (this.state) {
    case INIT:
      return ReefServiceProtos.State.INIT;
    case CLOSE_REQUESTED:
    case SUSPEND_REQUESTED:
    case RUNNING:
      // Close/suspend requests are still reported as RUNNING on the wire.
      return ReefServiceProtos.State.RUNNING;
    case DONE:
      return ReefServiceProtos.State.DONE;
    case SUSPENDED:
      return ReefServiceProtos.State.SUSPEND;
    case FAILED:
      return ReefServiceProtos.State.FAILED;
    case KILLED:
      return ReefServiceProtos.State.KILLED;
    default:
      throw new RuntimeException("Unknown state: " + this.state);
    }
  }

  /** Records a failure, moves to FAILED, and immediately notifies the Driver. */
  void setException(final Throwable throwable) {
    synchronized (this.heartBeatManager) {
      this.lastException = Optional.of(throwable);
      this.state = State.FAILED;
      this.check();
      this.heartbeat();
    }
  }

  /**
   * Records the Task's result and transitions to the matching end state
   * (DONE or SUSPENDED, depending on what was requested), then heartbeats.
   */
  void setResult(final byte[] result) {
    synchronized (this.heartBeatManager) {
      this.result = Optional.ofNullable(result);
      if (this.state == State.RUNNING) {
        this.setState(State.DONE);
      } else if (this.state == State.SUSPEND_REQUESTED) {
        this.setState(State.SUSPENDED);
      } else if (this.state == State.CLOSE_REQUESTED) {
        this.setState(State.DONE);
      }
      this.check();
      this.heartbeat();
    }
  }

  /** Sends the current status to the Driver. */
  private void heartbeat() {
    this.heartBeatManager.sendTaskStatus(this.toProto());
  }

  /**
   * Sets the state to INIT and informs the driver about it.
   */
  void setInit() {
    LOG.log(Level.FINEST, "Sending Task INIT heartbeat to the Driver.");
    this.setState(State.INIT);
    this.heartbeat();
  }

  /**
   * Sets the state to RUNNING after the handlers for TaskStart have been called.
   */
  void setRunning() {
    this.setState(State.RUNNING);
  }

  void setCloseRequested() {
    this.setState(State.CLOSE_REQUESTED);
  }

  void setSuspendRequested() {
    this.setState(State.SUSPEND_REQUESTED);
  }

  /** Moves to KILLED and immediately notifies the Driver. */
  void setKilled() {
    this.setState(State.KILLED);
    this.heartbeat();
  }

  boolean isRunning() {
    return this.state == State.RUNNING;
  }

  boolean isNotRunning() {
    return this.state != State.RUNNING;
  }

  /** @return true if the Task reached a terminal state. */
  boolean hasEnded() {
    switch (this.state) {
    case DONE:
    case SUSPENDED:
    case FAILED:
    case KILLED:
      return true;
    default:
      return false;
    }
  }

  State getState() {
    return this.state;
  }

  /**
   * Applies a state transition, throwing if the transition is not legal
   * per isLegal().
   */
  private void setState(final State state) {
    if (isLegal(this.state, state)) {
      this.state = state;
    } else {
      final String msg = "Illegal state transition from [" + this.state + "] to [" + state + "]";
      LOG.log(Level.SEVERE, msg);
      throw new RuntimeException(msg);
    }
  }

  String getContextId() {
    return this.contextId;
  }

  /**
   * @return the messages to be sent on the Task's behalf in the next heartbeat.
   */
  private Collection<TaskMessage> getMessages() {
    final List<TaskMessage> messageList = new ArrayList<>(this.evaluatorMessageSources.size());
    for (final TaskMessageSource messageSource : this.evaluatorMessageSources) {
      final Optional<TaskMessage> taskMessageOptional = messageSource.getMessage();
      if (taskMessageOptional.isPresent()) {
        messageList.add(taskMessageOptional.get());
      }
    }
    return messageList;
  }

  // Task lifecycle states. PRE_INIT is the construction-time default.
  enum State {
    PRE_INIT,
    INIT,
    RUNNING,
    CLOSE_REQUESTED,
    SUSPEND_REQUESTED,
    SUSPENDED,
    FAILED,
    DONE,
    KILLED
  }
}
package com.sedmelluq.discord.lavaplayer.source.twitch;

import com.sedmelluq.discord.lavaplayer.player.AudioPlayerManager;
import com.sedmelluq.discord.lavaplayer.source.AudioSourceManager;
import com.sedmelluq.discord.lavaplayer.tools.ExceptionTools;
import com.sedmelluq.discord.lavaplayer.tools.FriendlyException;
import com.sedmelluq.discord.lavaplayer.tools.JsonBrowser;
import com.sedmelluq.discord.lavaplayer.tools.Units;
import com.sedmelluq.discord.lavaplayer.tools.io.HttpClientTools;
import com.sedmelluq.discord.lavaplayer.tools.io.HttpConfigurable;
import com.sedmelluq.discord.lavaplayer.tools.io.HttpInterface;
import com.sedmelluq.discord.lavaplayer.tools.io.HttpInterfaceManager;
import com.sedmelluq.discord.lavaplayer.track.AudioItem;
import com.sedmelluq.discord.lavaplayer.track.AudioReference;
import com.sedmelluq.discord.lavaplayer.track.AudioTrack;
import com.sedmelluq.discord.lavaplayer.track.AudioTrackInfo;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.HttpClientBuilder;

import static com.sedmelluq.discord.lavaplayer.tools.FriendlyException.Severity.SUSPICIOUS;

/**
 * Audio source manager which detects Twitch tracks by URL.
 */
public class TwitchStreamAudioSourceManager implements AudioSourceManager, HttpConfigurable {
  // BUGFIX: the dot in "twitch.tv" was unescaped and matched any character
  // (e.g. "https://twitchXtv/foo"); it is now escaped to match a literal '.'.
  private static final String STREAM_NAME_REGEX = "^https://(?:www\\.|go\\.)?twitch\\.tv/([^/]+)$";
  private static final Pattern streamNameRegex = Pattern.compile(STREAM_NAME_REGEX);

  public static final String DEFAULT_CLIENT_ID = "jzkbprff40iqj646a697cyrvl0zt2m6";

  private final HttpInterfaceManager httpInterfaceManager;
  private final String twitchClientId;

  /**
   * Create an instance.
   */
  public TwitchStreamAudioSourceManager() {
    this(DEFAULT_CLIENT_ID);
  }

  /**
   * Create an instance.
   * @param clientId The Twitch client id for your application.
   */
  public TwitchStreamAudioSourceManager(String clientId) {
    httpInterfaceManager = HttpClientTools.createDefaultThreadLocalManager();
    twitchClientId = clientId;
  }

  public String getClientId() {
    return twitchClientId;
  }

  @Override
  public String getSourceName() {
    return "twitch";
  }

  /**
   * Resolves a Twitch channel URL to a live stream track: fetches the channel
   * access token, extracts the channel id from it, then fetches channel info
   * to build the track title. Returns null for non-Twitch URLs and NO_TRACK
   * for offline/unknown channels.
   */
  @Override
  public AudioItem loadItem(AudioPlayerManager manager, AudioReference reference) {
    String streamName = getChannelIdentifierFromUrl(reference.identifier);
    if (streamName == null) {
      return null;
    }

    JsonBrowser accessToken = fetchAccessToken(streamName);
    if (accessToken == null || accessToken.get("token").isNull()) {
      return AudioReference.NO_TRACK;
    }

    String channelId;
    try {
      // The "token" field is itself a JSON document containing the channel id.
      JsonBrowser token = JsonBrowser.parse(accessToken.get("token").text());
      channelId = token.get("channel_id").text();
    } catch (IOException e) {
      // NOTE(review): malformed token JSON is silently treated as "not ours";
      // confirm this should not be NO_TRACK or a FriendlyException instead.
      return null;
    }

    JsonBrowser channelInfo = fetchStreamChannelInfo(channelId);

    if (channelInfo == null || channelInfo.get("stream").isNull()) {
      return AudioReference.NO_TRACK;
    } else {
      /* --- HELIX STUFF
      //Retrieve the data value list; this will have only one element since we're getting only one stream's information
      List<JsonBrowser> dataList = channelInfo.get("data").values();

      //The value list is empty if the stream is offline, even when hosting another channel
      if (dataList.size() == 0){
        return null;
      }

      //The first one has the title of the broadcast
      JsonBrowser channelData = dataList.get(0);
      String status = channelData.get("title").text();
      */

      JsonBrowser channelData = channelInfo.get("stream").get("channel");
      String status = channelData.get("status").text();

      return new TwitchStreamAudioTrack(new AudioTrackInfo(
          status,
          streamName,
          Units.DURATION_MS_UNKNOWN,
          reference.identifier,
          true,
          reference.identifier
      ), this);
    }
  }

  @Override
  public boolean isTrackEncodable(AudioTrack track) {
    return true;
  }

  @Override
  public void encodeTrack(AudioTrack track, DataOutput output) throws IOException {
    // Nothing special to do, URL (identifier) is enough
  }

  @Override
  public AudioTrack decodeTrack(AudioTrackInfo trackInfo, DataInput input) throws IOException {
    return new TwitchStreamAudioTrack(trackInfo, this);
  }

  /**
   * Extract channel identifier from a channel URL.
   * @param url Channel URL
   * @return Channel identifier (for API requests), or null if the URL is not
   *         a Twitch channel URL
   */
  public static String getChannelIdentifierFromUrl(String url) {
    Matcher matcher = streamNameRegex.matcher(url);
    if (!matcher.matches()) {
      return null;
    }

    return matcher.group(1);
  }

  /**
   * @param url Request URL
   * @return Request with necessary headers attached.
   */
  public HttpUriRequest createGetRequest(String url) {
    return addClientHeaders(new HttpGet(url), twitchClientId);
  }

  /**
   * @param url Request URL
   * @return Request with necessary headers attached.
   */
  public HttpUriRequest createGetRequest(URI url) {
    return addClientHeaders(new HttpGet(url), twitchClientId);
  }

  /**
   * @return Get an HTTP interface for a playing track.
   */
  public HttpInterface getHttpInterface() {
    return httpInterfaceManager.getInterface();
  }

  @Override
  public void configureRequests(Function<RequestConfig, RequestConfig> configurator) {
    httpInterfaceManager.configureRequests(configurator);
  }

  @Override
  public void configureBuilder(Consumer<HttpClientBuilder> configurator) {
    httpInterfaceManager.configureBuilder(configurator);
  }

  /** Attaches the Twitch v5 Accept header and the client id to a request. */
  private static HttpUriRequest addClientHeaders(HttpUriRequest request, String clientId) {
    request.setHeader("Accept", "application/vnd.twitchtv.v5+json; charset=UTF-8");
    request.setHeader("Client-ID", clientId);
    return request;
  }

  /** Fetches the channel access token JSON for a channel name, or throws FriendlyException on I/O failure. */
  private JsonBrowser fetchAccessToken(String name) {
    try (HttpInterface httpInterface = getHttpInterface()) {
      // Get access token by channel name
      HttpUriRequest request = createGetRequest("https://api.twitch.tv/api/channels/" + name + "/access_token");
      return HttpClientTools.fetchResponseAsJson(httpInterface, request);
    } catch (IOException e) {
      throw new FriendlyException("Loading Twitch channel access token failed.", SUSPICIOUS, e);
    }
  }

  /** Fetches kraken stream info for a channel id, or throws FriendlyException on I/O failure. */
  private JsonBrowser fetchStreamChannelInfo(String channelId) {
    try (HttpInterface httpInterface = getHttpInterface()) {
      // helix/streams?user_login=name
      HttpUriRequest request = createGetRequest("https://api.twitch.tv/kraken/streams/" + channelId +
              "?stream_type=all");
      return HttpClientTools.fetchResponseAsJson(httpInterface, request);
    } catch (IOException e) {
      throw new FriendlyException("Loading Twitch channel information failed.", SUSPICIOUS, e);
    }
  }

  @Override
  public void shutdown() {
    ExceptionTools.closeWithWarnings(httpInterfaceManager);
  }
}
/*
 * oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
 *
 * Copyright (c) 2014, Gluu
 */

package org.xdi.oxauth.service;

import org.gluu.persist.ldap.impl.LdapEntryManager;
import org.gluu.persist.model.BatchOperation;
import org.gluu.persist.model.ProcessBatchOperation;
import org.slf4j.Logger;
import org.xdi.model.ApplicationType;
import org.xdi.oxauth.model.common.AuthorizationGrant;
import org.xdi.oxauth.model.common.AuthorizationGrantList;
import org.xdi.oxauth.model.configuration.AppConfiguration;
import org.xdi.oxauth.model.fido.u2f.DeviceRegistration;
import org.xdi.oxauth.model.fido.u2f.RequestMessageLdap;
import org.xdi.oxauth.model.registration.Client;
import org.xdi.oxauth.service.cdi.event.CleanerEvent;
import org.xdi.oxauth.service.fido.u2f.DeviceRegistrationService;
import org.xdi.oxauth.service.fido.u2f.RequestService;
import org.xdi.oxauth.uma.service.UmaPctService;
import org.xdi.oxauth.uma.service.UmaPermissionService;
import org.xdi.oxauth.uma.service.UmaResourceService;
import org.xdi.oxauth.uma.service.UmaRptService;
import org.xdi.service.cdi.async.Asynchronous;
import org.xdi.service.cdi.event.Scheduled;
import org.xdi.service.timer.event.TimerEvent;
import org.xdi.service.timer.schedule.TimerSchedule;

import javax.ejb.DependsOn;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Periodic cleanup job: on a configurable schedule it removes expired grants,
 * clients, UMA entities, U2F requests/registrations, session data and old
 * metric entries.
 *
 * @author Yuriy Zabrovarnyy
 * @author Javier Rojas Blum
 * @version August 9, 2017
 */
@ApplicationScoped
@DependsOn("appInitializer")
@Named
public class CleanerTimer {

    // Page size used for every batched LDAP query below.
    public final static int BATCH_SIZE = 100;
    private final static int DEFAULT_INTERVAL = 600; // 10 minutes

    @Inject
    private Logger log;

    @Inject
    private LdapEntryManager ldapEntryManager;

    @Inject
    private AuthorizationGrantList authorizationGrantList;

    @Inject
    private ClientService clientService;

    @Inject
    private GrantService grantService;

    @Inject
    private UmaRptService umaRptService;

    @Inject
    private UmaPctService umaPctService;

    @Inject
    private UmaPermissionService umaPermissionService;

    @Inject
    private UmaResourceService umaResourceService;

    @Inject
    private SessionIdService sessionIdService;

    @Inject
    @Named("u2fRequestService")
    private RequestService u2fRequestService;

    @Inject
    private MetricService metricService;

    @Inject
    private DeviceRegistrationService deviceRegistrationService;

    @Inject
    private AppConfiguration appConfiguration;

    @Inject
    private Event<TimerEvent> cleanerEvent;

    // Re-entrancy guard so overlapping timer firings skip instead of stacking up.
    private AtomicBoolean isActive;

    /**
     * Schedules the recurring cleaner event. Uses the configured interval, or
     * DEFAULT_INTERVAL when the configuration value is missing/non-positive.
     */
    public void initTimer() {
        log.debug("Initializing Cleaner Timer");
        this.isActive = new AtomicBoolean(false);

        int interval = appConfiguration.getCleanServiceInterval();
        if (interval <= 0) {
            interval = DEFAULT_INTERVAL;
        }
        cleanerEvent.fire(new TimerEvent(new TimerSchedule(interval, interval), new CleanerEvent(),
                Scheduled.Literal.INSTANCE));
    }

    /**
     * Timer callback: runs all cleanup phases. Guarded by isActive so that a
     * long-running pass is never entered twice concurrently.
     */
    @Asynchronous
    public void process(@Observes @Scheduled CleanerEvent cleanerEvent) {
        if (this.isActive.get()) {
            return;
        }

        if (!this.isActive.compareAndSet(false, true)) {
            return;
        }

        try {
            processAuthorizationGrantList();
            processRegisteredClients();

            Date now = new Date();
            this.umaRptService.cleanup(now);
            this.umaPermissionService.cleanup(now);
            this.umaPctService.cleanup(now);
            this.umaResourceService.cleanup(now);

            processU2fRequests();
            processU2fDeviceRegistrations();

            processMetricEntries();
        } finally {
            this.isActive.set(false);
        }
    }

    /** Delegates expired-grant removal to GrantService. */
    private void processAuthorizationGrantList() {
        log.debug("Start AuthorizationGrant clean up");
        grantService.cleanUp();
        log.debug("End AuthorizationGrant clean up");
    }

    /**
     * Removes clients whose secret expiration date has passed, together with
     * their outstanding authorization grants. Processed in batches of BATCH_SIZE.
     */
    private void processRegisteredClients() {
        log.debug("Start Client clean up");

        BatchOperation<Client> clientBatchService = new ProcessBatchOperation<Client>() {
            @Override
            public void performAction(List<Client> entries) {
                for (Client client : entries) {
                    try {
                        GregorianCalendar now = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
                        GregorianCalendar expirationDate = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
                        // NOTE(review): getClientSecretExpiresAt() appears assumed non-null
                        // here — a null value would NPE in setTime(); confirm upstream query
                        // only returns clients with the attribute set.
                        expirationDate.setTime(client.getClientSecretExpiresAt());
                        if (expirationDate.before(now)) {
                            // Remove the client's grants first, then the client itself.
                            List<AuthorizationGrant> toRemove = authorizationGrantList.getAuthorizationGrant(client.getClientId());
                            authorizationGrantList.removeAuthorizationGrants(toRemove);

                            log.debug("Removing Client: {}, Expiration date: {}",
                                    client.getClientId(),
                                    client.getClientSecretExpiresAt());
                            clientService.remove(client);
                        }
                    } catch (Exception e) {
                        log.error("Failed to remove entry", e);
                    }
                }
            }
        };
        clientService.getClientsWithExpirationDate(clientBatchService, new String[] {"inum", "oxAuthClientSecretExpiresAt"}, 0, BATCH_SIZE);

        log.debug("End Client clean up");
    }

    /** Removes U2F request messages older than 90 seconds, in batches. */
    private void processU2fRequests() {
        log.debug("Start U2F request clean up");

        Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.SECOND, -90);
        final Date expirationDate = calendar.getTime();

        BatchOperation<RequestMessageLdap> requestMessageLdapBatchService = new ProcessBatchOperation<RequestMessageLdap>() {
            @Override
            public void performAction(List<RequestMessageLdap> entries) {
                for (RequestMessageLdap requestMessageLdap : entries) {
                    try {
                        log.debug("Removing RequestMessageLdap: {}, Creation date: {}",
                                requestMessageLdap.getRequestId(),
                                requestMessageLdap.getCreationDate());
                        u2fRequestService.removeRequestMessage(requestMessageLdap);
                    } catch (Exception e) {
                        log.error("Failed to remove entry", e);
                    }
                }
            }
        };
        u2fRequestService.getExpiredRequestMessages(requestMessageLdapBatchService, expirationDate, new String[] {"oxRequestId", "creationDate"}, 0, BATCH_SIZE);

        log.debug("End U2F request clean up");
    }

    /** Removes expired U2F device registrations (older than 90 seconds), in batches. */
    private void processU2fDeviceRegistrations() {
        log.debug("Start U2F request clean up");

        Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.SECOND, -90);
        final Date expirationDate = calendar.getTime();

        BatchOperation<DeviceRegistration> deviceRegistrationBatchService = new ProcessBatchOperation<DeviceRegistration>() {
            @Override
            public void performAction(List<DeviceRegistration> entries) {
                for (DeviceRegistration deviceRegistration : entries) {
                    try {
                        log.debug("Removing DeviceRegistration: {}, Creation date: {}",
                                deviceRegistration.getId(),
                                deviceRegistration.getCreationDate());
                        deviceRegistrationService.removeUserDeviceRegistration(deviceRegistration);
                    } catch (Exception e) {
                        log.error("Failed to remove entry", e);
                    }
                }
            }
        };
        deviceRegistrationService.getExpiredDeviceRegistrations(deviceRegistrationBatchService, expirationDate, new String[] {"oxId", "creationDate"}, 0, BATCH_SIZE);

        log.debug("End U2F request clean up");
    }

    /** Removes metric entries older than the configured retention window (in days). */
    private void processMetricEntries() {
        log.debug("Start metric entries clean up");

        int keepDataDays = appConfiguration.getMetricReporterKeepDataDays();

        Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        calendar.add(Calendar.DATE, -keepDataDays);
        Date expirationDate = calendar.getTime();

        metricService.removeExpiredMetricEntries(expirationDate, ApplicationType.OX_AUTH, metricService.applianceInum(), 0, BATCH_SIZE);

        log.debug("End metric entries clean up");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.core;

import javax.servlet.DispatcherType;
import javax.servlet.Servlet;
import javax.servlet.ServletRequest;

import org.apache.catalina.Globals;
import org.apache.catalina.Wrapper;
import org.apache.catalina.comet.CometFilter;
import org.apache.catalina.connector.Request;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.descriptor.web.FilterMap;

/**
 * Factory for the creation and caching of Filters and creation
 * of Filter Chains.
 *
 * @author Greg Murray
 * @author Remy Maucherat
 */
public final class ApplicationFilterFactory {

    private ApplicationFilterFactory() {
        // Prevent instance creation. This is a utility class.
    }

    /**
     * Construct and return a FilterChain implementation that will wrap the
     * execution of the specified servlet instance. If we should not execute
     * a filter chain at all, return <code>null</code>.
     *
     * Path-mapped filters are added first, then servlet-name-mapped filters,
     * each restricted to the current dispatcher type. For Comet requests only
     * CometFilter instances are added.
     *
     * @param request The servlet request we are processing
     * @param wrapper The wrapper managing the servlet instance
     * @param servlet The servlet instance to be wrapped
     */
    public static ApplicationFilterChain createFilterChain
        (ServletRequest request, Wrapper wrapper, Servlet servlet) {

        // get the dispatcher type
        DispatcherType dispatcher = null;
        if (request.getAttribute(Globals.DISPATCHER_TYPE_ATTR) != null) {
            dispatcher = (DispatcherType) request.getAttribute(
                    Globals.DISPATCHER_TYPE_ATTR);
        }
        String requestPath = null;
        Object attribute = request.getAttribute(
                Globals.DISPATCHER_REQUEST_PATH_ATTR);

        if (attribute != null){
            requestPath = attribute.toString();
        }

        // If there is no servlet to execute, return null
        if (servlet == null)
            return (null);

        boolean comet = false;

        // Create and initialize a filter chain object
        ApplicationFilterChain filterChain = null;
        if (request instanceof Request) {
            Request req = (Request) request;
            comet = req.isComet();
            if (Globals.IS_SECURITY_ENABLED) {
                // Security: Do not recycle
                filterChain = new ApplicationFilterChain();
                if (comet) {
                    req.setFilterChain(filterChain);
                }
            } else {
                // Reuse the chain cached on the request where possible.
                filterChain = (ApplicationFilterChain) req.getFilterChain();
                if (filterChain == null) {
                    filterChain = new ApplicationFilterChain();
                    req.setFilterChain(filterChain);
                }
            }
        } else {
            // Request dispatcher in use
            filterChain = new ApplicationFilterChain();
        }

        filterChain.setServlet(servlet);

        filterChain.setSupport
            (((StandardWrapper)wrapper).getInstanceSupport());

        // Acquire the filter mappings for this Context
        StandardContext context = (StandardContext) wrapper.getParent();
        FilterMap filterMaps[] = context.findFilterMaps();

        // If there are no filter mappings, we are done
        if ((filterMaps == null) || (filterMaps.length == 0))
            return (filterChain);

        // Acquire the information we will need to match filter mappings
        String servletName = wrapper.getName();

        // Add the relevant path-mapped filters to this filter chain
        for (int i = 0; i < filterMaps.length; i++) {
            if (!matchDispatcher(filterMaps[i] ,dispatcher)) {
                continue;
            }
            if (!matchFiltersURL(filterMaps[i], requestPath))
                continue;
            ApplicationFilterConfig filterConfig = (ApplicationFilterConfig)
                context.findFilterConfig(filterMaps[i].getFilterName());
            if (filterConfig == null) {
                // FIXME - log configuration problem
                continue;
            }
            boolean isCometFilter = false;
            if (comet) {
                try {
                    isCometFilter = filterConfig.getFilter() instanceof CometFilter;
                } catch (Exception e) {
                    // Note: The try catch is there because getFilter has a lot of
                    // declared exceptions. However, the filter is allocated much
                    // earlier
                    Throwable t = ExceptionUtils.unwrapInvocationTargetException(e);
                    ExceptionUtils.handleThrowable(t);
                }
                if (isCometFilter) {
                    filterChain.addFilter(filterConfig);
                }
            } else {
                filterChain.addFilter(filterConfig);
            }
        }

        // Add filters that match on servlet name second
        for (int i = 0; i < filterMaps.length; i++) {
            if (!matchDispatcher(filterMaps[i] ,dispatcher)) {
                continue;
            }
            if (!matchFiltersServlet(filterMaps[i], servletName))
                continue;
            ApplicationFilterConfig filterConfig = (ApplicationFilterConfig)
                context.findFilterConfig(filterMaps[i].getFilterName());
            if (filterConfig == null) {
                // FIXME - log configuration problem
                continue;
            }
            boolean isCometFilter = false;
            if (comet) {
                try {
                    isCometFilter = filterConfig.getFilter() instanceof CometFilter;
                } catch (Exception e) {
                    // Note: The try catch is there because getFilter has a lot of
                    // declared exceptions. However, the filter is allocated much
                    // earlier
                    // NOTE(review): unlike the path-mapped loop above, this catch
                    // does not unwrap/handle the throwable — confirm whether that
                    // asymmetry is intentional.
                }
                if (isCometFilter) {
                    filterChain.addFilter(filterConfig);
                }
            } else {
                filterChain.addFilter(filterConfig);
            }
        }

        // Return the completed filter chain
        return (filterChain);
    }


    // -------------------------------------------------------- Private Methods


    /**
     * Return <code>true</code> if the context-relative request path
     * matches the requirements of the specified filter mapping;
     * otherwise, return <code>false</code>.
* * @param filterMap Filter mapping being checked * @param requestPath Context-relative request path of this request */ private static boolean matchFiltersURL(FilterMap filterMap, String requestPath) { // Check the specific "*" special URL pattern, which also matches // named dispatches if (filterMap.getMatchAllUrlPatterns()) return (true); if (requestPath == null) return (false); // Match on context relative request path String[] testPaths = filterMap.getURLPatterns(); for (int i = 0; i < testPaths.length; i++) { if (matchFiltersURL(testPaths[i], requestPath)) { return (true); } } // No match return (false); } /** * Return <code>true</code> if the context-relative request path * matches the requirements of the specified filter mapping; * otherwise, return <code>false</code>. * * @param testPath URL mapping being checked * @param requestPath Context-relative request path of this request */ private static boolean matchFiltersURL(String testPath, String requestPath) { if (testPath == null) return (false); // Case 1 - Exact Match if (testPath.equals(requestPath)) return (true); // Case 2 - Path Match ("/.../*") if (testPath.equals("/*")) return (true); if (testPath.endsWith("/*")) { if (testPath.regionMatches(0, requestPath, 0, testPath.length() - 2)) { if (requestPath.length() == (testPath.length() - 2)) { return (true); } else if ('/' == requestPath.charAt(testPath.length() - 2)) { return (true); } } return (false); } // Case 3 - Extension Match if (testPath.startsWith("*.")) { int slash = requestPath.lastIndexOf('/'); int period = requestPath.lastIndexOf('.'); if ((slash >= 0) && (period > slash) && (period != requestPath.length() - 1) && ((requestPath.length() - period) == (testPath.length() - 1))) { return (testPath.regionMatches(2, requestPath, period + 1, testPath.length() - 2)); } } // Case 4 - "Default" Match return (false); // NOTE - Not relevant for selecting filters } /** * Return <code>true</code> if the specified servlet name matches * the requirements 
of the specified filter mapping; otherwise * return <code>false</code>. * * @param filterMap Filter mapping being checked * @param servletName Servlet name being checked */ private static boolean matchFiltersServlet(FilterMap filterMap, String servletName) { if (servletName == null) { return (false); } // Check the specific "*" special servlet name else if (filterMap.getMatchAllServletNames()) { return (true); } else { String[] servletNames = filterMap.getServletNames(); for (int i = 0; i < servletNames.length; i++) { if (servletName.equals(servletNames[i])) { return (true); } } return false; } } /** * Convenience method which returns true if the dispatcher type * matches the dispatcher types specified in the FilterMap */ private static boolean matchDispatcher(FilterMap filterMap, DispatcherType type) { switch (type) { case FORWARD : if ((filterMap.getDispatcherMapping() & FilterMap.FORWARD) > 0) { return true; } break; case INCLUDE : if ((filterMap.getDispatcherMapping() & FilterMap.INCLUDE) > 0) { return true; } break; case REQUEST : if ((filterMap.getDispatcherMapping() & FilterMap.REQUEST) > 0) { return true; } break; case ERROR : if ((filterMap.getDispatcherMapping() & FilterMap.ERROR) > 0) { return true; } break; case ASYNC : if ((filterMap.getDispatcherMapping() & FilterMap.ASYNC) > 0) { return true; } break; } return false; } }
/* * Copyright 2015 West Coast Informatics, LLC */ package org.ihtsdo.otf.ts.jpa.client; import java.util.Properties; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response.Status.Family; import org.apache.log4j.Logger; import org.ihtsdo.otf.ts.helpers.AssociationReferenceRefSetMemberList; import org.ihtsdo.otf.ts.helpers.AssociationReferenceRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.AttributeValueRefSetMemberList; import org.ihtsdo.otf.ts.helpers.AttributeValueRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.ComplexMapRefSetMemberList; import org.ihtsdo.otf.ts.helpers.ComplexMapRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.ConceptList; import org.ihtsdo.otf.ts.helpers.ConceptListJpa; import org.ihtsdo.otf.ts.helpers.ConfigUtility; import org.ihtsdo.otf.ts.helpers.DescriptionTypeRefSetMemberList; import org.ihtsdo.otf.ts.helpers.DescriptionTypeRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.LanguageRefSetMemberList; import org.ihtsdo.otf.ts.helpers.LanguageRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.ModuleDependencyRefSetMemberList; import org.ihtsdo.otf.ts.helpers.ModuleDependencyRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.PfsParameterJpa; import org.ihtsdo.otf.ts.helpers.RefsetDescriptorRefSetMemberList; import org.ihtsdo.otf.ts.helpers.RefsetDescriptorRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.RelationshipList; import org.ihtsdo.otf.ts.helpers.RelationshipListJpa; import org.ihtsdo.otf.ts.helpers.SearchResultList; import org.ihtsdo.otf.ts.helpers.SearchResultListJpa; import org.ihtsdo.otf.ts.helpers.SimpleMapRefSetMemberList; import org.ihtsdo.otf.ts.helpers.SimpleMapRefSetMemberListJpa; import org.ihtsdo.otf.ts.helpers.SimpleRefSetMemberList; import org.ihtsdo.otf.ts.helpers.SimpleRefSetMemberListJpa; import org.ihtsdo.otf.ts.rest.ContentServiceRest; import org.ihtsdo.otf.ts.rf2.Concept; import org.ihtsdo.otf.ts.rf2.Description; import org.ihtsdo.otf.ts.rf2.Relationship; import 
org.ihtsdo.otf.ts.rf2.jpa.ConceptJpa; import org.ihtsdo.otf.ts.rf2.jpa.DescriptionJpa; import org.ihtsdo.otf.ts.rf2.jpa.RelationshipJpa; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; /** * A client for connecting to a content REST service. */ public class ContentClientRest implements ContentServiceRest { /** The config. */ private Properties config = null; /** * Instantiates a {@link ContentClientRest} from the specified parameters. * * @param config the config */ public ContentClientRest(Properties config) { this.config = config; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.rest.ContentServiceRest#getConcept(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public ConceptList getConcepts(String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get concepts " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/concepts/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ConceptListJpa list = (ConceptListJpa) ConfigUtility.getGraphForString(resultString, ConceptListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getSingleConcept(java.lang.String * , java.lang.String, java.lang.String, java.lang.String) */ @Override public Concept getSingleConcept(String terminologyId, String terminology, String 
version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get single concept " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/concept/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ConceptJpa c = (ConceptJpa) ConfigUtility.getGraphForString(resultString, ConceptJpa.class); return c; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.mapping.rest.ContentServiceRest#findConceptsForQuery(java * .lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public SearchResultList findConceptsForQuery(String terminology, String version, String searchString, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find concepts " + terminology + ", " + version + ", " + searchString + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/concepts/" + terminology + "/" + version + "/query/" + searchString); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object SearchResultListJpa list = (SearchResultListJpa) ConfigUtility.getGraphForString(resultString, SearchResultListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getDescendantConcepts(java.lang * .String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public ConceptList findDescendantConcepts(String terminologyId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find descendant concepts " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/concepts/" + terminology + "/" + version + "/" + terminologyId + "/descendants"); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ConceptListJpa list = (ConceptListJpa) ConfigUtility.getGraphForString(resultString, ConceptListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getChildConcepts(java.lang.String * , java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public ConceptList findChildConcepts(String terminologyId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find child concepts " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/concepts/" + terminology + "/" + version + "/" + terminologyId + "/children"); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ConceptListJpa list = (ConceptListJpa) ConfigUtility.getGraphForString(resultString, ConceptListJpa.class); return list; } /** * Find parent concepts. * * @param terminologyId the terminology id * @param terminology the terminology * @param version the version * @param pfs the pfs * @param authToken the auth token * @return the concept list * @throws Exception the exception */ @Override public ConceptList findParentConcepts(String terminologyId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find parent concepts " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/concepts/" + terminology + "/" + version + "/" + terminologyId + "/parents"); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ConceptListJpa list = (ConceptListJpa) ConfigUtility.getGraphForString(resultString, ConceptListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getAncestorConcepts(java.lang * .String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public ConceptList findAncestorConcepts(String terminologyId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find ancestor concepts " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/concepts/" + terminology + "/" + version + "/" + terminologyId + "/ancestors"); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ConceptListJpa list = (ConceptListJpa) ConfigUtility.getGraphForString(resultString, ConceptListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getDescription(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public Description getDescription(String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get description " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/description/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object DescriptionJpa description = (DescriptionJpa) ConfigUtility.getGraphForString(resultString, DescriptionJpa.class); return description; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentChangeServiceRest#luceneReindex(java.lang * .String, java.lang.String) */ @Override public void 
luceneReindex(String indexedObjects, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - lucene reindex " + indexedObjects); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/reindex"); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.TEXT_PLAIN) .post(ClientResponse.class, indexedObjects); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { // do nothing } else { if (response.getStatus() != 204) throw new Exception("Unexpected status " + response.getStatus()); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#loadTerminologyRf2Snapshot(java * .lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public void loadTerminologyRf2Snapshot(String terminology, String version, String inputDir, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - load terminology rf2 snapshot " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/terminology/load/rf2/snapshot/" + terminology + "/" + version); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .put(ClientResponse.class, inputDir); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { // do nothing } else { throw new Exception("Unexpected status " + response.getStatus()); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#loadTerminologyRf2Full(java.lang * .String, java.lang.String, java.lang.String, java.lang.String) */ @Override public void loadTerminologyRf2Full(String terminology, String version, String inputDir, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - 
load terminology rf2 full " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/terminology/load/rf2/full/" + terminology + "/" + version); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .put(ClientResponse.class, inputDir); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { // do nothing } else { throw new Exception("Unexpected status " + response.getStatus()); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#loadTerminologyRf2Delta(java. * lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public void loadTerminologyRf2Delta(String terminology, String inputDir, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - load terminology rf2 delta " + terminology); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/terminology/load/rf2/delta/" + terminology); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .put(ClientResponse.class, inputDir); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { // do nothing } else { throw new Exception("Unexpected status " + response.getStatus()); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#loadTerminologyClaml(java.lang * .String, java.lang.String, java.lang.String, java.lang.String) */ @Override public void loadTerminologyClaml(String terminology, String version, String inputFile, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - load terminology ClaML " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + 
"/terminology/load/claml/" + terminology + "/" + version); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .put(ClientResponse.class, inputFile); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { // do nothing } else { throw new Exception("Unexpected status " + response.getStatus()); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#computeTransitiveClosure(java * .lang.String, java.lang.String, java.lang.String) */ @Override public void computeTransitiveClosure(String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - compute transitive closure"); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/terminology/closure/compute/" + terminology + "/" + version); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { // do nothing } else { throw new Exception("Unexpected status " + response.getStatus()); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#removeTerminology(java.lang.String * , java.lang.String, java.lang.String) */ @Override public void removeTerminology(String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - remove terminology " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/terminology/remove/" + terminology + "/" + version); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) 
.delete(ClientResponse.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { // do nothing } else { throw new Exception("Unexpected status " + response.getStatus()); } } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#findAssociationReferenceRefSetMembers * (java.lang.String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public AssociationReferenceRefSetMemberList findAssociationReferenceRefSetMembers( String refSetId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find association reference members by refset " + refSetId + ", " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/associationReferenceMember/refSet/" + terminology + "/" + version + "/" + refSetId); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object AssociationReferenceRefSetMemberListJpa list = (AssociationReferenceRefSetMemberListJpa) ConfigUtility .getGraphForString(resultString, AssociationReferenceRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#findAttributeValueRefSetMembers * (java.lang.String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public AttributeValueRefSetMemberList findAttributeValueRefSetMembers( String refSetId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find attribute value members by refset " + refSetId + ", " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/attributeValueMember/refSet/" + terminology + "/" + version + "/" + refSetId); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object AttributeValueRefSetMemberListJpa list = (AttributeValueRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, AttributeValueRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#findComplexMapRefSetMembers(java * .lang.String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public ComplexMapRefSetMemberList findComplexMapRefSetMembers( String refSetId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find complex map members by refset " + refSetId + ", " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/complexMapMember/refSet/" + terminology + "/" + version + "/" + refSetId); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ComplexMapRefSetMemberListJpa list = (ComplexMapRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, ComplexMapRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#findDescriptionTypeRefSetMembers * (java.lang.String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public DescriptionTypeRefSetMemberList findDescriptionTypeRefSetMembers( String refSetId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find description type members by refset " + refSetId + ", " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/descriptionTypeMember/refSet/" + terminology + "/" + version + "/" + refSetId); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object DescriptionTypeRefSetMemberListJpa list = (DescriptionTypeRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, DescriptionTypeRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#findLanguageRefSetMembers(java * .lang.String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public LanguageRefSetMemberList findLanguageRefSetMembers(String refSetId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find language members by refset " + refSetId + ", " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/languageMember/refSet/" + terminology + "/" + version + "/" + refSetId); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object LanguageRefSetMemberListJpa list = (LanguageRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, LanguageRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.ts.rest.ContentServiceRest# * getModuleDependencyRefSetMembersForModule(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public ModuleDependencyRefSetMemberList getModuleDependencyRefSetMembersForModule( String moduleId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find module dependency members by module" + moduleId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/moduleDependencyMember/module/" + terminology + "/" + version + "/" + moduleId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ModuleDependencyRefSetMemberListJpa list = (ModuleDependencyRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, ModuleDependencyRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * 
org.ihtsdo.otf.ts.rest.ContentServiceRest#getRefsetDescriptorRefSetMembers * (java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public RefsetDescriptorRefSetMemberList getRefsetDescriptorRefSetMembers( String refSetId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get refset descriptor members by refset " + refSetId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/refsetDescriptorMember/refSet/" + terminology + "/" + version + "/" + refSetId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object RefsetDescriptorRefSetMemberListJpa list = (RefsetDescriptorRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, RefsetDescriptorRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#findSimpleMapRefSetMembers(java * .lang.String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public SimpleMapRefSetMemberList findSimpleMapRefSetMembers(String refSetId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find simple map members by refset " + refSetId + ", " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/simpleMapMember/refSet/" + terminology + "/" + version + "/" + refSetId); String pfsString 
= ConfigUtility.getStringForGraph(pfs == null ? new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object SimpleMapRefSetMemberListJpa list = (SimpleMapRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, SimpleMapRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#findSimpleRefSetMembers(java. * lang.String, java.lang.String, java.lang.String, * org.ihtsdo.otf.ts.helpers.PfsParameterJpa, java.lang.String) */ @Override public SimpleRefSetMemberList findSimpleRefSetMembers(String refSetId, String terminology, String version, PfsParameterJpa pfs, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - find simple members by refset " + refSetId + ", " + terminology + ", " + version + ", " + pfs); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/simpleMember/refSet/" + terminology + "/" + version + "/" + refSetId); String pfsString = ConfigUtility.getStringForGraph(pfs == null ? 
new PfsParameterJpa() : pfs); Logger.getLogger(getClass()).debug(pfsString); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken) .header("Content-type", MediaType.APPLICATION_XML) .post(ClientResponse.class, pfsString); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object SimpleRefSetMemberListJpa list = (SimpleRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, SimpleRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.ts.rest.ContentServiceRest# * getAssociationReferenceRefSetMembersForConcept(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public AssociationReferenceRefSetMemberList getAssociationReferenceRefSetMembersForConcept( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get association reference members by concept " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/associationReferenceMember/concept/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object AssociationReferenceRefSetMemberListJpa list = (AssociationReferenceRefSetMemberListJpa) ConfigUtility .getGraphForString(resultString, AssociationReferenceRefSetMemberListJpa.class); 
return list; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.ts.rest.ContentServiceRest# * getAssociationReferenceRefSetMembersForDescription(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public AssociationReferenceRefSetMemberList getAssociationReferenceRefSetMembersForDescription( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get association reference members by description " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/associationReferenceMember/description/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object AssociationReferenceRefSetMemberListJpa list = (AssociationReferenceRefSetMemberListJpa) ConfigUtility .getGraphForString(resultString, AssociationReferenceRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.ts.rest.ContentServiceRest# * getAttributeValueRefSetMembersForConcept(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public AttributeValueRefSetMemberList getAttributeValueRefSetMembersForConcept( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get attribute value members by concept " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + 
"/content/attributeValueMember/concept/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object AttributeValueRefSetMemberListJpa list = (AttributeValueRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, AttributeValueRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.ts.rest.ContentServiceRest# * getAttributeValueRefSetMembersForDescription(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public AttributeValueRefSetMemberList getAttributeValueRefSetMembersForDescription( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get attribute value members by description " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/attributeValueMember/description/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object AttributeValueRefSetMemberListJpa list = (AttributeValueRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, AttributeValueRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * 
org.ihtsdo.otf.ts.rest.ContentServiceRest#getComplexMapRefSetMembersForConcept * (java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public ComplexMapRefSetMemberList getComplexMapRefSetMembersForConcept( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get complex map members by concept " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/complexMapMember/concept/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object ComplexMapRefSetMemberListJpa list = (ComplexMapRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, ComplexMapRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.ts.rest.ContentServiceRest# * getDescriptionTypeRefSetMembersForConcept(java.lang.String, * java.lang.String, java.lang.String, java.lang.String) */ @Override public DescriptionTypeRefSetMemberList getDescriptionTypeRefSetMembersForConcept( String terminologyId, String terminology, String version, String authToken) throws Exception { Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/descriptionTypeMember/concept/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if 
(response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object DescriptionTypeRefSetMemberListJpa list = (DescriptionTypeRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, DescriptionTypeRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see org.ihtsdo.otf.ts.rest.ContentServiceRest# * getLanguageRefSetMembersForDescription(java.lang.String, java.lang.String, * java.lang.String, java.lang.String) */ @Override public LanguageRefSetMemberList getLanguageRefSetMembersForDescription( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get language members by description " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/languageMember/description/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object LanguageRefSetMemberListJpa list = (LanguageRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, LanguageRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getSimpleMapRefSetMembersForConcept * (java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public SimpleMapRefSetMemberList getSimpleMapRefSetMembersForConcept( String terminologyId, String terminology, String version, String authToken) throws Exception { 
Logger.getLogger(getClass()).debug( "Content Client - get simple map members by concept " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/simpleMapMember/concept/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object SimpleMapRefSetMemberListJpa list = (SimpleMapRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, SimpleMapRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getSimpleRefSetMembersForConcept * (java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public SimpleRefSetMemberList getSimpleRefSetMembersForConcept( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get simple members by concept " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/simpleMember/concept/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object SimpleRefSetMemberListJpa list = 
(SimpleRefSetMemberListJpa) ConfigUtility.getGraphForString( resultString, SimpleRefSetMemberListJpa.class); return list; } /* * (non-Javadoc) * * @see * org.ihtsdo.otf.ts.rest.ContentServiceRest#getInverseRelationshipsForConcept * (java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public RelationshipList getInverseRelationshipsForConcept( String terminologyId, String terminology, String version, String authToken) throws Exception { Logger.getLogger(getClass()).debug( "Content Client - get inverse relationships by concept " + terminologyId + ", " + terminology + ", " + version); Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/relationship/inverse/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object RelationshipListJpa list = (RelationshipListJpa) ConfigUtility.getGraphForString(resultString, RelationshipListJpa.class); return list; } /* (non-Javadoc) * @see org.ihtsdo.otf.ts.rest.ContentServiceRest#getRelationship(java.lang.String, java.lang.String, java.lang.String, java.lang.String) */ @Override public Relationship getRelationship(String terminologyId, String terminology, String version, String authToken) throws Exception { Client client = Client.create(); WebResource resource = client.resource(config.getProperty("base.url") + "/content/relationship/" + terminology + "/" + version + "/" + terminologyId); ClientResponse response = resource.accept(MediaType.APPLICATION_XML) .header("Authorization", authToken).get(ClientResponse.class); String resultString = 
response.getEntity(String.class); if (response.getStatusInfo().getFamily() == Family.SUCCESSFUL) { Logger.getLogger(getClass()).debug(resultString); } else { throw new Exception(response.toString()); } // converting to object Relationship d = (Relationship) ConfigUtility.getGraphForString(resultString, RelationshipJpa.class); return d; } }
package net.earthcomputer.vimapi.nbt;

import java.util.Iterator;
import java.util.List;

import com.google.common.collect.Lists;

/**
 * An NBT list tag: an ordered collection of tags that must all share the same
 * tag type. The element type is adopted from the first tag added and enforced
 * on every subsequent insertion (and, as of this fix, on {@link #set}).
 */
public class NBTList extends NBTBase implements Iterable<NBTBase> {

	private List<NBTBase> data = Lists.newArrayList();

	// Tag type of the elements; only meaningful while the list is non-empty
	// (getTagType() reports 0 for an empty list regardless of this field).
	private byte tagType;

	/** Returns the element at the given index. */
	public NBTBase get(int ind) {
		return data.get(ind);
	}

	/** Returns the element tag type, or 0 if the list is empty. */
	public byte getTagType() {
		return size() == 0 ? 0 : tagType;
	}

	/** Returns the byte array at the given index, or an empty array on type mismatch. */
	public byte[] getByteArray(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTByteArray) {
			return ((NBTByteArray) nbt).get();
		}
		return new byte[0];
	}

	/** Returns the byte at the given index, or 0 on type mismatch. */
	public byte getByte(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTPrimitive) {
			return ((NBTPrimitive) nbt).getByte();
		}
		return 0;
	}

	/** Returns the short at the given index, or 0 on type mismatch. */
	public short getShort(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTPrimitive) {
			return ((NBTPrimitive) nbt).getShort();
		}
		return 0;
	}

	/** Returns the int at the given index, or 0 on type mismatch. */
	public int getInt(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTPrimitive) {
			return ((NBTPrimitive) nbt).getInt();
		}
		return 0;
	}

	/** Returns the long at the given index, or 0 on type mismatch. */
	public long getLong(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTPrimitive) {
			return ((NBTPrimitive) nbt).getLong();
		}
		return 0;
	}

	/** Returns the float at the given index, or 0 on type mismatch. */
	public float getFloat(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTPrimitive) {
			return ((NBTPrimitive) nbt).getFloat();
		}
		return 0;
	}

	/** Returns the double at the given index, or 0 on type mismatch. */
	public double getDouble(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTPrimitive) {
			return ((NBTPrimitive) nbt).getDouble();
		}
		return 0;
	}

	/** Returns the int array at the given index, or an empty array on type mismatch. */
	public int[] getIntArray(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTIntArray) {
			return ((NBTIntArray) nbt).get();
		}
		return new int[0];
	}

	/** Returns the string at the given index, or "" on type mismatch. */
	public String getString(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTString) {
			return ((NBTString) nbt).get();
		}
		return "";
	}

	/**
	 * Returns the nested list at the given index if its element type matches
	 * (or it is empty); otherwise returns a fresh empty list.
	 */
	public NBTList getList(int ind, byte tagType) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTList) {
			NBTList nbtList = (NBTList) nbt;
			if (nbtList.size() == 0 || nbtList.getTagType() == tagType) {
				return nbtList;
			}
		}
		return new NBTList();
	}

	/** Returns the compound at the given index, or a fresh empty compound on type mismatch. */
	public NBTCompound getCompound(int ind) {
		NBTBase nbt = get(ind);
		if (nbt instanceof NBTCompound) {
			return (NBTCompound) nbt;
		}
		return new NBTCompound();
	}

	/**
	 * Replaces the element at the given index.
	 *
	 * Fix: previously this bypassed the homogeneity check that {@code add}
	 * enforces, allowing a list to be silently corrupted with mixed tag
	 * types. Replacing the sole element may change the list's element type;
	 * otherwise the new value's type must match.
	 *
	 * @throws IllegalArgumentException if the value's type differs from the
	 *         list's element type and the list has more than one element
	 */
	public void set(int ind, NBTBase val) {
		if (size() == 1) {
			tagType = val.getType();
		} else if (val.getType() != tagType) {
			throw new IllegalArgumentException("Setting wrong type in NBTList");
		}
		data.set(ind, val);
	}

	public void setByteArray(int ind, byte[] val) {
		set(ind, new NBTByteArray(val));
	}

	public void setByte(int ind, byte val) {
		set(ind, new NBTByte(val));
	}

	public void setShort(int ind, short val) {
		set(ind, new NBTShort(val));
	}

	public void setInt(int ind, int val) {
		set(ind, new NBTInt(val));
	}

	public void setLong(int ind, long val) {
		set(ind, new NBTLong(val));
	}

	public void setFloat(int ind, float val) {
		set(ind, new NBTFloat(val));
	}

	public void setDouble(int ind, double val) {
		set(ind, new NBTDouble(val));
	}

	public void setIntArray(int ind, int[] val) {
		set(ind, new NBTIntArray(val));
	}

	public void setString(int ind, String val) {
		set(ind, new NBTString(val));
	}

	// Adopts the value's type when the list is empty; otherwise rejects any
	// value that would break the single-element-type invariant.
	private void checkElementType(NBTBase val) {
		if (size() == 0) {
			tagType = val.getType();
		} else if (val.getType() != tagType) {
			throw new IllegalArgumentException("Adding wrong type to NBTList");
		}
	}

	/**
	 * Appends the given tag.
	 *
	 * @throws IllegalArgumentException if the tag's type differs from the
	 *         element type of a non-empty list
	 */
	public void add(NBTBase val) {
		checkElementType(val);
		data.add(val);
	}

	public void addByteArray(byte[] val) {
		add(new NBTByteArray(val));
	}

	public void addByte(byte val) {
		add(new NBTByte(val));
	}

	public void addShort(short val) {
		add(new NBTShort(val));
	}

	public void addInt(int val) {
		add(new NBTInt(val));
	}

	public void addLong(long val) {
		add(new NBTLong(val));
	}

	public void addFloat(float val) {
		add(new NBTFloat(val));
	}

	public void addDouble(double val) {
		add(new NBTDouble(val));
	}

	public void addIntArray(int[] val) {
		add(new NBTIntArray(val));
	}

	public void addString(String val) {
		add(new NBTString(val));
	}

	/**
	 * Inserts the given tag at the given index.
	 *
	 * @throws IllegalArgumentException if the tag's type differs from the
	 *         element type of a non-empty list
	 */
	public void add(int ind, NBTBase val) {
		checkElementType(val);
		data.add(ind, val);
	}

	public void addByteArray(int ind, byte[] val) {
		add(ind, new NBTByteArray(val));
	}

	public void addByte(int ind, byte val) {
		add(ind, new NBTByte(val));
	}

	public void addShort(int ind, short val) {
		add(ind, new NBTShort(val));
	}

	public void addInt(int ind, int val) {
		add(ind, new NBTInt(val));
	}

	public void addLong(int ind, long val) {
		add(ind, new NBTLong(val));
	}

	public void addFloat(int ind, float val) {
		add(ind, new NBTFloat(val));
	}

	public void addDouble(int ind, double val) {
		add(ind, new NBTDouble(val));
	}

	public void addIntArray(int ind, int[] val) {
		add(ind, new NBTIntArray(val));
	}

	public void addString(int ind, String val) {
		add(ind, new NBTString(val));
	}

	/** Removes the element at the given index. */
	public void remove(int ind) {
		data.remove(ind);
	}

	public int indexOf(NBTBase val) {
		return data.indexOf(val);
	}

	public int size() {
		return data.size();
	}

	@Override
	public Iterator<NBTBase> iterator() {
		return data.iterator();
	}

	@Override
	public byte getType() {
		return TYPE_LIST;
	}

	/** Returns a deep copy: each element is copied recursively. */
	@Override
	public NBTList copy() {
		NBTList copy = new NBTList();
		for (NBTBase element : this) {
			copy.add(element.copy());
		}
		return copy;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import java.io.Serializable;
import java.util.Map;
import javax.cache.Cache;
import javax.cache.integration.CacheLoaderException;
import javax.cache.integration.CacheWriterException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.binary.BinaryObjectBuilder;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.binary.BinaryMarshaller;
import org.apache.ignite.internal.processors.cache.extras.GridCacheObsoleteEntryExtras;
import org.apache.ignite.internal.processors.cache.store.CacheLocalStore;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.marshaller.jdk.JdkMarshaller;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.jsr166.ConcurrentHashMap8;

import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Checks whether storing to local store doesn't cause binary objects unmarshalling,
 * and as a consequence {@link ClassNotFoundException} to be thrown.
 *
 * @see <a href="https://issues.apache.org/jira/browse/IGNITE-2753">
 *     https://issues.apache.org/jira/browse/IGNITE-2753
 *     </a>
 */
public class GridCacheStoreManagerDeserializationTest extends GridCommonAbstractTest {
    /** IP finder shared by every grid started in this test. */
    protected static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Cache store. Static so that all grids in the test share one backing map. */
    protected static final GridCacheLocalTestStore store = new GridCacheLocalTestStore();

    /** Test cache name. */
    protected static final String CACHE_NAME = "cache_name";

    /**
     * @return Cache mode.
     */
    protected CacheMode cacheMode() {
        return PARTITIONED;
    }

    /**
     * @return Cache synchronization mode.
     */
    private CacheWriteSynchronizationMode cacheWriteSynchronizationMode() {
        return FULL_SYNC;
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override protected IgniteConfiguration getConfiguration(final String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);

        // Grids whose instance name starts with "binary" use the binary
        // marshaller; all others fall back to the JDK marshaller.
        if (igniteInstanceName != null && igniteInstanceName.toLowerCase().startsWith("binary"))
            c.setMarshaller(new BinaryMarshaller());
        else
            c.setMarshaller(new JdkMarshaller());

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(IP_FINDER);

        c.setDiscoverySpi(disco);

        c.setCacheConfiguration(cacheConfiguration());

        return c;
    }

    /**
     * Builds the shared cache configuration: write-through to the local test
     * store, binary storage kept on, no backups, atomic mode.
     *
     * @return Cache configuration.
     */
    @SuppressWarnings("unchecked")
    protected CacheConfiguration cacheConfiguration() {
        CacheConfiguration cc = defaultCacheConfiguration();

        // Template
        cc.setName("*");

        cc.setRebalanceMode(SYNC);

        cc.setCacheStoreFactory(singletonFactory(store));
        cc.setReadThrough(true);
        cc.setWriteThrough(true);
        cc.setLoadPreviousValue(true);
        // Key setting for IGNITE-2753: keep binary form when talking to the store.
        cc.setStoreKeepBinary(true);

        cc.setCacheMode(cacheMode());
        cc.setWriteSynchronizationMode(cacheWriteSynchronizationMode());

        cc.setBackups(0);

        cc.setAtomicityMode(CacheAtomicityMode.ATOMIC);

        return cc;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        // The store is static: wipe it so tests don't see each other's data.
        store.map.clear();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }

    /**
     * Check whether test objects are stored correctly via stream API.
     *
     * NOTE(review): plain {@code assert} statements here only fire when the
     * JVM runs with {@code -ea}, which the Ignite test framework enables.
     *
     * @throws Exception If failed.
     */
    public void testStream() throws Exception {
        final Ignite grid = startGrid();

        final IgniteCache<TestObj, TestObj> cache = grid.createCache(CACHE_NAME);

        final TestObj testObj = streamData(grid);

        cache.destroy();
        cache.close();

        // Destroying the cache must not remove the entry from the store.
        assert store.map.containsKey(testObj);

        final IgniteCache<TestObj, TestObj> cache2 = grid.createCache(CACHE_NAME);

        // Read-through should reload the value from the store.
        assert testObj.equals(cache2.get(testObj));
        assert store.map.containsKey(testObj);
    }

    /**
     * Simulate case where is called
     * {@link org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheEntry#clearInternal(
     * GridCacheVersion, GridCacheObsoleteEntryExtras)}
     *
     * Starting additional grids forces partitions (and their entries) to be
     * moved/cleared on the original node.
     *
     * @throws Exception If failed.
     */
    public void testPartitionMove() throws Exception {
        // The "binary" prefix selects BinaryMarshaller in getConfiguration().
        final Ignite grid = startGrid("binaryGrid1");

        grid.createCache(CACHE_NAME);

        final BinaryObjectBuilder builder = grid.binary().builder("custom_type");

        final IgniteDataStreamer<BinaryObject, BinaryObject> streamer = grid.dataStreamer(CACHE_NAME);

        streamer.keepBinary(true);

        final int itemsNum = 10_000;

        for (int i = 0; i < itemsNum; i++) {
            final BinaryObject key = builder.setField("id", i).build();

            streamer.addData(key, key);
        }

        streamer.flush();

        streamer.close();

        streamer.future().get();

        assert store.map.size() == itemsNum;

        startGrid("binaryGrid2");
        startGrid("binaryGrid3");
        startGrid("binaryGrid4");

        // NOTE(review): fixed sleep gives rebalancing time to run; the test
        // passes if no deserialization error surfaces during that window.
        // Timing-based waits like this are inherently fragile.
        Thread.sleep(10_000);
    }

    /**
     * TODO GG-11148.
     *
     * Check whether binary objects are stored without unmarshalling via stream API.
     *
     * NOTE(review): disabled (leading underscore) pending GG-11148.
     *
     * @throws Exception If failed.
     */
    public void _testBinaryStream() throws Exception {
        final Ignite grid = startGrid("binaryGrid");

        final IgniteCache<BinaryObject, BinaryObject> cache = grid.createCache(CACHE_NAME).withKeepBinary();

        final BinaryObject key = streamBinaryData(grid);

        assert cache.containsKey(key);
        assert store.map.containsKey(key);

        cache.destroy();
        cache.close();

        assert store.map.containsKey(key);

        final IgniteCache<BinaryObject, BinaryObject> cache2 = grid.createCache(CACHE_NAME).withKeepBinary();

        final BinaryObject loaded = cache2.get(key);

        // Same binary instance expected back: no unmarshal/remarshal round trip.
        assertSame(loaded, key);

        assertTrue(store.map.containsKey(key));
    }

    /**
     * Create and add test data via Streamer API.
     *
     * @param grid to get streamer.
     * @return test object (it is key and val).
     */
    private TestObj streamData(final Ignite grid) {
        final IgniteDataStreamer<TestObj, TestObj> streamer = grid.dataStreamer(CACHE_NAME);

        TestObj entity = null;

        // Single-iteration loop: streams exactly one entry (key == value).
        for (int i = 0; i < 1; i++) {
            entity = new TestObj(i);

            streamer.addData(entity, entity);
        }

        streamer.flush();

        streamer.close();

        streamer.future().get();

        return entity;
    }

    /**
     * Create and add binary data via Streamer API.
     *
     * @param grid to get streamer.
     * @return test object (it is key and val).
     */
    private BinaryObject streamBinaryData(final Ignite grid) {
        final IgniteDataStreamer<BinaryObject, BinaryObject> streamer = grid.dataStreamer(CACHE_NAME);

        // Keep values in binary form so the store sees BinaryObject instances.
        streamer.keepBinary(true);

        final BinaryObjectBuilder builder = grid.binary().builder("custom_type");

        BinaryObject entity = null;

        // Single-iteration loop: streams exactly one entry (key == value).
        for (int i = 0; i < 1; i++) {
            builder.setField("id", i);

            entity = builder.build();

            streamer.addData(entity, entity);
        }

        streamer.flush();

        streamer.close();

        streamer.future().get();

        return entity;
    }

    /**
     * Local store mock. Keeps entries in an in-memory concurrent map so the
     * tests can inspect exactly what (and in which form) was written through.
     *
     * @param <K>
     * @param <V>
     */
    @CacheLocalStore
    protected static class GridCacheLocalTestStore<K, V> extends CacheStoreAdapter<K, V> {
        /** Backing map; public so tests can assert on its contents directly. */
        public final Map<K, V> map = new ConcurrentHashMap8<>();

        /** {@inheritDoc} */
        @Override public V load(final K key) throws CacheLoaderException {
            return map.get(key);
        }

        /** {@inheritDoc} */
        @Override public void write(final Cache.Entry<? extends K, ? extends V> entry) throws CacheWriterException {
            map.put(entry.getKey(), entry.getValue());
        }

        /** {@inheritDoc} */
        @Override public void delete(final Object key) throws CacheWriterException {
            map.remove(key);
        }
    }

    /**
     * Test object. Serializable so the JDK marshaller can handle it; equality
     * is by the wrapped value, making it usable as both cache key and value.
     */
    static class TestObj implements Serializable {
        /** Wrapped value; doubles as the identity for equals/hashCode. */
        Integer val;

        /** */
        public TestObj() {
        }

        /** */
        public TestObj(final Integer val) {
            this.val = val;
        }

        /** {@inheritDoc} */
        @Override public boolean equals(final Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;

            final TestObj testObj = (TestObj) o;

            return val != null ? val.equals(testObj.val) : testObj.val == null;
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return val != null ? val.hashCode() : 0;
        }
    }
}
package com.therandomlabs.utils.io;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.Charset;
import java.nio.charset.MalformedInputException;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.text.DecimalFormat;
import java.util.List;
import java.util.Scanner;
import java.util.function.Predicate;
import java.util.stream.Stream;
import com.therandomlabs.utils.collection.ArrayUtils;
import com.therandomlabs.utils.collection.TRLCollectors;
import com.therandomlabs.utils.collection.TRLList;
import com.therandomlabs.utils.misc.ReflectionUtils;
import com.therandomlabs.utils.misc.StringUtils;
import com.therandomlabs.utils.platform.Platform;
import com.therandomlabs.utils.wrapper.BooleanWrapper;

/**
 * Miscellaneous file-system and I/O utilities (deletion, globbing, reading,
 * human-readable sizes). Static-only; not instantiable.
 */
public final class IOUtils {
	public static final char PATH_SEPARATOR_UNIX = '/';
	public static final char PATH_SEPARATOR_WINDOWS = '\\';
	public static final char PATH_SEPARATOR =
			Platform.IS_WINDOWS_OR_WINDOWS_CE ? PATH_SEPARATOR_WINDOWS : PATH_SEPARATOR_UNIX;
	public static final char PATH_LIST_SEPARATOR = File.pathSeparatorChar;

	public static final String LINE_SEPARATOR_UNIX = "\n";
	public static final String LINE_SEPARATOR_WINDOWS = "\r\n";
	public static final String LINE_SEPARATOR;

	public static final Path CURRENT_WORKING_DIRECTORY = Paths.get(".").toAbsolutePath().normalize();

	//Predicates for deleteDirectorySelectively.
	public static final Predicate<Path> DELETE_ALL = path -> true;
	public static final Predicate<Path> DELETE_EMPTY_DIRECTORIES = IOUtils::isEmptyRecursive;
	public static final Predicate<Path> DELETE_FILES = Files::isRegularFile;

	//SI prefixes are decimal (kB = 1000 B); binary (IEC) prefixes are KiB = 1024 B.
	//Fixed: "Bi" is not a unit — bytes are "B" in both systems.
	private static final String[] SI_UNITS = {
			"B", "kB", "MB", "GB", "TB", "PB", "EB"
	};
	private static final String[] BINARY_UNITS = {
			"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"
	};

	private static final double LOG_1024 = Math.log10(1024);

	static {
		final StringBuilderWriter writer = new StringBuilderWriter(4);

		//This can be more accurate than the "line.separator" system property
		try(final PrintWriter printWriter = new PrintWriter(writer)) {
			printWriter.println();
			LINE_SEPARATOR = writer.toString();
		}
	}

	private IOUtils() {}

	//Returns true if the string can be parsed as a Path on this platform.
	public static boolean isValidPath(String path) {
		if(path == null) {
			return false;
		}

		try {
			Paths.get(path);
			return true;
		} catch(InvalidPathException ignored) {}

		return false;
	}

	//Recursively deletes a directory and everything inside it.
	public static void deleteDirectory(Path path) throws IOException {
		deleteDirectorySelectively(path, DELETE_ALL);
	}

	//Walks the tree bottom-up, deleting every path accepted by the predicate.
	//A walk error terminates the traversal without throwing.
	public static void deleteDirectorySelectively(Path path, Predicate<Path> predicate)
			throws IOException {
		Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
			@Override
			public FileVisitResult visitFile(Path file, BasicFileAttributes attributes)
					throws IOException {
				if(predicate.test(file)) {
					Files.delete(file);
				}

				return FileVisitResult.CONTINUE;
			}

			@Override
			public FileVisitResult postVisitDirectory(Path directory, IOException ex)
					throws IOException {
				if(ex != null) {
					return FileVisitResult.TERMINATE;
				}

				if(predicate.test(directory)) {
					Files.delete(directory);
				}

				return FileVisitResult.CONTINUE;
			}
		});
	}

	//Returns false if the path did not exist, true after deleting it.
	public static boolean deleteDirectoryIfExists(Path path) throws IOException {
		if(!Files.exists(path)) {
			return false;
		}

		deleteDirectory(path);
		return true;
	}

	public static void deleteEmptyDirectories(Path path) throws IOException {
		deleteDirectorySelectively(path, DELETE_EMPTY_DIRECTORIES);
	}

	//True if path is a directory whose entire subtree contains no regular files.
	//Children discovered during the scan are appended to the worklist in place.
	public static boolean isEmptyRecursive(Path path) {
		if(!Files.isDirectory(path)) {
			return false;
		}

		final List<Path> children = listQuietly(path);

		for(int i = 0; i < children.size(); i++) {
			final Path child = children.get(i);

			if(!Files.isDirectory(child)) {
				return false;
			}

			children.addAll(listQuietly(child));
		}

		return true;
	}

	//Like list(Path), but swallows IOExceptions; on failure returns a
	//single sentinel path so the caller sees a non-directory child.
	public static TRLList<Path> listQuietly(Path path) {
		try {
			return list(path);
		} catch(IOException ignored) {}

		return new TRLList<>(path.resolve("...LIST OPERATION FAILED..."));
	}

	public static TRLList<Path> list(Path path) throws IOException {
		try(final Stream<Path> list = Files.list(path)) {
			return list.collect(TRLCollectors.toTRLList());
		}
	}

	public static Path getClassLocation() throws ClassNotFoundException, URISyntaxException {
		return getClassLocation(ReflectionUtils.getCallerClass());
	}

	//Returns the JAR or classes directory the class was loaded from.
	public static Path getClassLocation(Class<?> clazz) throws URISyntaxException {
		return Paths.get(clazz.getProtectionDomain().getCodeSource().getLocation().toURI());
	}

	//Streams inputStream to location and returns location. The input channel
	//and output stream are both closed on exit.
	public static Path download(InputStream inputStream, Path location) throws IOException {
		try(
				final ReadableByteChannel channel = Channels.newChannel(inputStream);
				final FileOutputStream outputStream = new FileOutputStream(location.toString())
		) {
			outputStream.getChannel().transferFrom(channel, 0, Long.MAX_VALUE);
			return location;
		}
	}

	//True if parent is an ancestor (at any depth) of path.
	public static boolean isParent(Path parent, Path path) {
		while((path = path.getParent()) != null) {
			if(path.equals(parent)) {
				return true;
			}
		}

		return false;
	}

	//True if the tree under directory contains no regular files at all.
	public static boolean isFileTreeEmpty(Path directory) {
		final BooleanWrapper empty = new BooleanWrapper(true);

		try {
			Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
				@Override
				public FileVisitResult visitFile(Path file, BasicFileAttributes attributes) {
					//First file found flips the flag to false and stops the walk.
					empty.toggle();
					return FileVisitResult.TERMINATE;
				}
			});
		} catch(IOException ignored) {
			//IOExceptions are only called when visitor methods throw them; in this case, never
		}

		return empty.get();
	}

	public static void transfer(ReadableByteChannel read, WritableByteChannel write)
			throws IOException {
		transfer(read, write, 32 * 1024);
	}

	//Copies all bytes from read to write, then closes both channels.
	public static void transfer(ReadableByteChannel read, WritableByteChannel write, int bufferSize)
			throws IOException {
		final ByteBuffer buffer = ByteBuffer.allocateDirect(bufferSize);

		//Keep draining after EOF until any partially-written bytes are flushed.
		while(read.read(buffer) != -1 || buffer.position() > 0) {
			buffer.flip();
			write.write(buffer);
			buffer.compact();
		}

		read.close();
		write.close();
	}

	//Reads a file as UTF-8, falling back to ISO-8859-1 on malformed input.
	public static String readFile(Path path) throws IOException {
		try {
			return readFile(path, StandardCharsets.UTF_8);
		} catch(MalformedInputException ex) {
			//Fall back to ISO-8859-1
			return readFile(path, StandardCharsets.ISO_8859_1);
		}
	}

	public static String readFile(Path path, Charset charset) throws IOException {
		return new String(Files.readAllBytes(path), charset);
	}

	public static Path write(Path path, String string) throws IOException {
		return write(path, string, true);
	}

	//Writes string as UTF-8; optionally guarantees a trailing newline.
	public static Path write(Path path, String string, boolean forceEndNewline)
			throws IOException {
		if(forceEndNewline && !StringUtils.endsWithNewline(string)) {
			string += LINE_SEPARATOR;
		}

		return Files.write(path, string.getBytes(StandardCharsets.UTF_8));
	}

	public static String toStringWithUnixPathSeparators(Path path) {
		return ensureUnixPathSeparators(path.toString());
	}

	public static String ensureUnixPathSeparators(String path) {
		return path.replace(PATH_SEPARATOR_WINDOWS, PATH_SEPARATOR_UNIX);
	}

	//Returns the file name, or "" for paths with no name element (e.g. roots).
	public static String getName(Path path) {
		final Path name = path.getFileName();
		return name == null ? "" : name.toString();
	}

	public static void ensureParentExists(Path path) throws IOException {
		final Path parent = path.getParent();

		if(parent != null) {
			Files.createDirectories(parent);
		}
	}

	//Resolves a '/'-separated glob (each element a directory-level glob) against
	//parentDirectory and returns all matching paths, absolute and normalized.
	public static TRLList<Path> getPathsMatchingGlobs(Path parentDirectory, String glob)
			throws IOException {
		glob = ensureUnixPathSeparators(glob);

		final String[] elements = StringUtils.split(glob, PATH_SEPARATOR_UNIX);

		final TRLList<Path> directories = new TRLList<>();
		directories.add(parentDirectory);

		if(elements.length > 1) {
			//Expand every intermediate element into the set of matching directories.
			for(int i = 0; i < elements.length - 1; i++) {
				final String element = elements[i];
				final TRLList<Path> nextDirectories = new TRLList<>();

				for(Path directory : directories) {
					try(final DirectoryStream<Path> stream =
							Files.newDirectoryStream(directory, element)) {
						stream.forEach(path -> {
							if(Files.isDirectory(path)) {
								nextDirectories.add(path);
							}
						});
					}
				}

				if(nextDirectories.isEmpty()) {
					return new TRLList<>();
				}

				directories.clear();
				directories.addAll(nextDirectories);
			}
		}

		final TRLList<Path> paths = new TRLList<>();
		final String finalGlob = ArrayUtils.last(elements);

		for(Path directory : directories) {
			try(final DirectoryStream<Path> stream =
					Files.newDirectoryStream(directory, finalGlob)) {
				stream.forEach(path -> paths.add(path.toAbsolutePath().normalize()));
			}
		}

		return paths;
	}

	//Creates the file if needed and sets its modification time to now;
	//throws if the new modification time did not stick.
	public static void touch(Path path) throws IOException {
		if(!Files.exists(path)) {
			Files.createFile(path);
		}

		final long millis = System.currentTimeMillis();
		Files.setLastModifiedTime(path, FileTime.fromMillis(millis));

		if(Files.getLastModifiedTime(path).toMillis() != millis) {
			throw new IOException("Could not set last modification time: " + path);
		}
	}

	//Tests writability of the modification time, restoring the original on success.
	public static boolean canTouch(Path path) {
		try {
			final FileTime originalTime = Files.getLastModifiedTime(path);

			touch(path);
			Files.setLastModifiedTime(path, originalTime);

			return true;
		} catch(IOException ignored) {}

		return false;
	}

	public static boolean existsAndCanTouch(Path path) {
		return Files.exists(path) && canTouch(path);
	}

	public static boolean existsAndCantTouch(Path path) {
		return Files.exists(path) && !canTouch(path);
	}

	public static TRLList<String> readLines(InputStream stream) {
		return new TRLList<>(StringUtils.NEWLINE.split(readString(stream)));
	}

	//Taken from http://stackoverflow.com/a/5445161/5076824
	//Reads the whole stream as a UTF-8 string; closing the Scanner closes the stream.
	public static String readString(InputStream stream) {
		try(final Scanner scanner =
				new Scanner(stream, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
			return scanner.hasNext() ? scanner.next() : "";
		}
	}

	//Taken from https://stackoverflow.com/a/5599842/5076824
	//SI sizes use powers of 1000 (kB, MB, ...); log1000(size) == log10(size) / 3.
	//Fixed: this previously divided by 1024, which belongs to the binary variant.
	public static String getReadableFileSizeSI(long size) {
		if(size <= 0) {
			return "0 " + SI_UNITS[0];
		}

		final int groups = (int) (Math.log10(size) / 3); //log1000(size)
		return new DecimalFormat("#,##0.#").
				format(size / Math.pow(1000, groups)) + " " + SI_UNITS[groups];
	}

	//Binary sizes use powers of 1024 (KiB, MiB, ...); log1024(size) == log10(size) / log10(1024).
	//Fixed: this previously divided by 1000, which belongs to the SI variant.
	public static String getReadableFileSizeBinary(long size) {
		if(size <= 0) {
			return "0 " + BINARY_UNITS[0];
		}

		final int groups = (int) (Math.log10(size) / LOG_1024); //log1024(size)
		return new DecimalFormat("#,##0.#").
				format(size / Math.pow(1024, groups)) + " " + BINARY_UNITS[groups];
	}
}
package hudson.plugins.jira;

import com.atlassian.jira.rest.client.api.JiraRestClient;
import com.atlassian.jira.rest.client.api.RestClientException;
import com.atlassian.jira.rest.client.api.domain.Issue;
import com.atlassian.jira.rest.client.api.domain.Version;
import com.atlassian.jira.rest.client.internal.async.AsynchronousJiraRestClientFactory;
import com.cloudbees.hudson.plugins.folder.AbstractFolder;
import com.google.common.base.Objects;
import com.google.common.base.Optional;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import hudson.Extension;
import hudson.Util;
import hudson.model.*;
import hudson.plugins.jira.model.JiraIssue;
import hudson.plugins.jira.model.JiraVersion;
import hudson.util.FormValidation;
import hudson.util.Secret;
import org.joda.time.DateTime;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
import org.kohsuke.stapler.QueryParameter;

import javax.annotation.CheckForNull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;

import static org.apache.commons.lang.StringUtils.isEmpty;
import static org.apache.commons.lang.StringUtils.isNotEmpty;

/**
 * Represents an external JIRA installation and configuration
 * needed to access this JIRA.
 *
 * @author Kohsuke Kawaguchi
 */
public class JiraSite extends AbstractDescribableImpl<JiraSite> {

    private static final Logger LOGGER = Logger.getLogger(JiraSite.class.getName());

    /**
     * Regexp pattern that identifies JIRA issue token.
     * If this pattern changes help pages (help-issue-pattern_xy.html) must be updated
     * First char must be a letter, then at least one letter, digit or underscore.
     * See issue JENKINS-729, JENKINS-4092
     */
    public static final Pattern DEFAULT_ISSUE_PATTERN = Pattern.compile("([a-zA-Z][a-zA-Z0-9_]+-[1-9][0-9]*)([^.]|\\.[^0-9]|\\.$|$)");

    /**
     * Default rest api client calls timeout, in seconds
     * See issue JENKINS-31113
     */
    public static final int DEFAULT_TIMEOUT = 10;

    /**
     * URL of JIRA for Jenkins access, like <tt>http://jira.codehaus.org/</tt>.
     * Mandatory. Normalized to end with '/'
     */
    public final URL url;

    /**
     * URL of JIRA for normal access, like <tt>http://jira.codehaus.org/</tt>.
     * Mandatory. Normalized to end with '/'
     */
    public final URL alternativeUrl;

    /**
     * JIRA requires HTTP Authentication for login
     */
    public final boolean useHTTPAuth;

    /**
     * User name needed to login. Optional.
     */
    public final String userName;

    /**
     * Password needed to login. Optional.
     */
    public final Secret password;

    /**
     * Group visibility to constrain the visibility of the added comment. Optional.
     */
    public final String groupVisibility;

    /**
     * Role visibility to constrain the visibility of the added comment. Optional.
     */
    public final String roleVisibility;

    /**
     * True if this JIRA is configured to allow Confluence-style Wiki comment.
     */
    public final boolean supportsWikiStyleComment;

    /**
     * to record scm changes in jira issue
     *
     * @since 1.21
     */
    public final boolean recordScmChanges;

    /**
     * Disable annotating the changelogs
     *
     * @since todo
     */
    public boolean disableChangelogAnnotations;

    /**
     * user defined pattern
     *
     * @since 1.22
     */
    private final String userPattern;

    private transient Pattern userPat;

    /**
     * updated jira issue for all status
     *
     * @since 1.22
     */
    public final boolean updateJiraIssueForAllStatus;

    /**
     * timeout used when calling jira rest api, in seconds
     */
    public Integer timeout;

    /**
     * Configuration for formatting (date -> text) in jira comments.
     */
    private String dateTimePattern;

    /**
     * To add scm entry change date and time in jira comments.
     */
    private Boolean appendChangeTimestamp;

    /**
     * List of project keys (i.e., "MNG" portion of "MNG-512"),
     * last time we checked. Copy on write semantics.
     */
    // TODO: seems like this is never invalidated (never set to null)
    // should we implement to invalidate this (say every hour)?
    private transient volatile Set<String> projects;

    private transient Cache<String, Optional<Issue>> issueCache = makeIssueCache();

    /**
     * Used to guard the computation of {@link #projects}
     */
    private transient Lock projectUpdateLock = new ReentrantLock();

    private transient JiraSession jiraSession = null;

    @DataBoundConstructor
    public JiraSite(URL url, @CheckForNull URL alternativeUrl, String userName, String password, boolean supportsWikiStyleComment, boolean recordScmChanges, @CheckForNull String userPattern,
                    boolean updateJiraIssueForAllStatus, @CheckForNull String groupVisibility, @CheckForNull String roleVisibility, boolean useHTTPAuth) {
        // Normalize both URLs to end with '/' so relative resolution (e.g. "browse/KEY") works.
        if (url != null && !url.toExternalForm().endsWith("/"))
            try {
                url = new URL(url.toExternalForm() + "/");
            } catch (MalformedURLException e) {
                throw new AssertionError(e);
            }

        if (alternativeUrl != null && !alternativeUrl.toExternalForm().endsWith("/"))
            try {
                alternativeUrl = new URL(alternativeUrl.toExternalForm() + "/");
            } catch (MalformedURLException e) {
                throw new AssertionError(e);
            }

        this.url = url;
        this.timeout = JiraSite.DEFAULT_TIMEOUT;
        this.alternativeUrl = alternativeUrl;
        this.userName = Util.fixEmpty(userName);
        this.password = Secret.fromString(Util.fixEmpty(password));
        this.supportsWikiStyleComment = supportsWikiStyleComment;
        this.recordScmChanges = recordScmChanges;
        this.userPattern = Util.fixEmpty(userPattern);

        if (this.userPattern != null) {
            this.userPat = Pattern.compile(this.userPattern);
        } else {
            this.userPat = null;
        }

        this.updateJiraIssueForAllStatus = updateJiraIssueForAllStatus;
        this.groupVisibility = Util.fixEmpty(groupVisibility);
        this.roleVisibility = Util.fixEmpty(roleVisibility);
        this.useHTTPAuth = useHTTPAuth;
        this.jiraSession = null;
    }

    @DataBoundSetter
    public void setDisableChangelogAnnotations(boolean disableChangelogAnnotations) {
        this.disableChangelogAnnotations = disableChangelogAnnotations;
    }

    public boolean getDisableChangelogAnnotations() {
        return disableChangelogAnnotations;
    }

    /**
     * Sets request timeout (in seconds).
     * If not specified, a default timeout will be used.
     *
     * @param timeoutSec Timeout in seconds
     */
    @DataBoundSetter
    public void setTimeout(Integer timeoutSec) {
        this.timeout = timeoutSec;
    }

    @DataBoundSetter
    public void setDateTimePattern(String dateTimePattern) {
        this.dateTimePattern = dateTimePattern;
    }

    @DataBoundSetter
    public void setAppendChangeTimestamp(Boolean appendChangeTimestamp) {
        this.appendChangeTimestamp = appendChangeTimestamp;
    }

    public String getDateTimePattern() {
        return dateTimePattern;
    }

    public boolean isAppendChangeTimestamp() {
        return this.appendChangeTimestamp != null && this.appendChangeTimestamp.booleanValue();
    }

    // Re-initialize transient state after deserialization.
    protected Object readResolve() {
        projectUpdateLock = new ReentrantLock();
        issueCache = makeIssueCache();
        jiraSession = null;
        return this;
    }

    private static Cache<String, Optional<Issue>> makeIssueCache() {
        return CacheBuilder.newBuilder().concurrencyLevel(2).expireAfterAccess(2, TimeUnit.MINUTES).build();
    }

    public String getName() {
        return url.toExternalForm();
    }

    /**
     * Gets a remote access session to this JIRA site.
     * Creates one if none exists already.
     *
     * @return null if remote access is not supported.
     */
    @Nullable
    public JiraSession getSession() throws IOException {
        if (jiraSession == null) {
            jiraSession = createSession();
        }
        return jiraSession;
    }

    /**
     * Creates a remote access session to this JIRA.
     *
     * @return null if remote access is not supported.
     */
    protected JiraSession createSession() throws IOException {
        if (userName == null || password == null)
            return null;    // remote access not supported

        final URI uri;
        try {
            uri = url.toURI();
        } catch (URISyntaxException e) {
            LOGGER.warning("convert URL to URI error: " + e.getMessage());
            throw new RuntimeException("failed to create JiraSession due to convert URI error");
        }
        LOGGER.fine("creating JIRA Session: " + uri);

        final JiraRestClient jiraRestClient = new AsynchronousJiraRestClientFactory()
                .createWithBasicHttpAuthentication(uri, userName, password.getPlainText());

        int usedTimeout = timeout != null ? timeout : JiraSite.DEFAULT_TIMEOUT;
        return new JiraSession(this, new JiraRestService(uri, jiraRestClient, userName, password.getPlainText(), usedTimeout));
    }

    /**
     * @return the server URL
     */
    @Nullable
    public URL getUrl() {
        return Objects.firstNonNull(this.url, this.alternativeUrl);
    }

    /**
     * Computes the URL to the given issue.
     */
    public URL getUrl(JiraIssue issue) throws IOException {
        return getUrl(issue.getKey());
    }

    /**
     * Computes the URL to the given issue.
     */
    public URL getUrl(String id) throws MalformedURLException {
        return new URL(url, "browse/" + id.toUpperCase());
    }

    /**
     * Computes the alternative link URL to the given issue.
     */
    public URL getAlternativeUrl(String id) throws MalformedURLException {
        return alternativeUrl == null ? null : new URL(alternativeUrl, "browse/" + id.toUpperCase());
    }

    /**
     * Gets the user-defined issue pattern if any.
     *
     * @return the pattern or null
     */
    public Pattern getUserPattern() {
        if (userPattern == null) {
            return null;
        }

        if (userPat == null) {
            // We don't care about any thread race- or visibility issues here.
            // The worst thing which could happen, is that the pattern
            // is compiled multiple times.
            userPat = Pattern.compile(userPattern);
        }
        return userPat;
    }

    public Pattern getIssuePattern() {
        if (getUserPattern() != null) {
            return getUserPattern();
        }
        return DEFAULT_ISSUE_PATTERN;
    }

    /**
     * Gets the list of project IDs in this JIRA.
     * This information could be bit old, or it can be null.
     */
    public Set<String> getProjectKeys() {
        if (projects == null) {
            try {
                if (projectUpdateLock.tryLock(3, TimeUnit.SECONDS)) {
                    try {
                        // Double-checked: another thread may have filled it while we waited.
                        if (projects == null) {
                            JiraSession session = getSession();
                            if (session != null) {
                                projects = Collections.unmodifiableSet(session.getProjectKeys());
                            }
                        }
                    } catch (IOException e) {
                        // in case of error, set empty set to avoid trying the same thing repeatedly.
                        LOGGER.log(Level.WARNING, "Failed to obtain JIRA project list", e);
                    } finally {
                        projectUpdateLock.unlock();
                    }
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // process this interruption later
            }
        }
        // fall back to empty if failed to talk to the server
        Set<String> p = projects;
        if (p == null) {
            return Collections.emptySet();
        }
        return p;
    }

    /**
     * Gets the effective {@link JiraSite} associated with the given project.
     *
     * @return null
     * if no such was found.
     */
    public static JiraSite get(Job<?, ?> p) {
        JiraProjectProperty jpp = p.getProperty(JiraProjectProperty.class);
        if (jpp != null) {
            // Looks in global configuration for the site configured
            JiraSite site = jpp.getSite();
            if (site != null) {
                return site;
            }
        }

        // Check up the folder chain if a site is defined there
        // This only supports one site per folder
        ItemGroup parent = p.getParent();
        while (parent != null) {
            if (parent instanceof AbstractFolder) {
                AbstractFolder folder = (AbstractFolder) parent;
                JiraFolderProperty jfp = (JiraFolderProperty) folder.getProperties().get(JiraFolderProperty.class);
                if (jfp != null) {
                    JiraSite[] sites = jfp.getSites();
                    if (sites != null && sites.length > 0) {
                        return sites[0];
                    }
                }
            }

            if (parent instanceof Item) {
                parent = ((Item) parent).getParent();
            } else {
                parent = null;
            }
        }

        // none is explicitly configured. try the default ---
        // if only one is configured, that must be it.
        JiraSite[] sites = JiraProjectProperty.DESCRIPTOR.getSites();
        if (sites.length == 1) {
            return sites[0];
        }

        return null;
    }

    /**
     * Checks if the given JIRA id will be likely to exist in this issue tracker.
     * This method checks whether the key portion is a valid key (except that
     * it can potentially use stale data). Number portion is not checked at all.
     *
     * @deprecated Use getIssue instead
     * @param id String like MNG-1234
     */
    @Deprecated
    public boolean existsIssue(String id) {
        int idx = id.indexOf('-');
        if (idx == -1) {
            return false;
        }

        Set<String> keys = getProjectKeys();
        return keys.contains(id.substring(0, idx).toUpperCase());
    }

    /**
     * Returns the remote issue with the given id or <code>null</code> if it wasn't found.
     */
    @CheckForNull
    public JiraIssue getIssue(final String id) throws IOException {
        try {
            Optional<Issue> issue = issueCache.get(id, new Callable<Optional<Issue>>() {
                public Optional<Issue> call() throws Exception {
                    JiraSession session = getSession();
                    Issue issue = null;
                    if (session != null) {
                        issue = session.getIssue(id);
                    }
                    return Optional.fromNullable(issue);
                }
            });

            if (!issue.isPresent()) {
                return null;
            }

            return new JiraIssue(issue.get());
        } catch (ExecutionException e) {
            throw new IOException(e);
        }
    }

    /**
     * Release a given version.
     *
     * @param projectKey  The Project Key
     * @param versionName The name of the version
     * @throws IOException
     */
    public void releaseVersion(String projectKey, String versionName) throws IOException {
        JiraSession session = getSession();
        if (session != null) {
            List<Version> versions = session.getVersions(projectKey);
            if (versions == null || versions.isEmpty()) {
                return;
            }

            for (Version version : versions) {
                if (version.getName().equals(versionName)) {
                    // Copy all fields but flip "released" to true.
                    Version releaseVersion = new Version(version.getSelf(), version.getId(), version.getName(),
                            version.getDescription(), version.isArchived(), true, new DateTime());
                    session.releaseVersion(projectKey, releaseVersion);
                    return;
                }
            }
        }
    }

    /**
     * Returns all versions for the given project key.
     *
     * @param projectKey Project Key
     * @return A set of JiraVersions
     * @throws IOException
     */
    public Set<JiraVersion> getVersions(String projectKey) throws IOException {
        JiraSession session = getSession();
        if (session == null) {
            LOGGER.warning("JIRA session could not be established");
            return Collections.emptySet();
        }

        List<Version> versions = session.getVersions(projectKey);
        if (versions == null) {
            return Collections.emptySet();
        }

        Set<JiraVersion> versionsSet = new HashSet<>(versions.size());
        for (Version version : versions) {
            versionsSet.add(new JiraVersion(version));
        }

        return versionsSet;
    }

    /**
     * Generates release notes for a given version.
     *
     * @param projectKey
     * @param versionName
     * @return release notes
     * @throws IOException, TimeoutException
     */
    public String getReleaseNotesForFixVersion(String projectKey, String versionName) throws IOException, TimeoutException {
        return getReleaseNotesForFixVersion(projectKey, versionName, "");
    }

    /**
     * Generates release notes for a given version.
     *
     * @param projectKey
     * @param versionName
     * @param filter      Additional JQL Filter. Example: status in (Resolved,Closed)
     * @return release notes
     * @throws IOException, TimeoutException
     */
    public String getReleaseNotesForFixVersion(String projectKey, String versionName, String filter) throws IOException, TimeoutException {
        JiraSession session = getSession();
        if (session == null) {
            LOGGER.warning("JIRA session could not be established");
            return "";
        }

        List<Issue> issues = session.getIssuesWithFixVersion(projectKey, versionName, filter);
        if (issues == null) {
            return "";
        }

        // Group issue lines by issue type.
        Map<String, Set<String>> releaseNotes = new HashMap<>();
        for (Issue issue : issues) {
            String key = issue.getKey();
            String summary = issue.getSummary();
            String status = issue.getStatus().getName();
            String type = "UNKNOWN";

            if (issue.getIssueType() != null && issue.getIssueType().getName() != null) {
                type = issue.getIssueType().getName();
            }

            Set<String> issueSet = releaseNotes.get(type);
            if (issueSet == null) {
                issueSet = new HashSet<>();
                releaseNotes.put(type, issueSet);
            }

            issueSet.add(String.format(" - [%s] %s (%s)", key, summary, status));
        }

        StringBuilder sb = new StringBuilder();
        // Iterate entries directly instead of keySet() + get() per key.
        for (Map.Entry<String, Set<String>> entry : releaseNotes.entrySet()) {
            sb.append(String.format("# %s\n", entry.getKey()));
            for (String issue : entry.getValue()) {
                sb.append(issue);
                sb.append("\n");
            }
        }

        return sb.toString();
    }

    /**
     * Gets a set of issues that have the given fixVersion associated with them.
     *
     * <p>
     * Kohsuke: this seems to fail if {@link JiraSite#useHTTPAuth} is on. What is the motivation behind JIRA site?
     *
     * @param projectKey  The project key
     * @param versionName The fixVersion
     * @return A set of JiraIssues
     * @throws IOException, TimeoutException
     */
    public Set<JiraIssue> getIssueWithFixVersion(String projectKey, String versionName) throws IOException, TimeoutException {
        JiraSession session = getSession();
        if (session == null) {
            return Collections.emptySet();
        }

        List<Issue> issues = session.getIssuesWithFixVersion(projectKey, versionName);
        if (issues == null || issues.isEmpty()) {
            return Collections.emptySet();
        }

        Set<JiraIssue> issueSet = new HashSet<>(issues.size());
        for (Issue issue : issues) {
            issueSet.add(new JiraIssue(issue));
        }

        return issueSet;
    }

    /**
     * Migrates issues matching the jql query provided to a new fix version.
     *
     * @param projectKey The project key
     * @param toVersion  The new fixVersion
     * @param query      A JQL Query
     * @throws IOException, TimeoutException
     */
    public void replaceFixVersion(String projectKey, String fromVersion, String toVersion, String query) throws IOException, TimeoutException {
        JiraSession session = getSession();
        if (session == null) {
            LOGGER.warning("JIRA session could not be established");
            return;
        }

        session.replaceFixVersion(projectKey, fromVersion, toVersion, query);
    }

    /**
     * Migrates issues matching the jql query provided to a new fix version.
     *
     * @param projectKey  The project key
     * @param versionName The new fixVersion
     * @param query       A JQL Query
     * @throws IOException, TimeoutException
     */
    public void migrateIssuesToFixVersion(String projectKey, String versionName, String query) throws IOException, TimeoutException {
        JiraSession session = getSession();
        if (session == null) {
            LOGGER.warning("JIRA session could not be established");
            return;
        }

        session.migrateIssuesToFixVersion(projectKey, versionName, query);
    }

    /**
     * Adds new fix version to issues matching the jql.
     *
     * @param projectKey
     * @param versionName
     * @param query
     * @throws IOException, TimeoutException
     */
    public void addFixVersionToIssue(String projectKey, String versionName, String query) throws IOException, TimeoutException {
        JiraSession session = getSession();
        if (session == null) {
            LOGGER.warning("JIRA session could not be established");
            return;
        }

        session.addFixVersion(projectKey, versionName, query);
    }

    /**
     * Progresses all issues matching the JQL search, using the given workflow action. Optionally
     * adds a comment to the issue(s) at the same time.
     *
     * @param jqlSearch
     * @param workflowActionName
     * @param comment
     * @param console
     * @throws IOException, TimeoutException
     */
    public boolean progressMatchingIssues(String jqlSearch, String workflowActionName, String comment, PrintStream console) throws IOException, TimeoutException {
        JiraSession session = getSession();
        if (session == null) {
            LOGGER.warning("JIRA session could not be established");
            console.println(Messages.FailedToConnect());
            return false;
        }

        boolean success = true;
        List<Issue> issues = session.getIssuesFromJqlSearch(jqlSearch);

        if (isEmpty(workflowActionName)) {
            console.println("[JIRA] No workflow action was specified, " +
                    "thus no status update will be made for any of the matching issues.");
        }

        for (Issue issue : issues) {
            String issueKey = issue.getKey();

            if (isNotEmpty(comment)) {
                session.addComment(issueKey, comment, null, null);
            }

            if (isEmpty(workflowActionName)) {
                continue;
            }

            Integer actionId = session.getActionIdForIssue(issueKey, workflowActionName);

            if (actionId == null) {
                LOGGER.fine(String.format("Invalid workflow action %s for issue %s; issue status = %s",
                        workflowActionName, issueKey, issue.getStatus()));
                console.println(Messages.JiraIssueUpdateBuilder_UnknownWorkflowAction(issueKey, workflowActionName));
                success = false;
                continue;
            }

            String newStatus = session.progressWorkflowAction(issueKey, actionId);

            console.println(String.format("[JIRA] Issue %s transitioned to \"%s\" due to action \"%s\".",
                    issueKey, newStatus, workflowActionName));
        }

        return success;
    }

    @Extension
    public static class DescriptorImpl extends Descriptor<JiraSite> {
        @Override
        public String getDisplayName() {
            return "JIRA Site";
        }

        /**
         * Checks if the user name and password are valid.
         */
        public FormValidation doValidate(@QueryParameter String userName,
                                         @QueryParameter String url,
                                         @QueryParameter String password,
                                         @QueryParameter String groupVisibility,
                                         @QueryParameter String roleVisibility,
                                         @QueryParameter boolean useHTTPAuth,
                                         @QueryParameter String alternativeUrl,
                                         @QueryParameter Integer timeout) throws IOException {
            url = Util.fixEmpty(url);
            alternativeUrl = Util.fixEmpty(alternativeUrl);
            URL mainURL, alternativeURL = null;

            try {
                if (url == null) {
                    return FormValidation.error("No URL given");
                }
                mainURL = new URL(url);
            } catch (MalformedURLException e) {
                return FormValidation.error(String.format("Malformed URL (%s)", url), e);
            }

            try {
                if (alternativeUrl != null) {
                    alternativeURL = new URL(alternativeUrl);
                }
            } catch (MalformedURLException e) {
                return FormValidation.error(String.format("Malformed alternative URL (%s)", alternativeUrl), e);
            }

            JiraSite site = new JiraSite(mainURL, alternativeURL, userName, password, false,
                    false, null, false, groupVisibility, roleVisibility, useHTTPAuth);
            site.setTimeout(timeout);
            try {
                JiraSession session = site.createSession();

                // createSession() returns null when no credentials are supplied;
                // report that instead of throwing a NullPointerException below.
                if (session == null) {
                    return FormValidation.error("Cannot validate: user name and password are required");
                }

                session.getMyPermissions();
                return FormValidation.ok("Success");
            } catch (RestClientException e) {
                LOGGER.log(Level.WARNING, "Failed to login to JIRA at " + url, e);
            }

            return FormValidation.error("Failed to login to JIRA");
        }
    }

    public void addVersion(String version, String projectKey) throws IOException {
        JiraSession session = getSession();
        if (session == null) {
            LOGGER.warning("JIRA session could not be established");
            return;
        }

        session.addVersion(version, projectKey);
    }
}
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.mail.service.impl; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.kuali.rice.core.api.mail.EmailContent; import org.kuali.rice.core.api.util.ClasspathOrFileResourceLoader; import org.kuali.rice.core.api.util.RiceConstants; import org.kuali.rice.core.api.util.xml.XmlHelper; import org.kuali.rice.core.api.util.xml.XmlJotter; import org.kuali.rice.coreservice.api.style.StyleService; import org.kuali.rice.kew.api.KewApiConstants; import org.kuali.rice.kew.api.WorkflowRuntimeException; import org.kuali.rice.kew.api.action.ActionItem; import org.kuali.rice.kew.api.util.CodeTranslator; import org.kuali.rice.kew.doctype.bo.DocumentType; import org.kuali.rice.kew.feedback.web.FeedbackForm; import org.kuali.rice.kew.mail.CustomEmailAttribute; import org.kuali.rice.kew.mail.EmailStyleHelper; import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue; import org.kuali.rice.kew.routeheader.service.RouteHeaderService; import org.kuali.rice.kew.service.KEWServiceLocator; import org.kuali.rice.kew.user.UserUtils; import org.kuali.rice.kim.api.group.Group; import org.kuali.rice.kim.api.identity.Person; import org.kuali.rice.kim.api.identity.principal.Principal; import org.kuali.rice.kim.api.services.KimApiServiceLocator; import org.kuali.rice.krad.util.GlobalVariables; import org.kuali.rice.krad.util.ObjectUtils; 
import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Templates; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import java.io.StringWriter; import java.util.Collection; import java.util.Date; import java.util.Map; /** * EmailContentService that serves EmailContent customizable via XSLT style sheets * The global email style name is: kew.email.style * If this style is not found, the resource 'defaultEmailStyle.xsl' will be retrieved * relative to this class. * @author Kuali Rice Team (rice.collab@kuali.org) */ public class StyleableEmailContentServiceImpl extends BaseEmailContentServiceImpl { private static final Logger LOG = Logger.getLogger(StyleableEmailContentServiceImpl.class); protected final String DEFAULT_EMAIL_STYLESHEET_RESOURCE_LOC = "defaultEmailStyle.xsl"; protected StyleService styleService; protected EmailStyleHelper styleHelper = new EmailStyleHelper(); protected String globalEmailStyleSheet = KewApiConstants.EMAIL_STYLESHEET_NAME; protected RouteHeaderService routeHeaderService; public void setStyleService(StyleService styleService) { this.styleService = styleService; } public void setGlobalEmailStyleSheet(String globalEmailStyleSheet) { this.globalEmailStyleSheet = globalEmailStyleSheet; } protected static DocumentBuilder getDocumentBuilder(boolean coalesce) { try { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setCoalescing(coalesce); return dbf.newDocumentBuilder(); } catch (ParserConfigurationException e) { String message = "Error constructing document builder"; 
LOG.error(message, e); throw new WorkflowRuntimeException(message, e); } } protected static void addObjectXML(Document doc, Object o, Node node, String name) throws Exception { Element element = XmlHelper.propertiesToXml(doc, o, name); if (LOG.isDebugEnabled()) { LOG.debug(XmlJotter.jotNode(element)); } if (node == null) { node = doc; } node.appendChild(element); } protected static void addTextElement(Document doc, Element baseElement, String elementName, Object elementData) { Element element = doc.createElement(elementName); String dataValue = ""; if (elementData != null) { dataValue = elementData.toString(); } element.appendChild(doc.createTextNode(dataValue)); baseElement.appendChild(element); } protected static void addCDataElement(Document doc, Element baseElement, String elementName, Object elementData) { Element element = doc.createElement(elementName); String dataValue = ""; if (elementData != null) { dataValue = elementData.toString(); } element.appendChild(doc.createCDATASection(dataValue)); baseElement.appendChild(element); } protected static void addTimestampElement(Document doc, Element baseElement, String elementName, Date elementData) { addTextElement(doc, baseElement, elementName, RiceConstants.getDefaultDateFormat().format(elementData)); } protected static void addDelegatorElement(Document doc, Element baseElement, ActionItem actionItem) { Element delegatorElement = doc.createElement("delegator"); if ( (actionItem.getDelegatorPrincipalId() != null) && (actionItem.getDelegatorPrincipalId() != null) ) { // add empty delegator element baseElement.appendChild(delegatorElement); return; } String delegatorType = ""; String delegatorId = ""; String delegatorDisplayValue = ""; if (actionItem.getDelegatorPrincipalId() != null) { delegatorType = "user"; delegatorId = actionItem.getDelegatorPrincipalId(); Principal delegator = KimApiServiceLocator.getIdentityService().getPrincipal(delegatorId); if (delegator == null) { LOG.error("Cannot find user for id " + 
delegatorId); delegatorDisplayValue = "USER NOT FOUND"; } else { delegatorDisplayValue = UserUtils.getTransposedName(GlobalVariables.getUserSession(), delegator); } } else if (actionItem.getDelegatorPrincipalId() != null) { delegatorType = "workgroup"; delegatorId = actionItem.getDelegatorGroupId().toString(); delegatorDisplayValue = KimApiServiceLocator.getGroupService().getGroup(actionItem.getDelegatorGroupId()).getName(); } delegatorElement.setAttribute("type", delegatorType); // add the id element Element idElement = doc.createElement("id"); idElement.appendChild(doc.createTextNode(delegatorId)); delegatorElement.appendChild(idElement); // add the display value element Element displayValElement = doc.createElement("displayValue"); displayValElement.appendChild(doc.createTextNode(delegatorDisplayValue)); delegatorElement.appendChild(displayValElement); baseElement.appendChild(delegatorElement); } protected static void addWorkgroupRequestElement(Document doc, Element baseElement, ActionItem actionItem) { Element workgroupElement = doc.createElement("workgroupRequest"); if (actionItem.getGroupId() != null) { // add the id element Element idElement = doc.createElement("id"); idElement.appendChild(doc.createTextNode(actionItem.getGroupId())); workgroupElement.appendChild(idElement); // add the display value element Element displayValElement = doc.createElement("displayValue"); displayValElement.appendChild(doc.createTextNode(actionItem.getGroupId())); workgroupElement.appendChild(displayValElement); } baseElement.appendChild(workgroupElement); } /** * This method is used to add the given {@link ActionItem} to the given {@link org.w3c.dom.Document} in a summarized * form for use in weekly or daily type reminder e-mails. 
* * @param doc - Document to have the ActionItem added to * @param actionItem - the action item being added * @param user - the current user * @param node - the node object to add the actionItem XML to (defaults to the doc variable if null is passed in) * @throws Exception */ protected void addSummarizedActionItem(Document doc, ActionItem actionItem, Person user, Node node, DocumentRouteHeaderValue routeHeader) throws Exception { if (node == null) { node = doc; } Element root = doc.createElement("summarizedActionItem"); // add in all items from action list as preliminary default dataset addTextElement(doc, root, "documentId", actionItem.getDocumentId()); addTextElement(doc, root, "docName", actionItem.getDocName()); addCDataElement(doc, root, "docLabel", actionItem.getDocLabel()); addCDataElement(doc, root, "docTitle", actionItem.getDocTitle()); //DocumentRouteHeaderValue routeHeader = getRouteHeader(actionItem); addTextElement(doc, root, "docRouteStatus", routeHeader.getDocRouteStatus()); addCDataElement(doc, root, "routeStatusLabel", routeHeader.getRouteStatusLabel()); addTextElement(doc, root, "actionRequestCd", actionItem.getActionRequestCd()); addTextElement(doc, root, "actionRequestLabel", CodeTranslator.getActionRequestLabel( actionItem.getActionRequestCd())); addDelegatorElement(doc, root, actionItem); addTimestampElement(doc, root, "createDate", routeHeader.getCreateDate()); addWorkgroupRequestElement(doc, root, actionItem); if (actionItem.getDateTimeAssigned() != null) addTimestampElement(doc, root, "dateAssigned", actionItem.getDateTimeAssigned().toDate()); node.appendChild(root); } public DocumentRouteHeaderValue getRouteHeader(ActionItem actionItem) { if (routeHeaderService == null) { routeHeaderService = KEWServiceLocator.getRouteHeaderService(); } return routeHeaderService.getRouteHeader(actionItem.getDocumentId()); } protected Map<String,DocumentRouteHeaderValue> getRouteHeaders(Collection<ActionItem> actionItems) { if (routeHeaderService == null) { 
routeHeaderService = KEWServiceLocator.getRouteHeaderService(); } return routeHeaderService.getRouteHeadersForActionItems(actionItems); } protected static String transform(Templates style, Document doc) { StringWriter writer = new StringWriter(); StreamResult result = new StreamResult(writer); try { style.newTransformer().transform(new DOMSource(doc), result); return writer.toString(); } catch (TransformerException te) { String message = "Error transforming DOM"; LOG.error(message, te); throw new WorkflowRuntimeException(message, te); } } /** * This method retrieves the style from the system using the given name. If none is found the default style xsl file * defined by {@link #DEFAULT_EMAIL_STYLESHEET_RESOURCE_LOC} is used. * * @param styleName * @return a valid {@link javax.xml.transform.Templates} using either the given styleName or the default xsl style file */ protected Templates getStyle(String styleName) { Templates style = null; try { style = styleService.getStyleAsTranslet(styleName); } catch (TransformerConfigurationException tce) { String message = "Error obtaining style '" + styleName + "', using default"; LOG.error(message, tce); // throw new WorkflowRuntimeException("Error obtaining style '" + styleName + "'", tce); } if (style == null) { LOG.warn("Could not find specified style, " + styleName + ", using default"); try { style = TransformerFactory.newInstance().newTemplates(new StreamSource(new ClasspathOrFileResourceLoader().getResource("classpath:org/kuali/rice/kew/mail/" + DEFAULT_EMAIL_STYLESHEET_RESOURCE_LOC).getInputStream())); } catch (Exception tce) { String message = "Error obtaining default style from resource: " + DEFAULT_EMAIL_STYLESHEET_RESOURCE_LOC; LOG.error(message, tce); throw new WorkflowRuntimeException("Error obtaining style '" + styleName + "'", tce); } } return style; } protected EmailContent generateEmailContent(String styleName, Document doc) { Templates style = getStyle(styleName); return styleHelper.generateEmailContent(style, 
doc); } protected EmailContent generateReminderForActionItems(Person user, Collection<ActionItem> actionItems, String name, String style) { DocumentBuilder db = getDocumentBuilder(false); Document doc = db.newDocument(); Element element = doc.createElement(name); Map<String,DocumentRouteHeaderValue> routeHeaders = getRouteHeaders(actionItems); setStandardAttributes(element); doc.appendChild(element); try { addObjectXML(doc, user, element, "user"); for (ActionItem actionItem: actionItems) { try { addSummarizedActionItem(doc, actionItem, user, element, routeHeaders.get(actionItem.getDocumentId())); } catch (Exception e) { String message = "Error generating XML for action item: " + actionItem; LOG.error(message, e); throw new WorkflowRuntimeException(e); } } } catch (Exception e) { String message = "Error generating XML for action items: " + actionItems; LOG.error(message, e); throw new WorkflowRuntimeException(e); } return generateEmailContent(style, doc); } protected void setStandardAttributes(Element e) { e.setAttribute("env", getDeploymentEnvironment()); e.setAttribute("applicationEmailAddress", getApplicationEmailAddress()); e.setAttribute("actionListUrl", getActionListUrl()); e.setAttribute("preferencesUrl", getPreferencesUrl()); e.setAttribute("routeLogUrl", getRouteLogUrl()); } /** * This method generates an {@link EmailContent} object using the given parameters. Part of this operation includes * serializing the given {@link ActionItem} to XML. 
The following objects and methods are included in the serialization: * * <ul> * <li>{@link Person}</li> * <li>{@link Person#getPrincipalName()}</li> * <li>{@link DocumentRouteHeaderValue}</li> * <li>{@link DocumentRouteHeaderValue#getInitiatorUser()}</li> * <li>{@link DocumentRouteHeaderValue#getDocumentType()}</li> * <li>{@link Person}</li> * </ul> * * @param user - the current user * @param actionItem - the action item being added * @param documentType - the document type that the custom email style sheet will come from * @param node - the node object to add the actionItem XML to (defaults to the doc variable if null is passed in) * @throws Exception */ @Override public EmailContent generateImmediateReminder(Person user, ActionItem actionItem, DocumentType documentType) { if (user != null) { LOG.info("Starting generation of immediate email reminder..."); LOG.info("Action Id: " + actionItem.getId() + "; ActionRequestId: " + actionItem.getActionRequestId() + "; Action Item Principal Id: " + actionItem.getPrincipalId()); LOG.info("User Principal Id: " + user.getPrincipalId()); // change style name based on documentType when configurable email style on document is implemented... 
String styleSheet = documentType.getCustomEmailStylesheet(); LOG.debug(documentType.getName() + " style: " + styleSheet); if (styleSheet == null) { styleSheet = globalEmailStyleSheet; } LOG.info("generateImmediateReminder using style sheet: "+ styleSheet + " for Document Type " + documentType.getName()); // return generateReminderForActionItems(user, actionItems, "immediateReminder", styleSheet); DocumentBuilder db = getDocumentBuilder(false); Document doc = db.newDocument(); Element element = doc.createElement("immediateReminder"); setStandardAttributes(element); doc.appendChild(element); try { addObjectXML(doc, user, element, "user"); // addActionItem(doc, actionItem, user, node); Node node = element; if (node == null) { node = doc; } Element root = doc.createElement("actionItem"); // append the custom body and subject if they exist try { CustomEmailAttribute customEmailAttribute = getCustomEmailAttribute(user, actionItem); if (customEmailAttribute != null) { String customBody = customEmailAttribute.getCustomEmailBody(); if (!org.apache.commons.lang.StringUtils.isEmpty(customBody)) { Element bodyElement = doc.createElement("customBody"); bodyElement.appendChild(doc.createTextNode(customBody)); root.appendChild(bodyElement); } String customEmailSubject = customEmailAttribute.getCustomEmailSubject(); if (!org.apache.commons.lang.StringUtils.isEmpty(customEmailSubject)) { Element subjectElement = doc.createElement("customSubject"); subjectElement.appendChild(doc.createTextNode(customEmailSubject)); root.appendChild(subjectElement); } } } catch (Exception e) { LOG.error("Error when checking for custom email body and subject.", e); } Person person = KimApiServiceLocator.getPersonService().getPerson(actionItem.getPrincipalId()); DocumentRouteHeaderValue header = getRouteHeader(actionItem); // keep adding stuff until we have all the xml we need to formulate the message :/ addObjectXML(doc, actionItem, root, "actionItem"); addObjectXML(doc, person, root, 
"actionItemPerson"); addTextElement(doc, root, "actionItemPrincipalId", person.getPrincipalId()); addTextElement(doc, root, "actionItemPrincipalName", person.getPrincipalName()); addDocumentHeaderXML(doc, header, root, "doc"); addObjectXML(doc, header.getInitiatorPrincipal(), root, "docInitiator"); addTextElement(doc, root, "docInitiatorDisplayName", header.getInitiatorDisplayName()); if (ObjectUtils.isNotNull(actionItem.getGroupId())) { Group group = KimApiServiceLocator.getGroupService().getGroup(actionItem.getGroupId()); addTextElement(doc, root, "groupName", group.getName()); } addObjectXML(doc, header.getDocumentType(), root, "documentType"); node.appendChild(root); } catch (Exception e) { String message = "Error generating immediate reminder XML for action item: " + actionItem; LOG.error(message, e); throw new WorkflowRuntimeException(e); } LOG.info("Leaving generation of immeidate email reminder..."); return generateEmailContent(styleSheet, doc); } LOG.info("Skipping generation of immediate email reminder due to the user being null"); return null; } /** * This method handles converting the DocumentRouteHeaderValue into an XML representation. The reason we can't just use * propertiesToXml like we have elsewhere is because the doc header has a String attached to it that has the XML document * content in it. The default serialization of this will serialize this as a String so we will end up with escaped XML * in our output which we won't be able to process with the email stylesheet. So we need to read the xml content from * the document and parse it into a DOM object so it can be appended to our output. 
*/ protected void addDocumentHeaderXML(Document document, DocumentRouteHeaderValue documentHeader, Node node, String elementName) throws Exception { Element element = XmlHelper.propertiesToXml(document, documentHeader, elementName); // now we need to "fix" the xml document content because it's going to be in there as escaped XML Element docContentElement = (Element)element.getElementsByTagName("docContent").item(0); String documentContent = docContentElement.getTextContent(); if (!StringUtils.isBlank(documentContent) && documentContent.startsWith("<")) { Document documentContentXML = XmlHelper.readXml(documentContent); Element documentContentElement = documentContentXML.getDocumentElement(); documentContentElement = (Element)document.importNode(documentContentElement, true); // remove the old, bad text content docContentElement.removeChild(docContentElement.getFirstChild()); // replace with actual XML docContentElement.appendChild(documentContentElement); } else { // in this case it means that the XML is encrypted, unfortunately, we have no way to decrypt it since // the key is stored in the client application. 
We will just include the doc content since none of our // current IU clients will be using this feature right away // remove the old, bad text content docContentElement.removeChild(docContentElement.getFirstChild()); } if (LOG.isDebugEnabled()) { LOG.debug(XmlJotter.jotNode(element)); } node.appendChild(element); } @Override public EmailContent generateWeeklyReminder(Person user, Collection<ActionItem> actionItems) { return generateReminderForActionItems(user, actionItems, "weeklyReminder", globalEmailStyleSheet); } @Override public EmailContent generateDailyReminder(Person user, Collection<ActionItem> actionItems) { return generateReminderForActionItems(user, actionItems, "dailyReminder", globalEmailStyleSheet); } @Override public EmailContent generateFeedback(FeedbackForm form) { DocumentBuilder db = getDocumentBuilder(true); Document doc = db.newDocument(); String styleSheet = globalEmailStyleSheet; // if the doc type is specified, see if that doc has a custom email stylesheet and use it // NOTE: do we need to do this for feedback? presumably feedback will be going back to admins /*String docTypeName = form.getDocumentType(); if (!StringUtils.isBlank(docTypeName)) { DocumentType docType = KEWServiceLocator.getDocumentTypeService().findByName(docTypeName); if (docType == null) { LOG.error("User specified document type '" + docTypeName + "' in feedback form, but the document type was not found in the system"); } else { if (docType.getCustomEmailStylesheet() != null) { styleSheet = docType.getCustomEmailStylesheet(); } } }*/ LOG.info("form: " + form.getDocumentId()); try { addObjectXML(doc, form, null, "feedback"); } catch (Exception e) { String message = "Error generating XML for feedback form: " + form; LOG.error(message, e); throw new WorkflowRuntimeException(message, e); } setStandardAttributes(doc.getDocumentElement()); return generateEmailContent(styleSheet, doc); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode.ha; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSNNTopology; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster; import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster.Builder; import org.apache.hadoop.hdfs.qjournal.server.Journal; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; import org.apache.hadoop.hdfs.server.common.Storage; import 
org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.tools.DFSAdmin; import org.apache.hadoop.hdfs.util.BestEffortLongFile; import org.apache.hadoop.hdfs.util.PersistentLongFile; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Before; import org.junit.Test; import com.google.common.base.Joiner; import org.mockito.internal.util.reflection.Whitebox; /** * Tests for upgrading with HA enabled. */ public class TestDFSUpgradeWithHA { private static final Log LOG = LogFactory.getLog(TestDFSUpgradeWithHA.class); private Configuration conf; @Before public void createConfiguration() { conf = new HdfsConfiguration(); // Turn off persistent IPC, so that the DFSClient can survive NN restart conf.setInt( CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, 0); } private static void assertCTimesEqual(MiniDFSCluster cluster) { long nn1CTime = cluster.getNamesystem(0).getFSImage().getStorage().getCTime(); long nn2CTime = cluster.getNamesystem(1).getFSImage().getStorage().getCTime(); assertEquals(nn1CTime, nn2CTime); } private static void checkClusterPreviousDirExistence(MiniDFSCluster cluster, boolean shouldExist) { for (int i = 0; i < 2; i++) { checkNnPreviousDirExistence(cluster, i, shouldExist); } } private static void checkNnPreviousDirExistence(MiniDFSCluster cluster, int index, boolean shouldExist) { Collection<URI> nameDirs = cluster.getNameDirs(index); for (URI nnDir : nameDirs) { checkPreviousDirExistence(new File(nnDir), shouldExist); } } private static void checkJnPreviousDirExistence(MiniQJMHACluster jnCluster, boolean shouldExist) throws IOException { for (int i = 0; i < 3; i++) { checkPreviousDirExistence( jnCluster.getJournalCluster().getJournalDir(i, "ns1"), shouldExist); } if (shouldExist) { assertEpochFilesCopied(jnCluster); } } private static void assertEpochFilesCopied(MiniQJMHACluster jnCluster) throws IOException { for (int i = 0; i < 3; i++) { File journalDir = 
jnCluster.getJournalCluster().getJournalDir(i, "ns1"); File currDir = new File(journalDir, "current"); File prevDir = new File(journalDir, "previous"); for (String fileName : new String[]{ Journal.LAST_PROMISED_FILENAME, Journal.LAST_WRITER_EPOCH }) { File prevFile = new File(prevDir, fileName); // Possible the prev file doesn't exist, e.g. if there has never been a // writer before the upgrade. if (prevFile.exists()) { PersistentLongFile prevLongFile = new PersistentLongFile(prevFile, -10); PersistentLongFile currLongFile = new PersistentLongFile(new File(currDir, fileName), -11); assertTrue("Value in " + fileName + " has decreased on upgrade in " + journalDir, prevLongFile.get() <= currLongFile.get()); } } } } private static void checkPreviousDirExistence(File rootDir, boolean shouldExist) { File previousDir = new File(rootDir, "previous"); if (shouldExist) { assertTrue(previousDir + " does not exist", previousDir.exists()); } else { assertFalse(previousDir + " does exist", previousDir.exists()); } } private void runFinalizeCommand(MiniDFSCluster cluster) throws IOException { HATestUtil.setFailoverConfigurations(cluster, conf); new DFSAdmin(conf).finalizeUpgrade(); } /** * Ensure that an admin cannot finalize an HA upgrade without at least one NN * being active. */ @Test public void testCannotFinalizeIfNoActive() throws IOException, URISyntaxException { MiniDFSCluster cluster = null; FileSystem fs = null; try { cluster = new MiniDFSCluster.Builder(conf) .nnTopology(MiniDFSNNTopology.simpleHATopology()) .numDataNodes(0) .build(); File sharedDir = new File(cluster.getSharedEditsDir(0, 1)); // No upgrade is in progress at the moment. checkClusterPreviousDirExistence(cluster, false); assertCTimesEqual(cluster); checkPreviousDirExistence(sharedDir, false); // Transition NN0 to active and do some FS ops. cluster.transitionToActive(0); fs = HATestUtil.configureFailoverFs(cluster, conf); assertTrue(fs.mkdirs(new Path("/foo1"))); // Do the upgrade. 
Shut down NN1 and then restart NN0 with the upgrade // flag. cluster.shutdownNameNode(1); cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE); cluster.restartNameNode(0, false); checkNnPreviousDirExistence(cluster, 0, true); checkNnPreviousDirExistence(cluster, 1, false); checkPreviousDirExistence(sharedDir, true); // NN0 should come up in the active state when given the -upgrade option, // so no need to transition it to active. assertTrue(fs.mkdirs(new Path("/foo2"))); // Restart NN0 without the -upgrade flag, to make sure that works. cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.REGULAR); cluster.restartNameNode(0, false); // Make sure we can still do FS ops after upgrading. cluster.transitionToActive(0); assertTrue(fs.mkdirs(new Path("/foo3"))); // Now bootstrap the standby with the upgraded info. int rc = BootstrapStandby.run( new String[]{"-force"}, cluster.getConfiguration(1)); assertEquals(0, rc); // Now restart NN1 and make sure that we can do ops against that as well. cluster.restartNameNode(1); cluster.transitionToStandby(0); cluster.transitionToActive(1); assertTrue(fs.mkdirs(new Path("/foo4"))); assertCTimesEqual(cluster); // Now there's no active NN. cluster.transitionToStandby(1); try { runFinalizeCommand(cluster); fail("Should not have been able to finalize upgrade with no NN active"); } catch (IOException ioe) { GenericTestUtils.assertExceptionContains( "Cannot finalize with no NameNode active", ioe); } } finally { if (fs != null) { fs.close(); } if (cluster != null) { cluster.shutdown(); } } } /** * Make sure that an HA NN with NFS-based HA can successfully start and * upgrade. 
*/ @Test public void testNfsUpgrade() throws IOException, URISyntaxException { MiniDFSCluster cluster = null; FileSystem fs = null; try { cluster = new MiniDFSCluster.Builder(conf) .nnTopology(MiniDFSNNTopology.simpleHATopology()) .numDataNodes(0) .build(); File sharedDir = new File(cluster.getSharedEditsDir(0, 1)); // No upgrade is in progress at the moment. checkClusterPreviousDirExistence(cluster, false); assertCTimesEqual(cluster); checkPreviousDirExistence(sharedDir, false); // Transition NN0 to active and do some FS ops. cluster.transitionToActive(0); fs = HATestUtil.configureFailoverFs(cluster, conf); assertTrue(fs.mkdirs(new Path("/foo1"))); // Do the upgrade. Shut down NN1 and then restart NN0 with the upgrade // flag. cluster.shutdownNameNode(1); cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE); cluster.restartNameNode(0, false); checkNnPreviousDirExistence(cluster, 0, true); checkNnPreviousDirExistence(cluster, 1, false); checkPreviousDirExistence(sharedDir, true); // NN0 should come up in the active state when given the -upgrade option, // so no need to transition it to active. assertTrue(fs.mkdirs(new Path("/foo2"))); // Restart NN0 without the -upgrade flag, to make sure that works. cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.REGULAR); cluster.restartNameNode(0, false); // Make sure we can still do FS ops after upgrading. cluster.transitionToActive(0); assertTrue(fs.mkdirs(new Path("/foo3"))); // Now bootstrap the standby with the upgraded info. int rc = BootstrapStandby.run( new String[]{"-force"}, cluster.getConfiguration(1)); assertEquals(0, rc); // Now restart NN1 and make sure that we can do ops against that as well. 
cluster.restartNameNode(1); cluster.transitionToStandby(0); cluster.transitionToActive(1); assertTrue(fs.mkdirs(new Path("/foo4"))); assertCTimesEqual(cluster); } finally { if (fs != null) { fs.close(); } if (cluster != null) { cluster.shutdown(); } } } private long getCommittedTxnIdValue(MiniQJMHACluster qjCluster) throws IOException { Journal journal1 = qjCluster.getJournalCluster().getJournalNode(0) .getOrCreateJournal(MiniQJMHACluster.NAMESERVICE); BestEffortLongFile committedTxnId = (BestEffortLongFile) Whitebox .getInternalState(journal1, "committedTxnId"); return committedTxnId != null ? committedTxnId.get() : HdfsConstants.INVALID_TXID; } /** * Make sure that an HA NN can successfully upgrade when configured using * JournalNodes. */ @Test public void testUpgradeWithJournalNodes() throws IOException, URISyntaxException { MiniQJMHACluster qjCluster = null; FileSystem fs = null; try { Builder builder = new MiniQJMHACluster.Builder(conf); builder.getDfsBuilder() .numDataNodes(0); qjCluster = builder.build(); MiniDFSCluster cluster = qjCluster.getDfsCluster(); // No upgrade is in progress at the moment. checkJnPreviousDirExistence(qjCluster, false); checkClusterPreviousDirExistence(cluster, false); assertCTimesEqual(cluster); // Transition NN0 to active and do some FS ops. cluster.transitionToActive(0); fs = HATestUtil.configureFailoverFs(cluster, conf); assertTrue(fs.mkdirs(new Path("/foo1"))); // get the value of the committedTxnId in journal nodes final long cidBeforeUpgrade = getCommittedTxnIdValue(qjCluster); // Do the upgrade. Shut down NN1 and then restart NN0 with the upgrade // flag. 
// NOTE(review): tail of a test method whose opening lines precede this chunk —
// this continues an upgrade scenario already in progress (qjCluster/fs/cidBeforeUpgrade
// were set up earlier in that method).
cluster.shutdownNameNode(1);
cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE);
cluster.restartNameNode(0, false);

checkNnPreviousDirExistence(cluster, 0, true);
checkNnPreviousDirExistence(cluster, 1, false);
checkJnPreviousDirExistence(qjCluster, true);
// The committed txn id must not go backwards across the upgrade restart.
assertTrue(cidBeforeUpgrade <= getCommittedTxnIdValue(qjCluster));

// NN0 should come up in the active state when given the -upgrade option,
// so no need to transition it to active.
assertTrue(fs.mkdirs(new Path("/foo2")));

// Restart NN0 without the -upgrade flag, to make sure that works.
cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.REGULAR);
cluster.restartNameNode(0, false);

// Make sure we can still do FS ops after upgrading.
cluster.transitionToActive(0);
assertTrue(fs.mkdirs(new Path("/foo3")));

assertTrue(getCommittedTxnIdValue(qjCluster) > cidBeforeUpgrade);

// Now bootstrap the standby with the upgraded info.
int rc = BootstrapStandby.run(
    new String[]{"-force"},
    cluster.getConfiguration(1));
assertEquals(0, rc);

// Now restart NN1 and make sure that we can do ops against that as well.
cluster.restartNameNode(1);
cluster.transitionToStandby(0);
cluster.transitionToActive(1);
assertTrue(fs.mkdirs(new Path("/foo4")));

assertCTimesEqual(cluster);
} finally {
  if (fs != null) {
    fs.close();
  }
  if (qjCluster != null) {
    qjCluster.shutdown();
  }
}
}

/**
 * Upgrade an HA cluster backed by JournalNodes and then finalize the upgrade,
 * verifying that both the NN "previous" dirs and the JN "previous" dirs are
 * removed by finalization and that the committed txn id is preserved.
 */
@Test
public void testFinalizeWithJournalNodes() throws IOException,
    URISyntaxException {
  MiniQJMHACluster qjCluster = null;
  FileSystem fs = null;
  try {
    Builder builder = new MiniQJMHACluster.Builder(conf);
    builder.getDfsBuilder()
        .numDataNodes(0);
    qjCluster = builder.build();
    MiniDFSCluster cluster = qjCluster.getDfsCluster();

    // No upgrade is in progress at the moment.
    checkJnPreviousDirExistence(qjCluster, false);
    checkClusterPreviousDirExistence(cluster, false);
    assertCTimesEqual(cluster);

    // Transition NN0 to active and do some FS ops.
    cluster.transitionToActive(0);
    fs = HATestUtil.configureFailoverFs(cluster, conf);
    assertTrue(fs.mkdirs(new Path("/foo1")));

    final long cidBeforeUpgrade = getCommittedTxnIdValue(qjCluster);

    // Do the upgrade. Shut down NN1 and then restart NN0 with the upgrade
    // flag.
    cluster.shutdownNameNode(1);
    cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE);
    cluster.restartNameNode(0, false);
    assertTrue(cidBeforeUpgrade <= getCommittedTxnIdValue(qjCluster));
    assertTrue(fs.mkdirs(new Path("/foo2")));

    checkNnPreviousDirExistence(cluster, 0, true);
    checkNnPreviousDirExistence(cluster, 1, false);
    checkJnPreviousDirExistence(qjCluster, true);

    // Now bootstrap the standby with the upgraded info.
    int rc = BootstrapStandby.run(
        new String[]{"-force"},
        cluster.getConfiguration(1));
    assertEquals(0, rc);
    cluster.restartNameNode(1);

    final long cidDuringUpgrade = getCommittedTxnIdValue(qjCluster);
    assertTrue(cidDuringUpgrade > cidBeforeUpgrade);

    runFinalizeCommand(cluster);

    // Finalization must not lose transactions, and must clean up all
    // "previous" directories on both the NNs and the JNs.
    assertEquals(cidDuringUpgrade, getCommittedTxnIdValue(qjCluster));
    checkClusterPreviousDirExistence(cluster, false);
    checkJnPreviousDirExistence(qjCluster, false);
    assertCTimesEqual(cluster);
  } finally {
    if (fs != null) {
      fs.close();
    }
    if (qjCluster != null) {
      qjCluster.shutdown();
    }
  }
}

/**
 * Make sure that even if the NN which initiated the upgrade is in the standby
 * state that we're allowed to finalize.
 */
@Test
public void testFinalizeFromSecondNameNodeWithJournalNodes()
    throws IOException, URISyntaxException {
  MiniQJMHACluster qjCluster = null;
  FileSystem fs = null;
  try {
    Builder builder = new MiniQJMHACluster.Builder(conf);
    builder.getDfsBuilder()
        .numDataNodes(0);
    qjCluster = builder.build();
    MiniDFSCluster cluster = qjCluster.getDfsCluster();

    // No upgrade is in progress at the moment.
    checkJnPreviousDirExistence(qjCluster, false);
    checkClusterPreviousDirExistence(cluster, false);
    assertCTimesEqual(cluster);

    // Transition NN0 to active and do some FS ops.
    cluster.transitionToActive(0);
    fs = HATestUtil.configureFailoverFs(cluster, conf);
    assertTrue(fs.mkdirs(new Path("/foo1")));

    // Do the upgrade. Shut down NN1 and then restart NN0 with the upgrade
    // flag.
    cluster.shutdownNameNode(1);
    cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE);
    cluster.restartNameNode(0, false);

    checkNnPreviousDirExistence(cluster, 0, true);
    checkNnPreviousDirExistence(cluster, 1, false);
    checkJnPreviousDirExistence(qjCluster, true);

    // Now bootstrap the standby with the upgraded info.
    int rc = BootstrapStandby.run(
        new String[]{"-force"},
        cluster.getConfiguration(1));
    assertEquals(0, rc);
    cluster.restartNameNode(1);

    // Make the second NN (not the one that initiated the upgrade) active when
    // the finalize command is run.
    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);

    runFinalizeCommand(cluster);

    checkClusterPreviousDirExistence(cluster, false);
    checkJnPreviousDirExistence(qjCluster, false);
    assertCTimesEqual(cluster);
  } finally {
    if (fs != null) {
      fs.close();
    }
    if (qjCluster != null) {
      qjCluster.shutdown();
    }
  }
}

/**
 * Make sure that an HA NN will start if a previous upgrade was in progress.
 */
@Test
public void testStartingWithUpgradeInProgressSucceeds() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(MiniDFSNNTopology.simpleHATopology())
        .numDataNodes(0)
        .build();

    // Simulate an upgrade having started by creating the "previous.tmp"
    // directory in every name dir of both NNs.
    for (int i = 0; i < 2; i++) {
      for (URI uri : cluster.getNameDirs(i)) {
        File prevTmp = new File(new File(uri), Storage.STORAGE_TMP_PREVIOUS);
        LOG.info("creating previous tmp dir: " + prevTmp);
        assertTrue(prevTmp.mkdirs());
      }
    }

    // Restarting must succeed despite the in-progress upgrade markers.
    cluster.restartNameNodes();
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}

/**
 * Test rollback with NFS shared dir.
 */
@Test
public void testRollbackWithNfs() throws Exception {
  MiniDFSCluster cluster = null;
  FileSystem fs = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(MiniDFSNNTopology.simpleHATopology())
        .numDataNodes(0)
        .build();

    File sharedDir = new File(cluster.getSharedEditsDir(0, 1));

    // No upgrade is in progress at the moment.
    checkClusterPreviousDirExistence(cluster, false);
    assertCTimesEqual(cluster);
    checkPreviousDirExistence(sharedDir, false);

    // Transition NN0 to active and do some FS ops.
    cluster.transitionToActive(0);
    fs = HATestUtil.configureFailoverFs(cluster, conf);
    assertTrue(fs.mkdirs(new Path("/foo1")));

    // Do the upgrade. Shut down NN1 and then restart NN0 with the upgrade
    // flag.
    cluster.shutdownNameNode(1);
    cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE);
    cluster.restartNameNode(0, false);

    checkNnPreviousDirExistence(cluster, 0, true);
    checkNnPreviousDirExistence(cluster, 1, false);
    checkPreviousDirExistence(sharedDir, true);

    // NN0 should come up in the active state when given the -upgrade option,
    // so no need to transition it to active.
    assertTrue(fs.mkdirs(new Path("/foo2")));

    // Now bootstrap the standby with the upgraded info.
    int rc = BootstrapStandby.run(
        new String[]{"-force"},
        cluster.getConfiguration(1));
    assertEquals(0, rc);
    cluster.restartNameNode(1);

    checkNnPreviousDirExistence(cluster, 0, true);
    checkNnPreviousDirExistence(cluster, 1, true);
    checkPreviousDirExistence(sharedDir, true);
    assertCTimesEqual(cluster);

    // Now shut down the cluster and do the rollback.
    Collection<URI> nn1NameDirs = cluster.getNameDirs(0);
    cluster.shutdown();

    conf.setStrings(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
        Joiner.on(",").join(nn1NameDirs));
    NameNode.doRollback(conf, false);

    // The rollback operation should have rolled back the first NN's local
    // dirs, and the shared dir, but not the other NN's dirs. Those have to be
    // done by bootstrapping the standby.
    checkNnPreviousDirExistence(cluster, 0, false);
    checkPreviousDirExistence(sharedDir, false);
  } finally {
    if (fs != null) {
      fs.close();
    }
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}

/**
 * Upgrade and then roll back an HA cluster backed by JournalNodes. The JNs
 * stay up across the rollback so the committed txn id can be verified to have
 * been reset to the pre-upgrade history (greater than before the upgrade,
 * smaller than during it).
 */
@Test
public void testRollbackWithJournalNodes() throws IOException,
    URISyntaxException {
  MiniQJMHACluster qjCluster = null;
  FileSystem fs = null;
  try {
    Builder builder = new MiniQJMHACluster.Builder(conf);
    builder.getDfsBuilder()
        .numDataNodes(0);
    qjCluster = builder.build();
    MiniDFSCluster cluster = qjCluster.getDfsCluster();

    // No upgrade is in progress at the moment.
    checkClusterPreviousDirExistence(cluster, false);
    assertCTimesEqual(cluster);
    checkJnPreviousDirExistence(qjCluster, false);

    // Transition NN0 to active and do some FS ops.
    cluster.transitionToActive(0);
    fs = HATestUtil.configureFailoverFs(cluster, conf);
    assertTrue(fs.mkdirs(new Path("/foo1")));

    final long cidBeforeUpgrade = getCommittedTxnIdValue(qjCluster);

    // Do the upgrade. Shut down NN1 and then restart NN0 with the upgrade
    // flag.
    cluster.shutdownNameNode(1);
    cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE);
    cluster.restartNameNode(0, false);

    checkNnPreviousDirExistence(cluster, 0, true);
    checkNnPreviousDirExistence(cluster, 1, false);
    checkJnPreviousDirExistence(qjCluster, true);

    // NN0 should come up in the active state when given the -upgrade option,
    // so no need to transition it to active.
    assertTrue(fs.mkdirs(new Path("/foo2")));

    final long cidDuringUpgrade = getCommittedTxnIdValue(qjCluster);
    assertTrue(cidDuringUpgrade > cidBeforeUpgrade);

    // Now bootstrap the standby with the upgraded info.
    int rc = BootstrapStandby.run(
        new String[]{"-force"},
        cluster.getConfiguration(1));
    assertEquals(0, rc);
    cluster.restartNameNode(1);

    checkNnPreviousDirExistence(cluster, 0, true);
    checkNnPreviousDirExistence(cluster, 1, true);
    checkJnPreviousDirExistence(qjCluster, true);
    assertCTimesEqual(cluster);

    // Shut down the NNs, but deliberately leave the JNs up and running.
    Collection<URI> nn1NameDirs = cluster.getNameDirs(0);
    cluster.shutdown();

    conf.setStrings(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
        Joiner.on(",").join(nn1NameDirs));
    NameNode.doRollback(conf, false);

    final long cidAfterRollback = getCommittedTxnIdValue(qjCluster);
    assertTrue(cidBeforeUpgrade < cidAfterRollback);
    // make sure the committedTxnId has been reset correctly after rollback
    assertTrue(cidDuringUpgrade > cidAfterRollback);

    // The rollback operation should have rolled back the first NN's local
    // dirs, and the shared dir, but not the other NN's dirs. Those have to be
    // done by bootstrapping the standby.
    checkNnPreviousDirExistence(cluster, 0, false);
    checkJnPreviousDirExistence(qjCluster, false);
  } finally {
    if (fs != null) {
      fs.close();
    }
    if (qjCluster != null) {
      qjCluster.shutdown();
    }
  }
}

/**
 * Make sure that starting a second NN with the -upgrade flag fails if the
 * other NN has already done that.
 */
@Test
public void testCannotUpgradeSecondNameNode() throws IOException,
    URISyntaxException {
  MiniDFSCluster cluster = null;
  FileSystem fs = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(MiniDFSNNTopology.simpleHATopology())
        .numDataNodes(0)
        .build();

    File sharedDir = new File(cluster.getSharedEditsDir(0, 1));

    // No upgrade is in progress at the moment.
    checkClusterPreviousDirExistence(cluster, false);
    assertCTimesEqual(cluster);
    checkPreviousDirExistence(sharedDir, false);

    // Transition NN0 to active and do some FS ops.
    cluster.transitionToActive(0);
    fs = HATestUtil.configureFailoverFs(cluster, conf);
    assertTrue(fs.mkdirs(new Path("/foo1")));

    // Do the upgrade. Shut down NN1 and then restart NN0 with the upgrade
    // flag.
    cluster.shutdownNameNode(1);
    cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.UPGRADE);
    cluster.restartNameNode(0, false);

    checkNnPreviousDirExistence(cluster, 0, true);
    checkNnPreviousDirExistence(cluster, 1, false);
    checkPreviousDirExistence(sharedDir, true);

    // NN0 should come up in the active state when given the -upgrade option,
    // so no need to transition it to active.
    assertTrue(fs.mkdirs(new Path("/foo2")));

    // Restart NN0 without the -upgrade flag, to make sure that works.
    cluster.getNameNodeInfos()[0].setStartOpt(StartupOption.REGULAR);
    cluster.restartNameNode(0, false);

    // Make sure we can still do FS ops after upgrading.
    cluster.transitionToActive(0);
    assertTrue(fs.mkdirs(new Path("/foo3")));

    // Make sure that starting the second NN with the -upgrade flag fails.
    cluster.getNameNodeInfos()[1].setStartOpt(StartupOption.UPGRADE);
    try {
      cluster.restartNameNode(1, false);
      fail("Should not have been able to start second NN with -upgrade");
    } catch (IOException ioe) {
      GenericTestUtils.assertExceptionContains(
          "It looks like the shared log is already being upgraded", ioe);
    }
  } finally {
    if (fs != null) {
      fs.close();
    }
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
}
/*
 * Copyright 2013 YTEQ Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fixb.impl;

import org.fixb.FixException;
import org.fixb.FixFieldExtractor;
import org.fixb.meta.*;
import org.joda.time.*;

import java.math.BigDecimal;
import java.util.*;

/**
 * An implementation of FixFieldExtractor that is used to extract field values straight from a raw string representation
 * of a FIX message. It is the fastest implementation of FixFieldExtractor as it does not create any intermediate
 * representation of extracted data.
 *
 * @author vladyslav.yatsenko
 */
public class NativeFixFieldExtractor implements FixFieldExtractor<String> {
    private final FixEnumDictionary fixEnumDictionary;

    public NativeFixFieldExtractor(FixEnumDictionary fixEnumDictionary) {
        this.fixEnumDictionary = fixEnumDictionary;
    }

    @Override
    public <T> T getFieldValue(String fixMessage, Class<T> type, int tag, boolean optional) {
        return extractFieldValue(FieldCursor.create(fixMessage), tag, type, optional);
    }

    @Override
    public <T, C extends Collection<T>> C getGroups(String fixMessage,
                                                    Class<C> type,
                                                    int groupTag,
                                                    Class<T> elementType,
                                                    int elementTag,
                                                    boolean optional) {
        return getGroups(FieldCursor.create(fixMessage), type, groupTag, elementType, elementTag, optional);
    }

    @Override
    public <T, C extends Collection<T>> C getGroups(String fixMessage,
                                                    Class<C> type,
                                                    int groupTag,
                                                    FixBlockMeta<T> componentMeta,
                                                    boolean optional) {
        return getGroups(FieldCursor.create(fixMessage), type, groupTag, componentMeta, optional);
    }

    /**
     * Extracts values using the given FieldCursor that are defined by the given FixBlockMeta and creates an object as
     * defined by the block meta.
     *
     * @param cursor        a FieldCursor operating on the FIX message of interest
     * @param componentMeta a metadata with FIX mappings used to create the resulting POJO
     * @param <T>           a type of object to create
     * @return an instance of type T populated with values from the given FieldCursor.
     */
    @SuppressWarnings("unchecked")
    <T> T extractFixBlock(FieldCursor cursor, FixBlockMeta<T> componentMeta) {
        final Map<FixFieldMeta, Object> values = new LinkedHashMap<>(componentMeta.getFields().size());
        for (FixFieldMeta f : componentMeta.getFields()) {
            // Constant fields carry no data from the message.
            if (f instanceof FixConstantFieldMeta) continue;
            final Object fieldValue;
            if (f.isGroup()) {
                final FixGroupMeta groupMeta = (FixGroupMeta) f;
                fieldValue = groupMeta.isSimple()
                        ? getGroups(cursor,
                                (Class<Collection<Object>>) groupMeta.getType(),
                                groupMeta.getTag(),
                                (Class<Object>) groupMeta.getComponentType(),
                                groupMeta.getComponentTag(),
                                groupMeta.isOptional())
                        : getGroups(cursor,
                                (Class<Collection<Object>>) groupMeta.getType(),
                                groupMeta.getTag(),
                                (FixBlockMeta<Object>) groupMeta.getComponentMeta(),
                                groupMeta.isOptional());
            } else {
                fieldValue = extractFieldValue(cursor, f.getTag(), f.getType(), f.isOptional());
            }
            values.put(f, fieldValue);
        }
        return componentMeta.createModel(values);
    }

    /**
     * Reads a repeating group of complex (multi-field) components.
     *
     * @return a collection of type C with one element per group entry; empty if the group tag is absent and optional.
     * @throws FixException if the group tag is absent and the group is not optional.
     */
    private <T, C extends Collection<T>> C getGroups(FieldCursor cursor,
                                                     Class<C> type,
                                                     int groupTag,
                                                     FixBlockMeta<T> componentMeta,
                                                     boolean optional) {
        if (!cursor.nextField(groupTag)) {
            if (optional) {
                return CollectionFactory.createCollection(type);
            } else {
                throw FixException.fieldNotFound(groupTag, cursor.fixMessage);
            }
        }
        // The group tag's value is the number of entries that follow.
        final int count = Integer.parseInt(cursor.lastValue);
        final C result = CollectionFactory.createCollection(type);
        for (int i = 0; i < count; i++) {
            result.add(extractFixBlock(cursor, componentMeta));
        }
        return result;
    }

    /**
     * Reads a repeating group of simple (single-tag) elements.
     *
     * @return a collection of type C with one converted value per group entry; empty if the group tag is absent and
     * optional.
     * @throws FixException if the group tag is absent and the group is not optional.
     */
    private <T, C extends Collection<T>> C getGroups(FieldCursor cursor,
                                                     Class<C> type,
                                                     int groupTag,
                                                     Class<T> elementType,
                                                     int elementTag,
                                                     boolean optional) {
        if (!cursor.nextField(groupTag)) {
            if (optional) {
                return CollectionFactory.createCollection(type);
            } else {
                throw FixException.fieldNotFound(groupTag, cursor.fixMessage);
            }
        }
        final int count = Integer.parseInt(cursor.lastValue);
        final C result = CollectionFactory.createCollection(type);
        for (int i = 0; i < count && cursor.nextField(elementTag); i++) {
            result.add(toRequestedType(cursor.lastValue, elementType));
        }
        return result;
    }

    /**
     * Reads a single field value and converts it to the requested type.
     *
     * @throws FixException if no value is found and the field is not optional.
     */
    private <T> T extractFieldValue(FieldCursor cursor, int tag, Class<T> type, boolean optional) {
        T value = null;
        if (cursor.nextField(tag)) {
            value = toRequestedType(cursor.lastValue, type);
        }
        if (value != null) {
            return value;
        } else if (optional) {
            return null;
        } else {
            throw FixException.fieldNotFound(tag, cursor.fixMessage);
        }
    }

    /**
     * Converts a raw FIX field value to the requested Java type. Returns null for unsupported types.
     */
    @SuppressWarnings("unchecked")
    private <T> T toRequestedType(String value, Class<T> type) {
        if (type == String.class) return (T) value;
        if (type == Boolean.class || type == boolean.class) return (T) toBoolean(value);
        if (type == Character.class || type == char.class) return (T) (Character) value.charAt(0);
        if (type == Byte.class || type == byte.class) return (T) Byte.valueOf(value);
        if (type == Short.class || type == short.class) return (T) Short.valueOf(value);
        if (type == Integer.class || type == int.class) return (T) Integer.valueOf(value);
        if (type == Double.class || type == double.class) return (T) Double.valueOf(value);
        if (type == Float.class || type == float.class) return (T) Float.valueOf(value);
        if (type == BigDecimal.class) return (T) new BigDecimal(value);
        if (type == Date.class) return (T) toDate(value);
        if (type == LocalDate.class) return (T) toLocalDate(value);
        if (type == DateTime.class) return (T) toDateTime(value);
        if (type == LocalDateTime.class) return (T) toLocalDateTime(value);
        if (type == LocalTime.class) return (T) toLocalTime(value);
        if (type.isEnum()) return (T) toEnum(value, (Class<Enum>) type);
        return null;
    }

    private LocalDate toLocalDate(String value) {
        int[] f = extractDateFields(value);
        return new LocalDate(f[0], f[1], f[2]);
    }

    private LocalTime toLocalTime(String value) {
        // Fields: hour, minute, second, millis (missing trailing fields default to 0).
        int[] f = extractTimeFields(value, 4, "UTCTimeOnly");
        return new LocalTime(f[0], f[1], f[2], f[3]);
    }

    private LocalDateTime toLocalDateTime(String value) {
        // Fields: yyyymmdd, hour, minute, second, millis.
        int[] f = extractTimeFields(value, 5, "UTCTimestamp");
        int[] d = extractDateFields(f[0]);
        return new LocalDateTime(d[0], d[1], d[2], f[1], f[2], f[3], f[4]);
    }

    private DateTime toDateTime(String value) {
        // Fields: yyyymmdd, hour, minute, second, offset hours, offset minutes.
        // NOTE(review): '-' is also a tokenizer delimiter, so a negative TZ offset
        // loses its sign here — TODO confirm whether TZTimestamp with negative
        // offsets is expected input.
        int[] f = extractTimeFields(value, 6, "UTCTimestamp or TZTimestamp");
        int[] d = extractDateFields(f[0]);
        return new DateTime(d[0], d[1], d[2], f[1], f[2], f[3],
                DateTimeZone.forOffsetHoursMinutes(f[4], f[5]));
    }

    /** Splits a yyyymmdd integer into {year, month, day}. */
    private int[] extractDateFields(int value) {
        return new int[]{
                value / 10000,
                (value / 100) % 100,
                value % 100
        };
    }

    private int[] extractTimeFields(String value, int fieldCount, String fixTypeName) {
        // BUG FIX: the original guard used '&&' ("< 5 && > 23"), which is always
        // false, so malformed values were never rejected by the length check.
        if (value.length() < 5 || value.length() > 23) {
            throw new FixException("Expected " + fixTypeName + ", got: " + value);
        }
        IntTokenizer tokenizer = new IntTokenizer(value, ":-+Z");
        try {
            return tokenizer.nextNWithDefault(fieldCount, 0);
        } catch (NumberFormatException e) {
            throw new FixException("Expected " + fixTypeName + ", got: " + value);
        }
    }

    private int[] extractDateFields(String value) {
        if (value.length() != 8) {
            throw new FixException("Expected UTCDateOnly or LocalMktDate, got: " + value);
        }
        try {
            return extractDateFields(Integer.valueOf(value));
        } catch (NumberFormatException e) {
            throw new FixException("Expected UTCDateOnly or LocalMktDate, got: " + value);
        }
    }

    private Boolean toBoolean(String value) {
        return "Y".equals(value) || "1".equals(value);
    }

    private Enum<?> toEnum(String value, Class<Enum> type) {
        // BUG FIX: consult the FIX enum dictionary before parsing the value as an
        // integer — dictionary-mapped enum values need not be numeric, and the
        // original code threw NumberFormatException before reaching the lookup.
        if (fixEnumDictionary.hasFixEnumMeta(type)) {
            return fixEnumDictionary.getFixEnumMeta(type).enumForFixValue(value);
        }
        // Fall back to the 1-based-ordinal convention.
        final int fieldValue = Integer.valueOf(value);
        for (Enum enumValue : type.getEnumConstants()) {
            int ordValue = enumValue.ordinal() + 1;
            if (ordValue == fieldValue) {
                return enumValue;
            }
        }
        throw new IllegalArgumentException("Invalid ordinal of enum type " + type + ": " + fieldValue);
    }

    private Date toDate(String value) {
        return toDateTime(value).toDate();
    }

    /** Tokenizes a string into successive integers, padding missing fields with a default. */
    static class IntTokenizer {
        private final StringTokenizer tokenizer;

        IntTokenizer(String s, String delimiters) {
            tokenizer = new StringTokenizer(s, delimiters);
        }

        boolean hasNext() {
            return tokenizer.hasMoreTokens();
        }

        int next() {
            return Integer.parseInt(tokenizer.nextToken());
        }

        /** Returns the next n integer tokens, substituting {@code def} when the input runs out. */
        int[] nextNWithDefault(int n, int def) {
            int[] result = new int[n];
            for (int i = 0; i < n; i++) {
                result[i] = hasNext() ? next() : def;
            }
            return result;
        }
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/notebooks/v1/service.proto package com.google.cloud.notebooks.v1; /** * * * <pre> * Response for checking if a notebook instance is healthy. * </pre> * * Protobuf type {@code google.cloud.notebooks.v1.GetInstanceHealthResponse} */ public final class GetInstanceHealthResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.notebooks.v1.GetInstanceHealthResponse) GetInstanceHealthResponseOrBuilder { private static final long serialVersionUID = 0L; // Use GetInstanceHealthResponse.newBuilder() to construct. 
private GetInstanceHealthResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GetInstanceHealthResponse() { healthState_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GetInstanceHealthResponse(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private GetInstanceHealthResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int rawValue = input.readEnum(); healthState_ = rawValue; break; } case 18: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { healthInfo_ = com.google.protobuf.MapField.newMapField( HealthInfoDefaultEntryHolder.defaultEntry); mutable_bitField0_ |= 0x00000001; } com.google.protobuf.MapEntry<java.lang.String, java.lang.String> healthInfo__ = input.readMessage( HealthInfoDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); healthInfo_.getMutableMap().put(healthInfo__.getKey(), healthInfo__.getValue()); break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_GetInstanceHealthResponse_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override protected com.google.protobuf.MapField internalGetMapField(int number) { switch (number) { case 2: return internalGetHealthInfo(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_GetInstanceHealthResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v1.GetInstanceHealthResponse.class, com.google.cloud.notebooks.v1.GetInstanceHealthResponse.Builder.class); } /** * * * <pre> * If an instance is healthy or not. * </pre> * * Protobuf enum {@code google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState} */ public enum HealthState implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * The instance substate is unknown. * </pre> * * <code>HEALTH_STATE_UNSPECIFIED = 0;</code> */ HEALTH_STATE_UNSPECIFIED(0), /** * * * <pre> * The instance is known to be in an healthy state * (for example, critical daemons are running) * Applies to ACTIVE state. * </pre> * * <code>HEALTHY = 1;</code> */ HEALTHY(1), /** * * * <pre> * The instance is known to be in an unhealthy state * (for example, critical daemons are not running) * Applies to ACTIVE state. * </pre> * * <code>UNHEALTHY = 2;</code> */ UNHEALTHY(2), /** * * * <pre> * The instance has not installed health monitoring agent. * Applies to ACTIVE state. * </pre> * * <code>AGENT_NOT_INSTALLED = 3;</code> */ AGENT_NOT_INSTALLED(3), /** * * * <pre> * The instance health monitoring agent is not running. * Applies to ACTIVE state. 
* </pre> * * <code>AGENT_NOT_RUNNING = 4;</code> */ AGENT_NOT_RUNNING(4), UNRECOGNIZED(-1), ; /** * * * <pre> * The instance substate is unknown. * </pre> * * <code>HEALTH_STATE_UNSPECIFIED = 0;</code> */ public static final int HEALTH_STATE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The instance is known to be in an healthy state * (for example, critical daemons are running) * Applies to ACTIVE state. * </pre> * * <code>HEALTHY = 1;</code> */ public static final int HEALTHY_VALUE = 1; /** * * * <pre> * The instance is known to be in an unhealthy state * (for example, critical daemons are not running) * Applies to ACTIVE state. * </pre> * * <code>UNHEALTHY = 2;</code> */ public static final int UNHEALTHY_VALUE = 2; /** * * * <pre> * The instance has not installed health monitoring agent. * Applies to ACTIVE state. * </pre> * * <code>AGENT_NOT_INSTALLED = 3;</code> */ public static final int AGENT_NOT_INSTALLED_VALUE = 3; /** * * * <pre> * The instance health monitoring agent is not running. * Applies to ACTIVE state. * </pre> * * <code>AGENT_NOT_RUNNING = 4;</code> */ public static final int AGENT_NOT_RUNNING_VALUE = 4; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static HealthState valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static HealthState forNumber(int value) { switch (value) { case 0: return HEALTH_STATE_UNSPECIFIED; case 1: return HEALTHY; case 2: return UNHEALTHY; case 3: return AGENT_NOT_INSTALLED; case 4: return AGENT_NOT_RUNNING; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<HealthState> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<HealthState> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<HealthState>() { public HealthState findValueByNumber(int number) { return HealthState.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.notebooks.v1.GetInstanceHealthResponse.getDescriptor() .getEnumTypes() .get(0); } private static final HealthState[] VALUES = values(); public static HealthState valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private HealthState(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState) } public static final int HEALTH_STATE_FIELD_NUMBER = 1; private int healthState_; /** * * * <pre> * Output only. Runtime health_state. 
* </pre> * * <code> * .google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState health_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for healthState. */ @java.lang.Override public int getHealthStateValue() { return healthState_; } /** * * * <pre> * Output only. Runtime health_state. * </pre> * * <code> * .google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState health_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The healthState. */ @java.lang.Override public com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState getHealthState() { @SuppressWarnings("deprecation") com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState result = com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState.valueOf(healthState_); return result == null ? com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState.UNRECOGNIZED : result; } public static final int HEALTH_INFO_FIELD_NUMBER = 2; private static final class HealthInfoDefaultEntryHolder { static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry = com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance( com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_GetInstanceHealthResponse_HealthInfoEntry_descriptor, com.google.protobuf.WireFormat.FieldType.STRING, "", com.google.protobuf.WireFormat.FieldType.STRING, ""); } private com.google.protobuf.MapField<java.lang.String, java.lang.String> healthInfo_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetHealthInfo() { if (healthInfo_ == null) { return com.google.protobuf.MapField.emptyMapField(HealthInfoDefaultEntryHolder.defaultEntry); } return healthInfo_; } public int getHealthInfoCount() { return internalGetHealthInfo().getMap().size(); } /** * * * <pre> * Output only. 
Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public boolean containsHealthInfo(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } return internalGetHealthInfo().getMap().containsKey(key); } /** Use {@link #getHealthInfoMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getHealthInfo() { return getHealthInfoMap(); } /** * * * <pre> * Output only. Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.util.Map<java.lang.String, java.lang.String> getHealthInfoMap() { return internalGetHealthInfo().getMap(); } /** * * * <pre> * Output only. Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.lang.String getHealthInfoOrDefault( java.lang.String key, java.lang.String defaultValue) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHealthInfo().getMap(); return map.containsKey(key) ? 
map.get(key) : defaultValue; } /** * * * <pre> * Output only. Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.lang.String getHealthInfoOrThrow(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHealthInfo().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (healthState_ != com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState .HEALTH_STATE_UNSPECIFIED .getNumber()) { output.writeEnum(1, healthState_); } com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( output, internalGetHealthInfo(), HealthInfoDefaultEntryHolder.defaultEntry, 2); unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (healthState_ != com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState .HEALTH_STATE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, healthState_); } for (java.util.Map.Entry<java.lang.String, java.lang.String> entry : internalGetHealthInfo().getMap().entrySet()) { com.google.protobuf.MapEntry<java.lang.String, java.lang.String> 
healthInfo__ = HealthInfoDefaultEntryHolder.defaultEntry .newBuilderForType() .setKey(entry.getKey()) .setValue(entry.getValue()) .build(); size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, healthInfo__); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.notebooks.v1.GetInstanceHealthResponse)) { return super.equals(obj); } com.google.cloud.notebooks.v1.GetInstanceHealthResponse other = (com.google.cloud.notebooks.v1.GetInstanceHealthResponse) obj; if (healthState_ != other.healthState_) return false; if (!internalGetHealthInfo().equals(other.internalGetHealthInfo())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + HEALTH_STATE_FIELD_NUMBER; hash = (53 * hash) + healthState_; if (!internalGetHealthInfo().getMap().isEmpty()) { hash = (37 * hash) + HEALTH_INFO_FIELD_NUMBER; hash = (53 * hash) + internalGetHealthInfo().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.notebooks.v1.GetInstanceHealthResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for checking if a notebook instance is healthy. 
* </pre> * * Protobuf type {@code google.cloud.notebooks.v1.GetInstanceHealthResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v1.GetInstanceHealthResponse) com.google.cloud.notebooks.v1.GetInstanceHealthResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_GetInstanceHealthResponse_descriptor; } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapField internalGetMapField(int number) { switch (number) { case 2: return internalGetHealthInfo(); default: throw new RuntimeException("Invalid map field number: " + number); } } @SuppressWarnings({"rawtypes"}) protected com.google.protobuf.MapField internalGetMutableMapField(int number) { switch (number) { case 2: return internalGetMutableHealthInfo(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_GetInstanceHealthResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v1.GetInstanceHealthResponse.class, com.google.cloud.notebooks.v1.GetInstanceHealthResponse.Builder.class); } // Construct using com.google.cloud.notebooks.v1.GetInstanceHealthResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); healthState_ = 0; 
internalGetMutableHealthInfo().clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_GetInstanceHealthResponse_descriptor; } @java.lang.Override public com.google.cloud.notebooks.v1.GetInstanceHealthResponse getDefaultInstanceForType() { return com.google.cloud.notebooks.v1.GetInstanceHealthResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.notebooks.v1.GetInstanceHealthResponse build() { com.google.cloud.notebooks.v1.GetInstanceHealthResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.notebooks.v1.GetInstanceHealthResponse buildPartial() { com.google.cloud.notebooks.v1.GetInstanceHealthResponse result = new com.google.cloud.notebooks.v1.GetInstanceHealthResponse(this); int from_bitField0_ = bitField0_; result.healthState_ = healthState_; result.healthInfo_ = internalGetHealthInfo(); result.healthInfo_.makeImmutable(); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, 
java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.notebooks.v1.GetInstanceHealthResponse) { return mergeFrom((com.google.cloud.notebooks.v1.GetInstanceHealthResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.notebooks.v1.GetInstanceHealthResponse other) { if (other == com.google.cloud.notebooks.v1.GetInstanceHealthResponse.getDefaultInstance()) return this; if (other.healthState_ != 0) { setHealthStateValue(other.getHealthStateValue()); } internalGetMutableHealthInfo().mergeFrom(other.internalGetHealthInfo()); this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.notebooks.v1.GetInstanceHealthResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.notebooks.v1.GetInstanceHealthResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private int healthState_ = 0; /** * * * <pre> * Output only. Runtime health_state. * </pre> * * <code> * .google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState health_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for healthState. */ @java.lang.Override public int getHealthStateValue() { return healthState_; } /** * * * <pre> * Output only. Runtime health_state. 
* </pre> * * <code> * .google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState health_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The enum numeric value on the wire for healthState to set. * @return This builder for chaining. */ public Builder setHealthStateValue(int value) { healthState_ = value; onChanged(); return this; } /** * * * <pre> * Output only. Runtime health_state. * </pre> * * <code> * .google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState health_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The healthState. */ @java.lang.Override public com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState getHealthState() { @SuppressWarnings("deprecation") com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState result = com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState.valueOf(healthState_); return result == null ? com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState.UNRECOGNIZED : result; } /** * * * <pre> * Output only. Runtime health_state. * </pre> * * <code> * .google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState health_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The healthState to set. * @return This builder for chaining. */ public Builder setHealthState( com.google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState value) { if (value == null) { throw new NullPointerException(); } healthState_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. Runtime health_state. * </pre> * * <code> * .google.cloud.notebooks.v1.GetInstanceHealthResponse.HealthState health_state = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. 
*/ public Builder clearHealthState() { healthState_ = 0; onChanged(); return this; } private com.google.protobuf.MapField<java.lang.String, java.lang.String> healthInfo_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetHealthInfo() { if (healthInfo_ == null) { return com.google.protobuf.MapField.emptyMapField( HealthInfoDefaultEntryHolder.defaultEntry); } return healthInfo_; } private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetMutableHealthInfo() { onChanged(); ; if (healthInfo_ == null) { healthInfo_ = com.google.protobuf.MapField.newMapField(HealthInfoDefaultEntryHolder.defaultEntry); } if (!healthInfo_.isMutable()) { healthInfo_ = healthInfo_.copy(); } return healthInfo_; } public int getHealthInfoCount() { return internalGetHealthInfo().getMap().size(); } /** * * * <pre> * Output only. Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public boolean containsHealthInfo(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } return internalGetHealthInfo().getMap().containsKey(key); } /** Use {@link #getHealthInfoMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getHealthInfo() { return getHealthInfoMap(); } /** * * * <pre> * Output only. Additional information about instance health. 
* Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.util.Map<java.lang.String, java.lang.String> getHealthInfoMap() { return internalGetHealthInfo().getMap(); } /** * * * <pre> * Output only. Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.lang.String getHealthInfoOrDefault( java.lang.String key, java.lang.String defaultValue) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHealthInfo().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * Output only. Additional information about instance health. 
* Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public java.lang.String getHealthInfoOrThrow(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } java.util.Map<java.lang.String, java.lang.String> map = internalGetHealthInfo().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } public Builder clearHealthInfo() { internalGetMutableHealthInfo().getMutableMap().clear(); return this; } /** * * * <pre> * Output only. Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder removeHealthInfo(java.lang.String key) { if (key == null) { throw new java.lang.NullPointerException(); } internalGetMutableHealthInfo().getMutableMap().remove(key); return this; } /** Use alternate mutation accessors instead. */ @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getMutableHealthInfo() { return internalGetMutableHealthInfo().getMutableMap(); } /** * * * <pre> * Output only. Additional information about instance health. 
* Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder putHealthInfo(java.lang.String key, java.lang.String value) { if (key == null) { throw new java.lang.NullPointerException(); } if (value == null) { throw new java.lang.NullPointerException(); } internalGetMutableHealthInfo().getMutableMap().put(key, value); return this; } /** * * * <pre> * Output only. Additional information about instance health. * Example: * healthInfo": { * "docker_proxy_agent_status": "1", * "docker_status": "1", * "jupyterlab_api_status": "-1", * "jupyterlab_status": "-1", * "updated": "2020-10-18 09:40:03.573409" * } * </pre> * * <code>map&lt;string, string&gt; health_info = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder putAllHealthInfo(java.util.Map<java.lang.String, java.lang.String> values) { internalGetMutableHealthInfo().getMutableMap().putAll(values); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v1.GetInstanceHealthResponse) } // @@protoc_insertion_point(class_scope:google.cloud.notebooks.v1.GetInstanceHealthResponse) private static final com.google.cloud.notebooks.v1.GetInstanceHealthResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.notebooks.v1.GetInstanceHealthResponse(); } public static com.google.cloud.notebooks.v1.GetInstanceHealthResponse getDefaultInstance() { return DEFAULT_INSTANCE; } 
// NOTE(review): protoc-generated boilerplate for GetInstanceHealthResponse.
// Do not hand-edit; regenerate from the .proto definition instead.
private static final com.google.protobuf.Parser<GetInstanceHealthResponse> PARSER =
    new com.google.protobuf.AbstractParser<GetInstanceHealthResponse>() {
      @java.lang.Override
      public GetInstanceHealthResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetInstanceHealthResponse(input, extensionRegistry);
      }
    };

// Singleton parser used by the static parseFrom(...) helpers above.
public static com.google.protobuf.Parser<GetInstanceHealthResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<GetInstanceHealthResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.notebooks.v1.GetInstanceHealthResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.sjms.batch; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import javax.jms.Connection; import javax.jms.ConnectionFactory; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.Queue; import javax.jms.Session; import org.apache.camel.AggregationStrategy; import org.apache.camel.Exchange; import org.apache.camel.Predicate; import org.apache.camel.Processor; import org.apache.camel.RuntimeCamelException; import org.apache.camel.support.DefaultConsumer; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.StringHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SjmsBatchConsumer extends DefaultConsumer { public static final String SJMS_BATCH_TIMEOUT_CHECKER = 
"SJmsBatchTimeoutChecker"; private static final boolean TRANSACTED = true; private static final Logger LOG = LoggerFactory.getLogger(SjmsBatchConsumer.class); // global counters, maybe they should be on component instead? private static final AtomicInteger BATCH_COUNT = new AtomicInteger(); private static final AtomicLong MESSAGE_RECEIVED = new AtomicLong(); private static final AtomicLong MESSAGE_PROCESSED = new AtomicLong(); private ScheduledExecutorService timeoutCheckerExecutorService; private boolean shutdownTimeoutCheckerExecutorService; private final SjmsBatchEndpoint sjmsBatchEndpoint; private final AggregationStrategy aggregationStrategy; private final int completionSize; private final int completionInterval; private final int completionTimeout; private final Predicate completionPredicate; private final boolean eagerCheckCompletion; private final int consumerCount; private final int pollDuration; private final ConnectionFactory connectionFactory; private final String destinationName; private ExecutorService jmsConsumerExecutors; private final AtomicBoolean running = new AtomicBoolean(false); private final AtomicReference<CountDownLatch> consumersShutdownLatchRef = new AtomicReference<>(); private volatile Connection connection; public SjmsBatchConsumer(SjmsBatchEndpoint sjmsBatchEndpoint, Processor processor) { super(sjmsBatchEndpoint, processor); this.sjmsBatchEndpoint = ObjectHelper.notNull(sjmsBatchEndpoint, "batchJmsEndpoint"); destinationName = StringHelper.notEmpty(sjmsBatchEndpoint.getDestinationName(), "destinationName"); completionSize = sjmsBatchEndpoint.getCompletionSize(); completionInterval = sjmsBatchEndpoint.getCompletionInterval(); completionTimeout = sjmsBatchEndpoint.getCompletionTimeout(); if (completionInterval > 0 && completionTimeout != SjmsBatchEndpoint.DEFAULT_COMPLETION_TIMEOUT) { throw new IllegalArgumentException("Only one of completionInterval or completionTimeout can be used, not both."); } if 
(sjmsBatchEndpoint.isSendEmptyMessageWhenIdle() && completionTimeout <= 0 && completionInterval <= 0) { throw new IllegalArgumentException("SendEmptyMessageWhenIdle can only be enabled if either completionInterval or completionTimeout is also set"); } completionPredicate = sjmsBatchEndpoint.getCompletionPredicate(); eagerCheckCompletion = sjmsBatchEndpoint.isEagerCheckCompletion(); pollDuration = sjmsBatchEndpoint.getPollDuration(); if (pollDuration < 0) { throw new IllegalArgumentException("pollDuration must be 0 or greater"); } this.aggregationStrategy = ObjectHelper.notNull(sjmsBatchEndpoint.getAggregationStrategy(), "aggregationStrategy"); consumerCount = sjmsBatchEndpoint.getConsumerCount(); if (consumerCount <= 0) { throw new IllegalArgumentException("consumerCount must be greater than 0"); } SjmsBatchComponent sjmsBatchComponent = sjmsBatchEndpoint.getComponent(); connectionFactory = ObjectHelper.notNull(sjmsBatchComponent.getConnectionFactory(), "jmsBatchComponent.connectionFactory"); } @Override public SjmsBatchEndpoint getEndpoint() { return sjmsBatchEndpoint; } public ScheduledExecutorService getTimeoutCheckerExecutorService() { return timeoutCheckerExecutorService; } public void setTimeoutCheckerExecutorService(ScheduledExecutorService timeoutCheckerExecutorService) { this.timeoutCheckerExecutorService = timeoutCheckerExecutorService; } @Override protected void doStart() throws Exception { super.doStart(); boolean recovery = getEndpoint().isAsyncStartListener(); StartConsumerTask task = new StartConsumerTask(recovery, getEndpoint().getRecoveryInterval(), getEndpoint().getKeepAliveDelay()); if (recovery) { // use a background thread to keep starting the consumer until getEndpoint().getComponent().getAsyncStartStopExecutorService().submit(task); } else { task.run(); } } /** * Task to startup the consumer either synchronously or using asynchronous with recovery */ protected class StartConsumerTask implements Runnable { private boolean recoveryEnabled; 
private int recoveryInterval; private int keepAliveDelay; private long attempt; public StartConsumerTask(boolean recoveryEnabled, int recoveryInterval, int keepAliveDelay) { this.recoveryEnabled = recoveryEnabled; this.recoveryInterval = recoveryInterval; this.keepAliveDelay = keepAliveDelay; } @Override public void run() { jmsConsumerExecutors = getEndpoint().getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, "SjmsBatchConsumer", consumerCount); consumersShutdownLatchRef.set(new CountDownLatch(consumerCount)); if (completionInterval > 0) { LOG.info("Using CompletionInterval to run every {} millis.", completionInterval); if (timeoutCheckerExecutorService == null) { setTimeoutCheckerExecutorService(getEndpoint().getCamelContext().getExecutorServiceManager().newScheduledThreadPool(this, SJMS_BATCH_TIMEOUT_CHECKER, 1)); shutdownTimeoutCheckerExecutorService = true; } } // keep loop until we can connect while (isRunAllowed() && !running.get()) { Connection localConnection = null; try { attempt++; LOG.debug("Attempt #{}. 
Starting {} consumer(s) for {}:{}", attempt, consumerCount, destinationName, completionSize); // start up a shared connection localConnection = connectionFactory.createConnection(); localConnection.start(); // its success so prepare for exit connection = localConnection; final List<AtomicBoolean> triggers = new ArrayList<>(); for (int i = 0; i < consumerCount; i++) { BatchConsumptionLoop loop = new BatchConsumptionLoop(); loop.setKeepAliveDelay(keepAliveDelay); triggers.add(loop.getCompletionTimeoutTrigger()); jmsConsumerExecutors.submit(loop); } if (completionInterval > 0) { // trigger completion based on interval timeoutCheckerExecutorService.scheduleAtFixedRate(new CompletionIntervalTask(triggers), completionInterval, completionInterval, TimeUnit.MILLISECONDS); } if (attempt > 1) { LOG.info("Successfully refreshed connection after {} attempts.", attempt); } LOG.info("Started {} consumer(s) for {}:{}", consumerCount, destinationName, completionSize); running.set(true); return; } catch (Throwable e) { // we failed so close the local connection as we create a new on next attempt try { if (localConnection != null) { localConnection.close(); } } catch (Throwable t) { // ignore } if (recoveryEnabled) { getExceptionHandler().handleException("Error starting consumer after " + attempt + " attempts. Will try again in " + recoveryInterval + " millis.", e); } else { throw RuntimeCamelException.wrapRuntimeCamelException(e); } } // sleeping before next attempt try { LOG.debug("Attempt #{}. 
Sleeping {} before next attempt to recover", attempt, recoveryInterval); Thread.sleep(recoveryInterval); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return; } } } } @Override protected void doStop() throws Exception { super.doStop(); running.set(false); CountDownLatch consumersShutdownLatch = consumersShutdownLatchRef.get(); if (consumersShutdownLatch != null) { LOG.info("Stop signalled, waiting on consumers to shut down"); if (consumersShutdownLatch.await(60, TimeUnit.SECONDS)) { LOG.warn("Timeout waiting on consumer threads to signal completion - shutting down"); } else { LOG.info("All consumers have been shutdown"); } } else { LOG.info("Stop signalled while there are no consumers yet, so no need to wait for consumers"); } try { LOG.debug("Shutting down JMS connection"); connection.close(); } catch (Exception e) { // ignore } getEndpoint().getCamelContext().getExecutorServiceManager().shutdownGraceful(jmsConsumerExecutors); jmsConsumerExecutors = null; if (shutdownTimeoutCheckerExecutorService) { getEndpoint().getCamelContext().getExecutorServiceManager().shutdownGraceful(timeoutCheckerExecutorService); timeoutCheckerExecutorService = null; } } /** * Background task that triggers completion based on interval. 
*/
    private final class CompletionIntervalTask implements Runnable {

        // one trigger flag per BatchConsumptionLoop; setting it asks that loop to complete its batch
        private final List<AtomicBoolean> triggers;

        CompletionIntervalTask(List<AtomicBoolean> triggers) {
            this.triggers = triggers;
        }

        @Override
        public void run() {
            // only run if CamelContext has been fully started
            if (!getEndpoint().getCamelContext().getStatus().isStarted()) {
                LOG.trace("Completion interval task cannot start due CamelContext({}) has not been started yet", getEndpoint().getCamelContext().getName());
                return;
            }

            // signal every consumption loop that its completion interval has elapsed
            for (AtomicBoolean trigger : triggers) {
                trigger.set(true);
            }
        }
    }

    // One instance per consumer thread: owns a JMS session/consumer pair and keeps
    // re-creating them (subject to keepAliveDelay) until the consumer is stopped.
    private class BatchConsumptionLoop implements Runnable {

        private final AtomicBoolean completionTimeoutTrigger = new AtomicBoolean();
        private final BatchConsumptionTask task = new BatchConsumptionTask(completionTimeoutTrigger);
        // < 0: rethrow session failures and exit; >= 0: recreate the session, sleeping this many millis if > 0
        private int keepAliveDelay;

        public AtomicBoolean getCompletionTimeoutTrigger() {
            return completionTimeoutTrigger;
        }

        public void setKeepAliveDelay(int i) {
            keepAliveDelay = i;
        }

        @Override
        public void run() {
            try {
                // This loop is intended to keep the consumer up and running as long as it's supposed to be, but allow it to bail if signaled.
                // I'm using a do/while loop because the first time through we want to attempt it regardless of any other conditions... we
                // only want to try AGAIN if the keepAlive is set.
                do {
                    // a batch corresponds to a single session that will be committed or rolled back by a background thread
                    // NOTE(review): the session is transacted, so the acknowledge-mode argument is ignored per the JMS spec
                    final Session session = connection.createSession(TRANSACTED, Session.CLIENT_ACKNOWLEDGE);
                    try {
                        // only batch consumption from queues is supported - it makes no sense to transactionally consume
                        // from a topic as you don't care about message loss, users can just use a regular aggregator instead
                        Queue queue = session.createQueue(destinationName);
                        MessageConsumer consumer = session.createConsumer(queue);
                        try {
                            task.consumeBatchesOnLoop(session, consumer);
                        } finally {
                            closeJmsConsumer(consumer);
                        }
                    } catch (javax.jms.IllegalStateException ex) { // from consumeBatchesOnLoop
                        // if keepAliveDelay was not specified (defaults to -1) just rethrow to break the loop. This preserves original default behavior
                        if (keepAliveDelay < 0) {
                            throw ex;
                        }
                        // this will log the exception and the parent loop will create a new session
                        getExceptionHandler().handleException("Exception caught consuming from " + destinationName, ex);
                        // sleep to avoid log spamming
                        if (keepAliveDelay > 0) {
                            Thread.sleep(keepAliveDelay);
                        }
                    } finally {
                        closeJmsSession(session);
                    }
                } while (running.get() || isStarting());
            } catch (Throwable ex) { // from consumeBatchesOnLoop
                // catch anything besides the IllegalStateException and exit the application
                getExceptionHandler().handleException("Exception caught consuming from " + destinationName, ex);
            } finally {
                // indicate that we have shut down, so doStop can finish waiting
                CountDownLatch consumersShutdownLatch = consumersShutdownLatchRef.get();
                consumersShutdownLatch.countDown();
            }
        }

        // Best-effort close; a failure here must not abort the surrounding shutdown path.
        private void closeJmsConsumer(MessageConsumer consumer) {
            try {
                consumer.close();
            } catch (JMSException ex2) {
                // only include stacktrace in debug logging
                if (log.isDebugEnabled()) {
                    log.debug("Exception caught closing consumer", ex2);
                }
                log.warn("Exception caught closing consumer: {}. This exception is ignored.", ex2.getMessage());
            }
        }

        // Best-effort close; a failure here must not abort the surrounding shutdown path.
        private void closeJmsSession(Session session) {
            try {
                session.close();
            } catch (JMSException ex2) {
                // only include stacktrace in debug logging
                if (log.isDebugEnabled()) {
                    log.debug("Exception caught closing session", ex2);
                }
                log.warn("Exception caught closing session: {}. This exception is ignored.", ex2.getMessage());
            }
        }

        // Accumulates messages into an aggregated exchange until a completion condition
        // (timeout/interval trigger, size, or predicate) fires. Single-threaded per loop.
        private final class BatchConsumptionTask {

            // state
            private final AtomicBoolean timeoutInterval;
            private final AtomicBoolean timeout = new AtomicBoolean();
            private int messageCount;
            private long timeElapsed;
            private long startTime;
            private Exchange aggregatedExchange;

            BatchConsumptionTask(AtomicBoolean timeoutInterval) {
                this.timeoutInterval = timeoutInterval;
            }

            private void consumeBatchesOnLoop(final Session session, final MessageConsumer consumer) throws JMSException {
                final boolean usingTimeout = completionTimeout > 0;

                LOG.trace("BatchConsumptionTask +++ start +++");

                while (running.get()) {
                    LOG.trace("BatchConsumptionTask running");

                    // either the per-batch timeout or the shared interval trigger completes the batch
                    if (timeout.compareAndSet(true, false) || timeoutInterval.compareAndSet(true, false)) {
                        // trigger timeout
                        LOG.trace("Completion batch due timeout");
                        String completedBy = completionInterval > 0 ? "interval" : "timeout";
                        completionBatch(session, completedBy);
                        reset();
                        continue;
                    }

                    if (completionSize > 0 && messageCount >= completionSize) {
                        // trigger completion size
                        LOG.trace("Completion batch due size");
                        completionBatch(session, "size");
                        reset();
                        continue;
                    }

                    // check periodically to see whether we should be shutting down
                    long waitTime = (usingTimeout && (timeElapsed > 0)) ?
getReceiveWaitTime(timeElapsed) : pollDuration; Message message = consumer.receive(waitTime); if (running.get()) { // no interruptions received if (message == null) { // timed out, no message received LOG.trace("No message received"); } else { messageCount++; LOG.debug("#{} messages received", messageCount); if (usingTimeout && startTime == 0) { // this is the first message start counting down the period for this batch startTime = new Date().getTime(); } final Exchange exchange = getEndpoint().createExchange(message, session); aggregatedExchange = aggregationStrategy.aggregate(aggregatedExchange, exchange); aggregatedExchange.setProperty(Exchange.BATCH_SIZE, messageCount); // is the batch complete by predicate? if (completionPredicate != null) { try { boolean complete; if (eagerCheckCompletion) { complete = completionPredicate.matches(exchange); } else { complete = completionPredicate.matches(aggregatedExchange); } if (complete) { // trigger completion predicate LOG.trace("Completion batch due predicate"); completionBatch(session, "predicate"); reset(); } } catch (Exception e) { LOG.warn("Error during evaluation of completion predicate " + e.getMessage() + ". 
This exception is ignored.", e); } } } if (usingTimeout && startTime > 0) { // a batch has been started, check whether it should be timed out long currentTime = new Date().getTime(); timeElapsed = currentTime - startTime; if (timeElapsed > completionTimeout) { // batch finished by timeout timeout.set(true); } else { LOG.trace("This batch has more time until the timeout, elapsed: {} timeout: {}", timeElapsed, completionTimeout); } } } else { LOG.info("Shutdown signal received - rolling back batch"); session.rollback(); } } LOG.trace("BatchConsumptionTask +++ end +++"); } private void reset() { messageCount = 0; timeElapsed = 0; startTime = 0; aggregatedExchange = null; } private void completionBatch(final Session session, String completedBy) { // batch if (aggregatedExchange == null && getEndpoint().isSendEmptyMessageWhenIdle()) { processEmptyMessage(); } else if (aggregatedExchange != null) { processBatch(aggregatedExchange, session, completedBy); } } } /** * Determine the time that a call to {@link MessageConsumer#receive()} should wait given the time that has elapsed for this batch. * * @param timeElapsed The time that has elapsed. * @return The shorter of the time remaining or poll duration. */ private long getReceiveWaitTime(long timeElapsed) { long timeRemaining = getTimeRemaining(timeElapsed); // wait for the shorter of the time remaining or the poll duration if (timeRemaining <= 0) { // ensure that the thread doesn't wait indefinitely timeRemaining = 1; } final long waitTime = Math.min(timeRemaining, pollDuration); LOG.trace("Waiting for {}", waitTime); return waitTime; } private long getTimeRemaining(long timeElapsed) { long timeRemaining = completionTimeout - timeElapsed; if (LOG.isDebugEnabled() && timeElapsed > 0) { LOG.debug("Time remaining this batch: {}", timeRemaining); } return timeRemaining; } /** * No messages in batch so send an empty message instead. 
*/ private void processEmptyMessage() { Exchange exchange = getEndpoint().createExchange(); log.debug("Sending empty message as there were no messages from polling: {}", getEndpoint()); try { getProcessor().process(exchange); } catch (Exception e) { getExceptionHandler().handleException("Error processing exchange", exchange, e); } } /** * Send an message with the batches messages. */ private void processBatch(Exchange exchange, Session session, String completedBy) { int id = BATCH_COUNT.getAndIncrement(); int batchSize = exchange.getProperty(Exchange.BATCH_SIZE, Integer.class); if (LOG.isDebugEnabled()) { long total = MESSAGE_RECEIVED.get() + batchSize; LOG.debug("Processing batch[" + id + "]:size=" + batchSize + ":total=" + total); } if ("timeout".equals(completedBy)) { aggregationStrategy.timeout(exchange, id, batchSize, completionTimeout); } exchange.setProperty(Exchange.AGGREGATED_COMPLETED_BY, completedBy); // invoke the on completion callback aggregationStrategy.onCompletion(exchange); SessionCompletion sessionCompletion = new SessionCompletion(session); exchange.addOnCompletion(sessionCompletion); try { getProcessor().process(exchange); long total = MESSAGE_PROCESSED.addAndGet(batchSize); LOG.debug("Completed processing[{}]:total={}", id, total); } catch (Exception e) { getExceptionHandler().handleException("Error processing exchange", exchange, e); } } } }
/*
 * Copyright (C) 2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package fathom.utils;

import com.google.common.base.Preconditions;
import fathom.Constants;
import fathom.conf.Mode;
import fathom.conf.RequireSetting;
import fathom.conf.RequireSettings;
import fathom.conf.RequireSettingValue;
import fathom.conf.RequireSettingValues;
import fathom.conf.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.StringJoiner;

/**
 * RequireUtil helps enforce runtime requirements for loading classes,
 * using instances, or executing methods.
 * <p>
 * The requirement checks for classes and methods are identical (both are
 * {@link AnnotatedElement}s), so the shared logic lives in
 * {@link #allowElement(Settings, AnnotatedElement, String)}; previously the
 * same ~100 lines were duplicated for each case, and equivalent skip
 * decisions were logged at inconsistent levels (some {@code warn}, some
 * {@code debug}). All skips are now logged at {@code warn}.
 *
 * @author James Moger
 */
public class RequireUtil {

    private static final Logger log = LoggerFactory.getLogger(RequireUtil.class);

    /**
     * Determines if this object may be used in the current runtime environment.
     * Fathom settings are considered as well as runtime modes.
     *
     * @param settings the runtime settings
     * @param object   the instance (or {@link Method}) to check; must not be null
     * @return true if the object may be used
     */
    public static boolean allowInstance(Settings settings, Object object) {
        Preconditions.checkNotNull(object, "Can not check runtime permissions on a null instance!");
        if (object instanceof Method) {
            return allowMethod(settings, (Method) object);
        }
        return allowClass(settings, object.getClass());
    }

    /**
     * Determines if this class may be used in the current runtime environment.
     * Fathom settings are considered as well as runtime modes.
     *
     * @param settings the runtime settings
     * @param aClass   the class to check
     * @return true if the class may be used
     */
    public static boolean allowClass(Settings settings, Class<?> aClass) {
        return allowElement(settings, aClass, aClass.getName());
    }

    /**
     * Determines if this method may be used in the current runtime environment.
     * Fathom settings are considered as well as runtime modes. The method's
     * declaring class must also be allowed.
     *
     * @param settings the runtime settings
     * @param method   the method to check
     * @return true if the method may be used
     */
    public static boolean allowMethod(Settings settings, Method method) {
        // check the method's own annotations first, then its declaring class
        return allowElement(settings, method, Util.toString(method))
                && allowClass(settings, method.getDeclaringClass());
    }

    /**
     * Shared requirement check for any annotated element (class or method).
     * Evaluates, in order: required setting keys, required setting values,
     * and mode-restricting annotations.
     *
     * @param settings the runtime settings
     * @param element  the class or method to inspect
     * @param name     human-readable identifier used in log messages
     * @return true if the element may be used
     */
    private static boolean allowElement(Settings settings, AnnotatedElement element, String name) {
        // Settings-based exclusions/inclusions
        if (element.isAnnotationPresent(RequireSettings.class)) {
            // multiple keys required
            RequireSetting[] requireSettings = element.getAnnotation(RequireSettings.class).value();
            StringJoiner joiner = new StringJoiner(", ");
            Arrays.asList(requireSettings).forEach((require) -> {
                if (!settings.hasSetting(require.value())) {
                    joiner.add(require.value());
                }
            });
            String requiredSettings = joiner.toString();
            if (!requiredSettings.isEmpty()) {
                log.warn("skipping {}, it requires the following {} mode settings: {}",
                        name, settings.getMode(), requiredSettings);
                return false;
            }
        } else if (element.isAnnotationPresent(RequireSetting.class)) {
            // single key required
            String requiredKey = element.getAnnotation(RequireSetting.class).value();
            if (!settings.hasSetting(requiredKey)) {
                log.warn("skipping {}, it requires the following {} mode setting: {}",
                        name, settings.getMode(), requiredKey);
                return false;
            }
        }

        // Value-based exclusions/inclusions
        if (element.isAnnotationPresent(RequireSettingValues.class)) {
            // multiple key=value pairs required
            RequireSettingValue[] requireSettingValues = element.getAnnotation(RequireSettingValues.class).value();
            StringJoiner joiner = new StringJoiner(", ");
            Arrays.asList(requireSettingValues).forEach((require) -> {
                if (!require.value().equalsIgnoreCase(settings.getString(require.key(), null))) {
                    joiner.add(require.key() + "=" + require.value());
                }
            });
            String requiredValues = joiner.toString();
            if (!requiredValues.isEmpty()) {
                log.warn("skipping {}, it requires the following {} mode settings: {}",
                        name, settings.getMode(), requiredValues);
                return false;
            }
        } else if (element.isAnnotationPresent(RequireSettingValue.class)) {
            // single key=value pair required
            RequireSettingValue require = element.getAnnotation(RequireSettingValue.class);
            if (!require.value().equalsIgnoreCase(settings.getString(require.key(), null))) {
                log.warn("skipping {}, it requires the following {} mode setting: {}={}",
                        name, settings.getMode(), require.key(), require.value());
                return false;
            }
        }

        // Mode-based exclusions/inclusions: collect modes from meta-annotated annotations
        Set<Constants.Mode> modes = new HashSet<>();
        for (Annotation annotation : element.getAnnotations()) {
            Class<? extends Annotation> annotationClass = annotation.annotationType();
            if (annotationClass.isAnnotationPresent(Mode.class)) {
                Mode mode = annotationClass.getAnnotation(Mode.class);
                modes.add(mode.value());
            }
        }
        // no mode annotations at all means the element is allowed in every mode
        if (!modes.isEmpty() && !modes.contains(settings.getMode())) {
            StringJoiner joiner = new StringJoiner(", ");
            modes.forEach((mode) -> joiner.add(mode.name()));
            String requiredModes = joiner.toString();
            log.warn("skipping {}, it may only be used in the following modes: {}", name, requiredModes);
            return false;
        }
        return true;
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.08.03 at 06:08:32 PM CEST
//

package eu.fbk.knowledgestore.populator.naf.model;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.NormalizedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;

/**
 * JAXB-generated binding for the {@code <term>} XML element (NAF document model,
 * per the package name — schema not visible here). Attributes map to simple
 * String properties; child elements are collected in a single heterogeneous
 * list. Do not hand-edit: regenerated from the source schema.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "sentimentOrSpanOrExternalReferencesOrComponent"
})
@XmlRootElement(name = "term")
public class Term {

    // document-unique identifier (XML ID, whitespace-collapsed)
    @XmlAttribute(name = "id", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    protected String id;
    @XmlAttribute(name = "type")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    protected String type;
    @XmlAttribute(name = "lemma")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    protected String lemma;
    @XmlAttribute(name = "pos")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    protected String pos;
    @XmlAttribute(name = "morphofeat")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    protected String morphofeat;
    @XmlAttribute(name = "netype")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    protected String netype;
    // field is underscore-prefixed because 'case' is a Java keyword;
    // the XML attribute name is still "case"
    @XmlAttribute(name = "case")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    protected String _case;
    @XmlAttribute(name = "head")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    protected String head;
    // mixed child elements, in document order
    @XmlElements({
        @XmlElement(name = "sentiment", required = true, type = Sentiment.class),
        @XmlElement(name = "span", required = true, type = Span.class),
        @XmlElement(name = "externalReferences", required = true, type = ExternalReferences.class),
        @XmlElement(name = "component", required = true, type = Component.class)
    })
    protected List<Object> sentimentOrSpanOrExternalReferencesOrComponent;

    /**
     * Gets the value of the id property.
     *
     * @return possible object is {@link String }
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value allowed object is {@link String }
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Gets the value of the type property.
     *
     * @return possible object is {@link String }
     */
    public String getType() {
        return type;
    }

    /**
     * Sets the value of the type property.
     *
     * @param value allowed object is {@link String }
     */
    public void setType(String value) {
        this.type = value;
    }

    /**
     * Gets the value of the lemma property.
     *
     * @return possible object is {@link String }
     */
    public String getLemma() {
        return lemma;
    }

    /**
     * Sets the value of the lemma property.
     *
     * @param value allowed object is {@link String }
     */
    public void setLemma(String value) {
        this.lemma = value;
    }

    /**
     * Gets the value of the pos property.
     *
     * @return possible object is {@link String }
     */
    public String getPos() {
        return pos;
    }

    /**
     * Sets the value of the pos property.
     *
     * @param value allowed object is {@link String }
     */
    public void setPos(String value) {
        this.pos = value;
    }

    /**
     * Gets the value of the morphofeat property.
     *
     * @return possible object is {@link String }
     */
    public String getMorphofeat() {
        return morphofeat;
    }

    /**
     * Sets the value of the morphofeat property.
     *
     * @param value allowed object is {@link String }
     */
    public void setMorphofeat(String value) {
        this.morphofeat = value;
    }

    /**
     * Gets the value of the netype property.
     *
     * @return possible object is {@link String }
     */
    public String getNetype() {
        return netype;
    }

    /**
     * Sets the value of the netype property.
     *
     * @param value allowed object is {@link String }
     */
    public void setNetype(String value) {
        this.netype = value;
    }

    /**
     * Gets the value of the case property.
     *
     * @return possible object is {@link String }
     */
    public String getCase() {
        return _case;
    }

    /**
     * Sets the value of the case property.
     *
     * @param value allowed object is {@link String }
     */
    public void setCase(String value) {
        this._case = value;
    }

    /**
     * Gets the value of the head property.
     *
     * @return possible object is {@link String }
     */
    public String getHead() {
        return head;
    }

    /**
     * Sets the value of the head property.
     *
     * @param value allowed object is {@link String }
     */
    public void setHead(String value) {
        this.head = value;
    }

    /**
     * Gets the value of the sentimentOrSpanOrExternalReferencesOrComponent property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the sentimentOrSpanOrExternalReferencesOrComponent property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getSentimentOrSpanOrExternalReferencesOrComponent().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list:
     * {@link Sentiment }
     * {@link Span }
     * {@link ExternalReferences }
     * {@link Component }
     */
    public List<Object> getSentimentOrSpanOrExternalReferencesOrComponent() {
        // lazily initialized so JAXB unmarshalling and user code share one live list
        if (sentimentOrSpanOrExternalReferencesOrComponent == null) {
            sentimentOrSpanOrExternalReferencesOrComponent = new ArrayList<Object>();
        }
        return this.sentimentOrSpanOrExternalReferencesOrComponent;
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.transactions; import java.io.Externalizable; import java.nio.ByteBuffer; import java.util.Collection; import java.util.LinkedList; import java.util.UUID; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import javax.cache.expiry.ExpiryPolicy; import javax.cache.processor.EntryProcessor; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.GridDirectTransient; import org.apache.ignite.internal.IgniteCodeGeneratingFail; import org.apache.ignite.internal.processors.cache.CacheEntryPredicate; import org.apache.ignite.internal.processors.cache.CacheInvokeEntry; import org.apache.ignite.internal.processors.cache.CacheObject; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheEntryEx; import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException; import org.apache.ignite.internal.processors.cache.GridCacheOperation; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.GridCacheUtils; import 
org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.distributed.IgniteExternalizableExpiryPolicy; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.lang.GridPeerDeployAware; import org.apache.ignite.internal.util.tostring.GridToStringBuilder; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.T2; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.plugin.extensions.communication.Message; import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType; import org.apache.ignite.plugin.extensions.communication.MessageReader; import org.apache.ignite.plugin.extensions.communication.MessageWriter; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.internal.processors.cache.GridCacheOperation.READ; import static org.apache.ignite.internal.processors.cache.GridCacheOperation.TRANSFORM; import static org.apache.ignite.internal.processors.cache.GridCacheUtils.KEEP_BINARY_FLAG_MASK; import static org.apache.ignite.internal.processors.cache.GridCacheUtils.OLD_VAL_ON_PRIMARY; import static org.apache.ignite.internal.processors.cache.GridCacheUtils.SKIP_STORE_FLAG_MASK; /** * Transaction entry. Note that it is essential that this class does not override * {@link #equals(Object)} method, as transaction entries should use referential * equality. */ @IgniteCodeGeneratingFail // Field filters should not be generated by MessageCodeGenerator. public class IgniteTxEntry implements GridPeerDeployAware, Message { /** */ private static final long serialVersionUID = 0L; /** Dummy version for non-existing entry read in SERIALIZABLE transaction. 
*/ public static final GridCacheVersion SER_READ_EMPTY_ENTRY_VER = new GridCacheVersion(0, 0, 0, 0); /** Dummy version for any existing entry read in SERIALIZABLE transaction. */ public static final GridCacheVersion SER_READ_NOT_EMPTY_VER = new GridCacheVersion(0, 0, 0, 1); /** */ public static final GridCacheVersion GET_ENTRY_INVALID_VER_UPDATED = new GridCacheVersion(0, 0, 0, 2); /** */ public static final GridCacheVersion GET_ENTRY_INVALID_VER_AFTER_GET = new GridCacheVersion(0, 0, 0, 3); /** Prepared flag updater. */ private static final AtomicIntegerFieldUpdater<IgniteTxEntry> PREPARED_UPD = AtomicIntegerFieldUpdater.newUpdater(IgniteTxEntry.class, "prepared"); /** Owning transaction. */ @GridToStringExclude @GridDirectTransient private IgniteInternalTx tx; /** Cache key. */ @GridToStringInclude private KeyCacheObject key; /** Cache ID. */ private int cacheId; /** Transient tx key. */ @GridDirectTransient private IgniteTxKey txKey; /** Cache value. */ @GridToStringInclude private TxEntryValueHolder val = new TxEntryValueHolder(); /** Visible value for peek. */ @GridToStringInclude @GridDirectTransient private TxEntryValueHolder prevVal = new TxEntryValueHolder(); /** Old value before update. */ @GridToStringInclude private TxEntryValueHolder oldVal = new TxEntryValueHolder(); /** Transform. */ @GridToStringInclude @GridDirectTransient private Collection<T2<EntryProcessor<Object, Object, Object>, Object[]>> entryProcessorsCol; /** Transient field for calculated entry processor value. */ @GridDirectTransient private T2<GridCacheOperation, CacheObject> entryProcessorCalcVal; /** Transform closure bytes. */ @GridToStringExclude private byte[] transformClosBytes; /** Time to live. */ private long ttl; /** DR expire time (explicit) */ private long conflictExpireTime = CU.EXPIRE_TIME_CALCULATE; /** Conflict version. */ private GridCacheVersion conflictVer; /** Explicit lock version if there is one. 
*/ @GridToStringInclude private GridCacheVersion explicitVer; /** DHT version. */ @GridDirectTransient private volatile GridCacheVersion dhtVer; /** Put filters. */ @GridToStringInclude private CacheEntryPredicate[] filters; /** Flag indicating whether filters passed. Used for fast-commit transactions. */ @GridDirectTransient private boolean filtersPassed; /** Flag indicating that filter is set and can not be replaced. */ @GridDirectTransient private boolean filtersSet; /** Underlying cache entry. */ @GridDirectTransient private volatile GridCacheEntryEx entry; /** Cache registry. */ @GridDirectTransient private GridCacheContext<?, ?> ctx; /** Prepared flag to prevent multiple candidate add. */ @SuppressWarnings("UnusedDeclaration") @GridDirectTransient private transient volatile int prepared; /** Lock flag for collocated cache. */ @GridDirectTransient private transient boolean locked; /** Assigned node ID (required only for partitioned cache). */ @GridDirectTransient private UUID nodeId; /** Flag if this node is a back up node. */ @GridDirectTransient private boolean locMapped; /** Expiry policy. */ @GridDirectTransient private ExpiryPolicy expiryPlc; /** Expiry policy transfer flag. */ @GridDirectTransient private boolean transferExpiryPlc; /** Expiry policy bytes. */ private byte[] expiryPlcBytes; /** * Additional flags: * <ul> * <li>{@link GridCacheUtils#SKIP_STORE_FLAG_MASK} - for skipStore flag value.</li> * <li>{@link GridCacheUtils#KEEP_BINARY_FLAG_MASK} - for withKeepBinary flag.</li> * </ul> */ private byte flags; /** Partition update counter. */ @GridDirectTransient private long partUpdateCntr; /** */ private GridCacheVersion serReadVer; /** * Required by {@link Externalizable} */ public IgniteTxEntry() { /* No-op. */ } /** * This constructor is meant for remote transactions. * * @param ctx Cache registry. * @param tx Owning transaction. * @param op Operation. * @param val Value. * @param ttl Time to live. * @param conflictExpireTime DR expire time. 
* @param entry Cache entry. * @param conflictVer Data center replication version. * @param skipStore Skip store flag. */ public IgniteTxEntry(GridCacheContext<?, ?> ctx, IgniteInternalTx tx, GridCacheOperation op, CacheObject val, long ttl, long conflictExpireTime, GridCacheEntryEx entry, @Nullable GridCacheVersion conflictVer, boolean skipStore, boolean keepBinary ) { assert ctx != null; assert tx != null; assert op != null; assert entry != null; this.ctx = ctx; this.tx = tx; this.val.value(op, val, false, false); this.entry = entry; this.ttl = ttl; this.conflictExpireTime = conflictExpireTime; this.conflictVer = conflictVer; skipStore(skipStore); keepBinary(keepBinary); key = entry.key(); cacheId = entry.context().cacheId(); } /** * This constructor is meant for local transactions. * * @param ctx Cache registry. * @param tx Owning transaction. * @param op Operation. * @param val Value. * @param entryProcessor Entry processor. * @param invokeArgs Optional arguments for EntryProcessor. * @param ttl Time to live. * @param entry Cache entry. * @param filters Put filters. * @param conflictVer Data center replication version. * @param skipStore Skip store flag. */ public IgniteTxEntry(GridCacheContext<?, ?> ctx, IgniteInternalTx tx, GridCacheOperation op, CacheObject val, EntryProcessor<Object, Object, Object> entryProcessor, Object[] invokeArgs, long ttl, GridCacheEntryEx entry, CacheEntryPredicate[] filters, GridCacheVersion conflictVer, boolean skipStore, boolean keepBinary ) { assert ctx != null; assert tx != null; assert op != null; assert entry != null; this.ctx = ctx; this.tx = tx; this.val.value(op, val, false, false); this.entry = entry; this.ttl = ttl; this.filters = filters; this.conflictVer = conflictVer; skipStore(skipStore); keepBinary(keepBinary); if (entryProcessor != null) addEntryProcessor(entryProcessor, invokeArgs); key = entry.key(); cacheId = entry.context().cacheId(); } /** * @return Cache context for this tx entry. 
*/
    public GridCacheContext<?, ?> context() {
        return ctx;
    }

    /** @return Flag indicating if this entry is affinity mapped to the same node. */
    public boolean locallyMapped() {
        return locMapped;
    }

    /** @param locMapped Flag indicating if this entry is affinity mapped to the same node. */
    public void locallyMapped(boolean locMapped) {
        this.locMapped = locMapped;
    }

    /**
     * Creates a shallow copy of this entry bound to the given cache context.
     * Note: nodeId, dhtVer, lock state and the cached {@code entry} reference are
     * not copied — only key, value holder, filters, processors, versions, expiry
     * and flags carry over.
     *
     * @param ctx Context.
     * @return Clean copy of this entry.
     */
    public IgniteTxEntry cleanCopy(GridCacheContext<?, ?> ctx) {
        IgniteTxEntry cp = new IgniteTxEntry();

        cp.key = key;
        cp.cacheId = cacheId;
        cp.ctx = ctx;

        cp.val = new TxEntryValueHolder();

        cp.filters = filters;
        cp.val.value(val.op(), val.value(), val.hasWriteValue(), val.hasReadValue());
        cp.entryProcessorsCol = entryProcessorsCol;
        cp.ttl = ttl;
        cp.conflictExpireTime = conflictExpireTime;
        cp.explicitVer = explicitVer;
        cp.conflictVer = conflictVer;
        cp.expiryPlc = expiryPlc;
        cp.flags = flags;
        cp.serReadVer = serReadVer;

        return cp;
    }

    /** @return Node ID. */
    public UUID nodeId() {
        return nodeId;
    }

    /** @param nodeId Node ID. */
    public void nodeId(UUID nodeId) {
        this.nodeId = nodeId;
    }

    /** @return DHT version. */
    public GridCacheVersion dhtVersion() {
        return dhtVer;
    }

    /** @param dhtVer DHT version. */
    public void dhtVersion(GridCacheVersion dhtVer) {
        this.dhtVer = dhtVer;
    }

    /** @return {@code True} if tx entry was marked as locked. */
    public boolean locked() {
        return locked;
    }

    /** Marks tx entry as locked. One-way transition: there is no unlock counterpart here. */
    public void markLocked() {
        locked = true;
    }

    /**
     * Sets partition counter.
     *
     * @param partCntr Partition counter.
     */
    public void updateCounter(long partCntr) {
        this.partUpdateCntr = partCntr;
    }

    /** @return Partition update counter. */
    public long updateCounter() {
        return partUpdateCntr;
    }

    /**
     * Sets the value using the current operation and marks it valid.
     *
     * @param val Value to set.
     */
    public void setAndMarkValid(CacheObject val) {
        setAndMarkValid(op(), val, this.val.hasWriteValue(), this.val.hasReadValue());
    }

    /**
     * @param op Operation.
     * @param val Value to set.
     */
    void setAndMarkValid(GridCacheOperation op, CacheObject val) {
        setAndMarkValid(op, val, this.val.hasWriteValue(), this.val.hasReadValue());
    }

    /**
     * @param op Operation.
     * @param val Value to set.
     * @param hasWriteVal Has write value flag.
     * @param hasReadVal Has read value flag.
     */
    void setAndMarkValid(GridCacheOperation op, CacheObject val, boolean hasWriteVal, boolean hasReadVal) {
        this.val.value(op, val, hasWriteVal, hasReadVal);

        markValid();
    }

    /**
     * Marks this entry as value-has-been-read. Effectively, makes values enlisted to transaction visible
     * to further peek operations by snapshotting the current value holder into {@code prevVal}.
     */
    public void markValid() {
        prevVal.value(val.op(), val.value(), val.hasWriteValue(), val.hasReadValue());
    }

    /**
     * Marks entry as prepared via an atomic 0 -> 1 CAS on the PREPARED_UPD field updater
     * (declared elsewhere in this class), so only one caller wins.
     *
     * @return True if entry was marked prepared by this call.
     */
    boolean markPrepared() {
        return PREPARED_UPD.compareAndSet(this, 0, 1);
    }

    /** @return Entry key. */
    public KeyCacheObject key() {
        return key;
    }

    /** @return Cache ID. */
    public int cacheId() {
        return cacheId;
    }

    /**
     * Sets skip store flag value.
     *
     * @param skipStore Skip store flag.
     */
    public void skipStore(boolean skipStore) {
        setFlag(skipStore, SKIP_STORE_FLAG_MASK);
    }

    /** @return Skip store flag. */
    public boolean skipStore() {
        return isFlag(SKIP_STORE_FLAG_MASK);
    }

    /** @param oldValOnPrimary {@code True} if old value was non null on primary node. */
    public void oldValueOnPrimary(boolean oldValOnPrimary) {
        setFlag(oldValOnPrimary, OLD_VAL_ON_PRIMARY);
    }

    /** @return {@code True} if old value for 'invoke' operation was non null on primary node. */
    public boolean oldValueOnPrimary() {
        return isFlag(OLD_VAL_ON_PRIMARY);
    }

    /**
     * Sets keep binary flag value.
     *
     * @param keepBinary Keep binary flag value.
     */
    public void keepBinary(boolean keepBinary) {
        setFlag(keepBinary, KEEP_BINARY_FLAG_MASK);
    }

    /** @return Keep binary flag value. */
    public boolean keepBinary() {
        return isFlag(KEEP_BINARY_FLAG_MASK);
    }

    /**
     * Sets or clears a bit in the packed {@code flags} byte.
     *
     * @param flag Set or clear.
     * @param mask Mask.
     */
    private void setFlag(boolean flag, int mask) {
        flags = flag ? (byte)(flags | mask) : (byte)(flags & ~mask);
    }

    /**
     * Reads flag mask.
     *
     * @param mask Mask to read.
     * @return Flag value.
     */
    private boolean isFlag(int mask) {
        return (flags & mask) != 0;
    }

    /**
     * @return Tx key (lazily created and cached; not thread-safe, relies on
     *     single-threaded access to the tx entry).
     */
    public IgniteTxKey txKey() {
        if (txKey == null)
            txKey = new IgniteTxKey(key, cacheId);

        return txKey;
    }

    /** @return Underlying cache entry. */
    public GridCacheEntryEx cached() {
        return entry;
    }

    /** @param entry Cache entry. Must belong to the same cache context as this tx entry. */
    public void cached(GridCacheEntryEx entry) {
        assert entry != null;

        assert entry.context() == ctx : "Invalid entry assigned to tx entry [txEntry=" + this +
            ", entry=" + entry + ", ctxNear=" + ctx.isNear() + ", ctxDht=" + ctx.isDht() + ']';

        this.entry = entry;
    }

    /** @return Entry value. */
    @Nullable public CacheObject value() {
        return val.value();
    }

    /** @return Old value. */
    @Nullable public CacheObject oldValue() {
        return oldVal != null ? oldVal.value() : null;
    }

    /**
     * @param oldVal Old value.
     * @param hasOldVal Whether an old value is actually present (applied to both read and write flags).
     */
    public void oldValue(CacheObject oldVal, boolean hasOldVal) {
        if (this.oldVal == null)
            this.oldVal = new TxEntryValueHolder();

        this.oldVal.value(op(), oldVal, hasOldVal, hasOldVal);
    }

    /** @return {@code True} if old value present. */
    public boolean hasOldValue() {
        return oldVal != null && oldVal.hasValue();
    }

    /** @return {@code True} if has value explicitly set. */
    public boolean hasValue() {
        return val.hasValue();
    }

    /** @return {@code True} if has write value set. */
    public boolean hasWriteValue() {
        return val.hasWriteValue();
    }

    /** @return {@code True} if has read value set. */
    public boolean hasReadValue() {
        return val.hasReadValue();
    }

    /** @return Value visible for peek (snapshot taken by {@link #markValid()}). */
    @Nullable public CacheObject previousValue() {
        return prevVal.value();
    }

    /** @return {@code True} if has previous value explicitly set. */
    boolean hasPreviousValue() {
        return prevVal.hasValue();
    }

    /** @return Previous operation to revert entry in case of filter failure. */
    @Nullable public GridCacheOperation previousOperation() {
        return prevVal.op();
    }

    /** @return Time to live. */
    public long ttl() {
        return ttl;
    }

    /** @param ttl Time to live. */
    public void ttl(long ttl) {
        this.ttl = ttl;
    }

    /** @return Conflict expire time. */
    public long conflictExpireTime() {
        return conflictExpireTime;
    }

    /** @param conflictExpireTime Conflict expire time. */
    public void conflictExpireTime(long conflictExpireTime) {
        this.conflictExpireTime = conflictExpireTime;
    }

    /**
     * Sets the value while keeping the current operation.
     *
     * @param val Entry value.
     * @param writeVal Write value flag.
     * @param readVal Read value flag.
     */
    public void value(@Nullable CacheObject val, boolean writeVal, boolean readVal) {
        this.val.value(this.val.op(), val, writeVal, readVal);
    }

    /**
     * Sets read value if this tx entry does not have write value yet.
     *
     * @param val Read value to set.
     */
    public void readValue(@Nullable CacheObject val) {
        this.val.value(this.val.op(), val, false, true);
    }

    /**
     * Appends an entry processor and switches the operation to TRANSFORM.
     *
     * @param entryProcessor Entry processor.
     * @param invokeArgs Optional arguments for EntryProcessor.
     */
    public void addEntryProcessor(EntryProcessor<Object, Object, Object> entryProcessor, Object[] invokeArgs) {
        if (entryProcessorsCol == null)
            entryProcessorsCol = new LinkedList<>();

        entryProcessorsCol.add(new T2<>(entryProcessor, invokeArgs));

        // Must clear transform closure bytes since collection has changed.
        transformClosBytes = null;

        val.op(TRANSFORM);
    }

    /** @return Collection of entry processors. */
    public Collection<T2<EntryProcessor<Object, Object, Object>, Object[]>> entryProcessors() {
        return entryProcessorsCol;
    }

    /**
     * Runs all registered entry processors in order against the given value and
     * returns the resulting cache object.
     *
     * @param cacheVal Value.
     * @return New value.
     */
    @SuppressWarnings("unchecked")
    public CacheObject applyEntryProcessors(CacheObject cacheVal) {
        GridCacheVersion ver;

        try {
            ver = entry.version();
        }
        catch (GridCacheEntryRemovedException ignore) {
            assert tx == null || tx.optimistic() : tx;

            ver = null;
        }

        Object val = null;
        Object keyVal = null;

        for (T2<EntryProcessor<Object, Object, Object>, Object[]> t : entryProcessors()) {
            try {
                CacheInvokeEntry<Object, Object> invokeEntry = new CacheInvokeEntry(key, keyVal, cacheVal, val,
                    ver, keepBinary(), cached());

                EntryProcessor processor = t.get1();

                processor.process(invokeEntry, t.get2());

                val = invokeEntry.getValue();

                keyVal = invokeEntry.key();
            }
            catch (Exception ignore) {
                // NOTE(review): processor exceptions are deliberately swallowed here, so a failing
                // processor leaves the previous intermediate value in place — presumably the failure
                // is surfaced to the caller elsewhere; confirm before changing.
                // No-op.
            }
        }

        return ctx.toCacheObject(val);
    }

    /**
     * Replaces the entry processors collection wholesale.
     *
     * @param entryProcessorsCol Collection of entry processors.
     */
    public void entryProcessors(
        @Nullable Collection<T2<EntryProcessor<Object, Object, Object>, Object[]>> entryProcessorsCol) {
        this.entryProcessorsCol = entryProcessorsCol;

        // Must clear transform closure bytes since collection has changed.
        transformClosBytes = null;
    }

    /** @return Cache operation. */
    public GridCacheOperation op() {
        return val.op();
    }

    /** @param op Cache operation. */
    public void op(GridCacheOperation op) {
        val.op(op);
    }

    /** @return {@code True} if read entry. */
    public boolean isRead() {
        return op() == READ;
    }

    /** @param explicitVer Explicit version. */
    public void explicitVersion(GridCacheVersion explicitVer) {
        this.explicitVer = explicitVer;
    }

    /** @return Explicit version. */
    public GridCacheVersion explicitVersion() {
        return explicitVer;
    }

    /** @return Conflict version. */
    @Nullable public GridCacheVersion conflictVersion() {
        return conflictVer;
    }

    /** @param conflictVer Conflict version. */
    public void conflictVersion(@Nullable GridCacheVersion conflictVer) {
        this.conflictVer = conflictVer;
    }

    /** @return Put filters. */
    public CacheEntryPredicate[] filters() {
        return filters;
    }

    /** @param filters Put filters. */
    public void filters(CacheEntryPredicate[] filters) {
        this.filters = filters;
    }

    /** @return {@code True} if filters passed for fast-commit transactions. */
    public boolean filtersPassed() {
        return filtersPassed;
    }

    /** @param filtersPassed {@code True} if filters passed for fast-commit transactions. */
    public void filtersPassed(boolean filtersPassed) {
        this.filtersPassed = filtersPassed;
    }

    /** @return {@code True} if filters are set. */
    public boolean filtersSet() {
        return filtersSet;
    }

    /** @param filtersSet {@code True} if filters are set and should not be replaced. */
    public void filtersSet(boolean filtersSet) {
        this.filtersSet = filtersSet;
    }

    /**
     * Prepares this entry for wire transfer: filters, entry processors, key, value
     * and (optionally) the expiry policy are marshalled into their byte forms.
     *
     * @param ctx Context.
     * @param transferExpiry {@code True} if expire policy should be marshalled.
     * @throws IgniteCheckedException If failed.
     */
    public void marshal(GridCacheSharedContext<?, ?> ctx, boolean transferExpiry) throws IgniteCheckedException {
        if (filters != null) {
            for (CacheEntryPredicate p : filters) {
                if (p != null)
                    p.prepareMarshal(this.ctx);
            }
        }

        // Marshal entry processors only once; skip if already serialized or none registered.
        if (transformClosBytes == null && entryProcessorsCol != null)
            transformClosBytes = CU.marshal(this.ctx, entryProcessorsCol);

        if (transferExpiry)
            transferExpiryPlc = expiryPlc != null && expiryPlc != this.ctx.expiry();

        key.prepareMarshal(context().cacheObjectContext());

        val.marshal(context());

        if (transferExpiryPlc) {
            if (expiryPlcBytes == null)
                expiryPlcBytes = CU.marshal(this.ctx, new IgniteExternalizableExpiryPolicy(expiryPlc));
        }
        else
            expiryPlcBytes = null;
    }

    /**
     * Unmarshalls entry, resolving the cache context by ID and flipping between
     * near/DHT contexts depending on the {@code near} flag.
     *
     * @param ctx Cache context.
     * @param near Near flag.
     * @param clsLdr Class loader.
     * @throws IgniteCheckedException If un-marshalling failed.
     */
    public void unmarshal(GridCacheSharedContext<?, ?> ctx, boolean near, ClassLoader clsLdr) throws IgniteCheckedException {
        if (this.ctx == null) {
            GridCacheContext<?, ?> cacheCtx = ctx.cacheContext(cacheId);

            assert cacheCtx != null : "Failed to find cache context [cacheId=" + cacheId +
                ", readyTopVer=" + ctx.exchange().readyAffinityVersion() + ']';

            if (cacheCtx.isNear() && !near)
                cacheCtx = cacheCtx.near().dht().context();
            else if (!cacheCtx.isNear() && near)
                cacheCtx = cacheCtx.dht().near().context();

            this.ctx = cacheCtx;
        }

        // Unmarshal transform closure anyway if it exists.
        if (transformClosBytes != null && entryProcessorsCol == null)
            entryProcessorsCol = U.unmarshal(ctx, transformClosBytes, U.resolveClassLoader(clsLdr, ctx.gridConfig()));

        if (filters == null)
            filters = CU.empty0();
        else {
            for (CacheEntryPredicate p : filters) {
                if (p != null)
                    p.finishUnmarshal(ctx.cacheContext(cacheId), clsLdr);
            }
        }

        key.finishUnmarshal(context().cacheObjectContext(), clsLdr);

        val.unmarshal(this.ctx, clsLdr);

        if (expiryPlcBytes != null && expiryPlc == null)
            expiryPlc = U.unmarshal(ctx, expiryPlcBytes, U.resolveClassLoader(clsLdr, ctx.gridConfig()));
    }

    /** @param expiryPlc Expiry policy. */
    public void expiry(@Nullable ExpiryPolicy expiryPlc) {
        this.expiryPlc = expiryPlc;
    }

    /** @return Expiry policy. */
    @Nullable public ExpiryPolicy expiry() {
        return expiryPlc;
    }

    /** @return Entry processor calculated value. */
    public T2<GridCacheOperation, CacheObject> entryProcessorCalculatedValue() {
        return entryProcessorCalcVal;
    }

    /** @param entryProcessorCalcVal Entry processor calculated value. */
    public void entryProcessorCalculatedValue(T2<GridCacheOperation, CacheObject> entryProcessorCalcVal) {
        assert entryProcessorCalcVal != null;

        this.entryProcessorCalcVal = entryProcessorCalcVal;
    }

    /**
     * Gets stored entry version. Version is stored for all entries in serializable transaction or
     * when value is read using {@link IgniteCache#getEntry(Object)} method.
     *
     * @return Entry version.
     */
    @Nullable public GridCacheVersion entryReadVersion() {
        return serReadVer;
    }

    /** @param ver Entry version. May be set only once; asserts on overwrite. */
    public void entryReadVersion(GridCacheVersion ver) {
        assert this.serReadVer == null : "Wrong version [serReadVer=" + serReadVer + ", ver=" + ver + "]";
        assert ver != null;

        this.serReadVer = ver;
    }

    /**
     * Clears recorded read version, should be done before starting commit of not serializable/optimistic transaction.
     */
    public void clearEntryReadVersion() {
        serReadVer = null;
    }

    /** {@inheritDoc} */
    @Override public void onAckReceived() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // Intentional fall-through between cases: writer.state() resumes a partial
        // write where it stopped when the buffer filled up.
        switch (writer.state()) {
            case 0:
                if (!writer.writeInt("cacheId", cacheId))
                    return false;

                writer.incrementState();

            case 1:
                if (!writer.writeLong("conflictExpireTime", conflictExpireTime))
                    return false;

                writer.incrementState();

            case 2:
                if (!writer.writeMessage("conflictVer", conflictVer))
                    return false;

                writer.incrementState();

            case 3:
                if (!writer.writeByteArray("expiryPlcBytes", expiryPlcBytes))
                    return false;

                writer.incrementState();

            case 4:
                if (!writer.writeMessage("explicitVer", explicitVer))
                    return false;

                writer.incrementState();

            case 5:
                // Empty/all-null filter arrays are written as null to save space.
                if (!writer.writeObjectArray("filters", !F.isEmptyOrNulls(filters) ? filters : null,
                    MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 6:
                if (!writer.writeByte("flags", flags))
                    return false;

                writer.incrementState();

            case 7:
                if (!writer.writeMessage("key", key))
                    return false;

                writer.incrementState();

            case 8:
                if (!writer.writeMessage("oldVal", oldVal))
                    return false;

                writer.incrementState();

            case 9:
                if (!writer.writeMessage("serReadVer", serReadVer))
                    return false;

                writer.incrementState();

            case 10:
                if (!writer.writeByteArray("transformClosBytes", transformClosBytes))
                    return false;

                writer.incrementState();

            case 11:
                if (!writer.writeLong("ttl", ttl))
                    return false;

                writer.incrementState();

            case 12:
                if (!writer.writeMessage("val", val))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        // Mirror of writeTo(): intentional fall-through, resumable via reader.state().
        switch (reader.state()) {
            case 0:
                cacheId = reader.readInt("cacheId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 1:
                conflictExpireTime = reader.readLong("conflictExpireTime");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 2:
                conflictVer = reader.readMessage("conflictVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 3:
                expiryPlcBytes = reader.readByteArray("expiryPlcBytes");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 4:
                explicitVer = reader.readMessage("explicitVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 5:
                filters = reader.readObjectArray("filters", MessageCollectionItemType.MSG, CacheEntryPredicate.class);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 6:
                flags = reader.readByte("flags");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 7:
                key = reader.readMessage("key");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 8:
                oldVal = reader.readMessage("oldVal");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 9:
                serReadVer = reader.readMessage("serReadVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 10:
                transformClosBytes = reader.readByteArray("transformClosBytes");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 11:
                ttl = reader.readLong("ttl");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 12:
                val = reader.readMessage("val");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(IgniteTxEntry.class);
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        return 100;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 13;
    }

    /** {@inheritDoc} */
    @Override public Class<?> deployClass() {
        ClassLoader clsLdr = getClass().getClassLoader();

        CacheObject val = value();

        // First of all check classes that may be loaded by class loader other than application one.
        return key != null && !clsLdr.equals(key.getClass().getClassLoader()) ?
            key.getClass() : val != null ?
            val.getClass() : getClass();
    }

    /** {@inheritDoc} */
    @Override public ClassLoader classLoader() {
        return deployClass().getClassLoader();
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return GridToStringBuilder.toString(IgniteTxEntry.class, this, "xidVer", tx == null ? "null" : tx.xidVersion());
    }
}
/* * Author: Chih-Chiang Tsou <chihchiang.tsou@gmail.com> * Nesvizhskii Lab, Department of Computational Medicine and Bioinformatics, * University of Michigan, Ann Arbor * * Copyright 2014 University of Michigan, Ann Arbor, MI * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package DIA_Umpire_Quant; /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ import MSUmpire.BaseDataStructure.UmpireInfo; import MSUmpire.DIA.DIAPack; import MSUmpire.BaseDataStructure.DBSearchParam; import MSUmpire.BaseDataStructure.TandemParam; import MSUmpire.PSMDataStructure.FragmentPeak; import MSUmpire.PSMDataStructure.LCMSID; import MSUmpire.PSMDataStructure.PTMManager; import MSUmpire.PSMDataStructure.ProtID; import MSUmpire.PSMDataStructure.FragmentSelection; import MSUmpire.Utility.ExportTable; import MSUmpire.SearchResultParser.ProtXMLParser; import MSUmpire.Utility.ConsoleLogger; import MSUmpire.Utility.DateTimeTag; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; /** * @author Chih-Chiang Tsou */ public class DIA_Umpire_ProtQuant { 
/**
     * Entry point: reads a DIA-Umpire parameter file, builds a protein master list
     * from a combined prot.xml (with FDR filtering), loads serialized per-file
     * identification results, selects top peptides/fragments across the dataset,
     * exports quantitation report tables and, optionally, SAINT input files.
     *
     * @param args the command line arguments (exactly one: path to the parameter file)
     */
    public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
        System.out.println("=================================================================================================");
        System.out.println("DIA-Umpire protein quantitation module (version: " + UmpireInfo.GetInstance().Version + ")");
        if (args.length != 1) {
            // NOTE(review): "PortQuant" in the message below looks like a typo for "ProtQuant" — confirm before changing.
            System.out.println("command format error, the correct format should be: java -jar -Xmx10G DIA_Umpire_PortQuant.jar diaumpire_module.params");
            return;
        }
        try {
            ConsoleLogger.SetConsoleLogger(Level.INFO);
            // NOTE(review): "orotquant" in the log file name looks like a typo for "protquant" — confirm before changing.
            ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_orotquant.log");
        } catch (Exception e) {
            // Logger setup failure is deliberately non-fatal; processing continues without file logging.
        }

        Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
        Logger.getRootLogger().info("Parameter file:" + args[0]);

        // NOTE(review): reader is never closed — consider try-with-resources.
        BufferedReader reader = new BufferedReader(new FileReader(args[0]));
        String line = "";
        String WorkFolder = "";
        int NoCPUs = 2;

        String Combined_Prot = "";
        boolean DefaultProtFiltering = true;

        float Freq = 0f;
        int TopNPep = 6;
        int TopNFrag = 6;
        String FilterWeight = "GW";
        float MinWeight = 0.9f;

        TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600);
        HashMap<String, File> AssignFiles = new HashMap<>();

        // SAINT export settings: which quantitation level(s) to emit and the bait/control layout.
        boolean ExportSaint = false;
        boolean SAINT_MS1 = false;
        boolean SAINT_MS2 = true;
        HashMap<String, String[]> BaitList = new HashMap<>();
        HashMap<String, String> BaitName = new HashMap<>();
        HashMap<String, String[]> ControlList = new HashMap<>();
        HashMap<String, String> ControlName = new HashMap<>();

        //<editor-fold defaultstate="collapsed" desc="Reading parameter file">
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            Logger.getRootLogger().info(line);
            if (!"".equals(line) && !line.startsWith("#")) {
                // Explicit file list section: paths listed between begin/end markers.
                if (line.equals("==File list begin")) {
                    do {
                        line = reader.readLine();
                        line = line.trim();
                        if (line.equals("==File list end")) {
                            continue;
                        } else if (!"".equals(line)) {
                            File newfile = new File(line);
                            if (newfile.exists()) {
                                AssignFiles.put(newfile.getAbsolutePath(), newfile);
                            } else {
                                Logger.getRootLogger().info("File: " + newfile + " does not exist.");
                            }
                        }
                    } while (!line.equals("==File list end"));
                }
                // Key=value settings. NOTE(review): split("=") truncates values that
                // themselves contain '=' — only the first segment is used; confirm acceptable.
                if (line.split("=").length < 2) {
                    continue;
                }
                String type = line.split("=")[0].trim();
                String value = line.split("=")[1].trim();
                switch (type) {
                    case "Path": {
                        WorkFolder = value;
                        break;
                    }
                    case "path": {
                        WorkFolder = value;
                        break;
                    }
                    case "Thread": {
                        NoCPUs = Integer.parseInt(value);
                        break;
                    }
                    case "Fasta": {
                        tandemPara.FastaPath = value;
                        break;
                    }
                    case "Combined_Prot": {
                        Combined_Prot = value;
                        break;
                    }
                    case "DefaultProtFiltering": {
                        DefaultProtFiltering = Boolean.parseBoolean(value);
                        break;
                    }
                    case "DecoyPrefix": {
                        if (!"".equals(value)) {
                            tandemPara.DecoyPrefix = value;
                        }
                        break;
                    }
                    case "ProteinFDR": {
                        tandemPara.ProtFDR = Float.parseFloat(value);
                        break;
                    }
                    case "FilterWeight": {
                        FilterWeight = value;
                        break;
                    }
                    case "MinWeight": {
                        MinWeight = Float.parseFloat(value);
                        break;
                    }
                    case "TopNFrag": {
                        TopNFrag = Integer.parseInt(value);
                        break;
                    }
                    case "TopNPep": {
                        TopNPep = Integer.parseInt(value);
                        break;
                    }
                    case "Freq": {
                        Freq = Float.parseFloat(value);
                        break;
                    }
                    //<editor-fold defaultstate="collapsed" desc="SaintOutput">
                    case "ExportSaintInput": {
                        ExportSaint = Boolean.parseBoolean(value);
                        break;
                    }
                    case "QuantitationType": {
                        switch (value) {
                            case "MS1": {
                                SAINT_MS1 = true;
                                SAINT_MS2 = false;
                                break;
                            }
                            case "MS2": {
                                SAINT_MS1 = false;
                                SAINT_MS2 = true;
                                break;
                            }
                            case "BOTH": {
                                SAINT_MS1 = true;
                                SAINT_MS2 = true;
                                break;
                            }
                        }
                        break;
                    }
                    default: {
                        // Per-sample bait/control entries carry the sample key as a name suffix,
                        // e.g. "BaitName_<key>" / "BaitFile_<key>" (tab-separated file lists).
                        if (type.startsWith("BaitName_")) {
                            BaitName.put(type.substring(9), value);
                        }
                        if (type.startsWith("BaitFile_")) {
                            BaitList.put(type.substring(9), value.split("\t"));
                        }
                        if (type.startsWith("ControlName_")) {
                            ControlName.put(type.substring(12), value);
                        }
                        if (type.startsWith("ControlFile_")) {
                            ControlList.put(type.substring(12), value.split("\t"));
                        }
                        break;
                    }
                    //</editor-fold>
                }
            }
        }
        //</editor-fold>

        //Initialize PTM manager using compomics library
        PTMManager.GetInstance();

        //Check if the fasta file can be found
        if (!new File(tandemPara.FastaPath).exists()) {
            Logger.getRootLogger().info("Fasta file :" + tandemPara.FastaPath + " cannot be found, the process will be terminated, please check.");
            System.exit(1);
        }

        //Check if the prot.xml file can be found
        if (!new File(Combined_Prot).exists()) {
            Logger.getRootLogger().info("ProtXML file: " + Combined_Prot + " cannot be found, the export protein summary table will be empty.");
        }

        LCMSID protID = null;

        //Parse prot.xml and generate protein master list given an FDR
        if (Combined_Prot != null && !Combined_Prot.equals("")) {
            // Use a previously serialized master list if available; otherwise parse and serialize.
            protID = LCMSID.ReadLCMSIDSerialization(Combined_Prot);
            if (!"".equals(Combined_Prot) && protID == null) {
                protID = new LCMSID(Combined_Prot, tandemPara.DecoyPrefix, tandemPara.FastaPath);
                ProtXMLParser protxmlparser = new ProtXMLParser(protID, Combined_Prot, 0f);
                //Use DIA-Umpire default protein FDR calculation
                if (DefaultProtFiltering) {
                    protID.RemoveLowLocalPWProtein(0.8f);
                    protID.RemoveLowMaxIniProbProtein(0.9f);
                    protID.FilterByProteinDecoyFDRUsingMaxIniProb(tandemPara.DecoyPrefix, tandemPara.ProtFDR);
                }
                //Get protein FDR calculation without other filtering
                else {
                    protID.FilterByProteinDecoyFDRUsingLocalPW(tandemPara.DecoyPrefix, tandemPara.ProtFDR);
                }
                protID.LoadSequence();
                protID.WriteLCMSIDSerialization(Combined_Prot);
            }
            Logger.getRootLogger().info("Protein No.:" + protID.ProteinList.size());
        }
        HashMap<String, HashMap<String, FragmentPeak>> IDSummaryFragments = new HashMap<>();

        //Generate DIA file list
        ArrayList<DIAPack> FileList = new ArrayList<>();

        try {
            File folder = new File(WorkFolder);
            if (!folder.exists()) {
                Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found.");
                System.exit(1);
            }
            // Scan the work folder (one level deep) for mzXML/mzML inputs, excluding
            // DIA-Umpire's own q1/q2/q3 pseudo-MS2 outputs.
            for (final File fileEntry : folder.listFiles()) {
                if (fileEntry.isFile()
                        && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml") | fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                        && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                        && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                        && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                    AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry);
                }
                if (fileEntry.isDirectory()) {
                    for (final File fileEntry2 : fileEntry.listFiles()) {
                        if (fileEntry2.isFile()
                                && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml") | fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                                && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                                && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                                && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                            AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2);
                        }
                    }
                }
            }

            Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size());
            for (File fileEntry : AssignFiles.values()) {
                Logger.getRootLogger().info(fileEntry.getAbsolutePath());
            }
            for (File fileEntry : AssignFiles.values()) {
                String mzXMLFile = fileEntry.getAbsolutePath();
                if (mzXMLFile.toLowerCase().endsWith(".mzxml") | mzXMLFile.toLowerCase().endsWith(".mzml")) {
                    DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs);
                    Logger.getRootLogger().info("=================================================================================================");
                    Logger.getRootLogger().info("Processing " + mzXMLFile);

                    if (!DiaFile.LoadDIASetting()) {
                        Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete");
                        System.exit(1);
                    }
                    if (!DiaFile.LoadParams()) {
                        Logger.getRootLogger().info("Loading parameters failed, job is incomplete");
                        System.exit(1);
                    }

                    Logger.getRootLogger().info("Loading identification results " + mzXMLFile + "....");

                    //If the serialization file for ID file existed
                    // (files without a serialized LCMSID are silently skipped)
                    if (DiaFile.ReadSerializedLCMSID()) {
                        DiaFile.IDsummary.ReduceMemoryUsage();
                        DiaFile.IDsummary.ClearAssignPeakCluster();
                        FileList.add(DiaFile);
                        HashMap<String, FragmentPeak> FragMap = new HashMap<>();
                        IDSummaryFragments.put(FilenameUtils.getBaseName(mzXMLFile), FragMap);
                    }
                }
            }

            //<editor-fold defaultstate="collapsed" desc="Peptide and fragment selection">
            Logger.getRootLogger().info("Peptide and fragment selection across the whole dataset");
            ArrayList<LCMSID> SummaryList = new ArrayList<>();
            for (DIAPack diafile : FileList) {
                if (protID != null) {
                    //Generate protein list according to mapping of peptide ions for each DIA file to the master protein list
                    diafile.IDsummary.GenerateProteinByRefIDByPepSeq(protID, true);
                    diafile.IDsummary.ReMapProPep();
                }
                if ("GW".equals(FilterWeight)) {
                    diafile.IDsummary.SetFilterByGroupWeight();
                } else if ("PepW".equals(FilterWeight)) {
                    diafile.IDsummary.SetFilterByWeight();
                }
                SummaryList.add(diafile.IDsummary);
            }
            FragmentSelection fragselection = new FragmentSelection(SummaryList);
            fragselection.freqPercent = Freq;
            fragselection.GeneratePepFragScoreMap();
            fragselection.GenerateTopFragMap(TopNFrag);
            fragselection.GenerateProtPepScoreMap(MinWeight);
            fragselection.GenerateTopPepMap(TopNPep);
            //</editor-fold>

            //<editor-fold defaultstate="collapsed" desc="Writing general reports">
            ExportTable export = new ExportTable(WorkFolder, SummaryList, IDSummaryFragments, protID, fragselection);
            export.Export(TopNPep, TopNFrag, Freq);
            //</editor-fold>

            //<editor-fold defaultstate="collapsed" desc="Generate SAINT input files">
            if (ExportSaint && protID != null) {
                HashMap<String, DIAPack> Filemap = new HashMap<>();
                for (DIAPack DIAfile : FileList) {
                    Filemap.put(DIAfile.GetBaseName(), DIAfile);
                }

                FileWriter baitfile = new FileWriter(WorkFolder + "SAINT_Bait_" + DateTimeTag.GetTag() + ".txt");
                FileWriter preyfile = new FileWriter(WorkFolder + "SAINT_Prey_" + DateTimeTag.GetTag() + ".txt");
                FileWriter interactionfileMS1 = null;
                FileWriter interactionfileMS2 = null;
                if (SAINT_MS1) {
                    interactionfileMS1 = new FileWriter(WorkFolder + "SAINT_Interaction_MS1_" + DateTimeTag.GetTag() + ".txt");
                }
                if (SAINT_MS2) {
                    interactionfileMS2 = new FileWriter(WorkFolder + "SAINT_Interaction_MS2_" + DateTimeTag.GetTag() + ".txt");
                }
                HashMap<String, String> PreyID = new HashMap<>();

                // Control runs: bait table rows flagged "C".
                // NOTE(review): Filemap.get(...) below will NPE if a listed file was not
                // loaded into FileList (e.g. missing serialized LCMSID) — confirm inputs.
                for (String samplekey : ControlName.keySet()) {
                    String name = ControlName.get(samplekey);
                    for (String file : ControlList.get(samplekey)) {
                        baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "C\n");
                        LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary;
                        if (SAINT_MS1) {
                            SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID, 1);
                        }
                        if (SAINT_MS2) {
                            SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID, 2);
                        }
                    }
                }

                // Bait (test) runs: bait table rows flagged "T".
                for (String samplekey : BaitName.keySet()) {
                    String name = BaitName.get(samplekey);
                    for (String file : BaitList.get(samplekey)) {
                        baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "T\n");
                        LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary;
                        if (SAINT_MS1) {
                            SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID, 1);
                        }
                        if (SAINT_MS2) {
                            SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID, 2);
                        }
                    }
                }
                baitfile.close();
                if (SAINT_MS1) {
                    interactionfileMS1.close();
                }
                if (SAINT_MS2) {
                    interactionfileMS2.close();
                }
                // Prey table: one row per protein seen with non-zero abundance (accession + gene name).
                for (String AccNo : PreyID.keySet()) {
                    preyfile.write(AccNo + "\t" + PreyID.get(AccNo) + "\n");
                }
                preyfile.close();
            }
            //</editor-fold>

            Logger.getRootLogger().info("Job done");
            Logger.getRootLogger().info("=================================================================================================");

        } catch (Exception e) {
            Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
            throw e;
        }
    }

    /**
     * Writes one SAINT interaction row per master-list protein quantified in this
     * file, and registers each such protein in the shared prey map.
     *
     * @param protID       master protein list (defines which proteins are reported)
     * @param IDsummary    per-file identification summary
     * @param fragselection dataset-wide top peptide/fragment selection
     * @param interactionfile target SAINT interaction file
     * @param filename     raw file path (base name is used as the IP label)
     * @param samplename   bait/control sample name
     * @param PreyID       shared accession -> gene-name map (accumulated across calls)
     * @param quanttype    1 = MS1 iBAQ abundance, 2 = MS2 top-correlated-fragment abundance
     */
    private static void SaintOutput(LCMSID protID, LCMSID IDsummary, FragmentSelection fragselection,
            FileWriter interactionfile, String filename, String samplename, HashMap<String, String> PreyID,
            int quanttype) throws IOException {
        for (String key : protID.ProteinList.keySet()) {
            if (IDsummary.ProteinList.containsKey(key)) {
                ProtID protein = IDsummary.ProteinList.get(key);
                float abundance = 0f;
                if (quanttype == 1) {
                    abundance = protein.GetAbundanceByMS1_IBAQ();
                } else if (quanttype == 2) {
                    abundance = protein.GetAbundanceByTopCorrFragAcrossSample(fragselection.TopPeps.get(protein.getAccNo()), fragselection.TopFrags);
                }
                if (abundance > 0) {
                    interactionfile.write(FilenameUtils.getBaseName(filename) + "\t" + samplename + "\t" + protein.getAccNo() + "\t" + abundance + "\n");
                    if (!PreyID.containsKey(protein.getAccNo())) {
                        PreyID.put(protein.getAccNo(), /*protein.Sequence.length()+"\t"+*/ protein.GetGeneName());
                    }
                }
            }
        }
    }
}
/*
    Copyright 2009 Semantic Discovery, Inc. (www.semanticdiscovery.com)

    This file is part of the Semantic Discovery Toolkit.

    The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify
    it under the terms of the GNU Lesser General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    The Semantic Discovery Toolkit is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with The Semantic Discovery Toolkit.  If not, see <http://www.gnu.org/licenses/>.
*/
package org.sd.token;


import java.util.Map;
import org.sd.token.Token;
import org.sd.util.range.IntegerRange;
import org.sd.xml.DomElement;
import org.sd.xml.DomNode;
import org.sd.xml.DomUtil;

/**
 * Abstract TokenClassifier with convenience methods for adding features.
 * <p>
 * @author Spence Koehler
 */
public class TokenClassifierHelper {

  /** Registry of shared normalizers, keyed by id (may be null). */
  private Map<String, Normalizer> id2Normalizer;

  /** Normalizer to use when classifying token text. */
  private Normalizer normalizer;

  /** Maximum number of words considered for classification; 0 means unlimited. */
  private int maxWordCount;

  /** Valid range for a token's length (unlimited when null). */
  private IntegerRange validTokenLength;

  /** Classifier's name. */
  private String name;

  /** Constructs with no normalizer and an unlimited word count. */
  public TokenClassifierHelper() {
    init(null, 0);
  }

  /** Constructs with the given normalizer and word-count limit. */
  public TokenClassifierHelper(Normalizer normalizer, int maxWordCount) {
    init(normalizer, maxWordCount);
  }

  //
  // <classifierId validTokenLength='rangeExpr'>
  //   <maxWordCount>N</maxWordCount>
  //   <normalizer>...</normalizer>
  //   <jclass>...classifier class...</jclass>
  //   ...config elts for classifier class...
  // </classifierId>
  //
  /**
   * Constructs from a classifier configuration element (see format above),
   * resolving the normalizer through the given id registry.
   */
  public TokenClassifierHelper(DomElement classifierIdElement, Map<String, Normalizer> id2Normalizer) {
    this.id2Normalizer = id2Normalizer;

    final DomNode normalizerNode = (DomNode)classifierIdElement.selectSingleNode("normalizer");
    final int wordLimit = DomUtil.getSelectedNodeInt(classifierIdElement, "maxWordCount", 0);

    this.name = classifierIdElement.getAttributeValue("name", null);

    init(loadNormalizer(normalizerNode, id2Normalizer), wordLimit);

    // init() nulls validTokenLength, so parse the attribute afterwards.
    final String vtlString = classifierIdElement.getAttributeValue("validTokenLength", null);
    if (vtlString != null && !"".equals(vtlString)) {
      this.validTokenLength = new IntegerRange(vtlString);
    }
  }

  /** Shared constructor body. */
  private void init(Normalizer normalizer, int maxWordCount) {
    this.normalizer = normalizer;
    this.maxWordCount = maxWordCount;
    this.validTokenLength = null;
  }

  public Map<String, Normalizer> getId2Normalizer() {
    return id2Normalizer;
  }

  public Normalizer getNormalizer() {
    return normalizer;
  }

  public void setNormalizer(Normalizer normalizer) {
    this.normalizer = normalizer;
  }

  public int getMaxWordCount() {
    return maxWordCount;
  }

  public void setMaxWordCount(int maxWordCount) {
    this.maxWordCount = maxWordCount;
  }

  public IntegerRange getValidTokenLength() {
    return validTokenLength;
  }

  public void setValidTokenLength(IntegerRange validTokenLength) {
    this.validTokenLength = validTokenLength;
  }

  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  /** True when the token's length falls within the configured range (or no range is set). */
  public boolean lengthIsInRange(Token token) {
    return validTokenLength == null || validTokenLength.includes(token.getLength());
  }

  /** True when the token satisfies both the word-count and length constraints. */
  public boolean meetsConstraints(Token token) {
    final boolean wordCountOk = (maxWordCount == 0 || token.getWordCount() <= maxWordCount);
    return wordCountOk && lengthIsInRange(token);
  }

  /**
   * Classification utility to identify text as digits (requiring at least one true digit).
   * <p>
   * If true, asInt[0] will be the numerical value of the digits.
   */
  public static boolean isDigits(String text, int[] asInt) {
    return isDigits(text, asInt, true);
  }

  /**
   * Classification utility to identify text as digits, optionally allowing
   * OCR-confusable letters (o, l, i, B, b, s) to stand in for digits.
   * <p>
   * If true, asInt[0] will be the numerical value of the digits.
   */
  public static boolean isDigits(String text, int[] asInt, boolean requireTrueDigit) {
    int accum = 0;
    int place = 1;
    boolean sawRealDigit = !requireTrueDigit;
    boolean ok = true;

    // Scan right to left, accumulating place values.
    for (int idx = text.length() - 1; idx >= 0; --idx) {
      final int cp = text.codePointAt(idx);
      int digit;

      if (cp >= '0' && cp <= '9') {
        digit = cp - '0';
        sawRealDigit = true;
      }
      else {
        // Fall back to OCR-like digit interpretation.
        final Integer ocr = interpretMistakenDigit(cp);
        if (ocr == null) {
          ok = false;
          break;
        }
        digit = ocr;
      }

      if (digit > 0) accum += (digit * place);
      place *= 10;
    }

    // Only accept OCR-like errors if there was a true digit in the mix.
    if (!sawRealDigit) ok = false;

    if (ok) asInt[0] = accum;
    return ok;
  }

  /**
   * Classification utility to identify digits in text (requiring at least one true digit).
   * <p>
   * If true, asInt[0] will be the numerical value of the digits.
   */
  public static boolean hasDigits(String text, int[] asInt) {
    return hasDigits(text, asInt, true);
  }

  /**
   * Classification utility to identify digits anywhere in text; non-digit
   * characters are skipped but still advance the place value.
   * <p>
   * If true, asInt[0] will be the numerical value of the digits.
   */
  public static boolean hasDigits(String text, int[] asInt, boolean requireTrueDigit) {
    int accum = 0;
    int place = 1;
    boolean sawRealDigit = !requireTrueDigit;
    boolean found = false;

    // Scan right to left, accumulating place values.
    for (int idx = text.length() - 1; idx >= 0; --idx) {
      final int cp = text.codePointAt(idx);
      int digit = 0;

      if (cp >= '0' && cp <= '9') {
        digit = cp - '0';
        found = sawRealDigit = true;
      }
      else {
        // Accept OCR-like digit stand-ins without failing on other chars.
        final Integer ocr = interpretMistakenDigit(cp);
        if (ocr != null) {
          digit = ocr;
          found = true;
        }
      }

      if (digit > 0) accum += (digit * place);
      place *= 10;
    }

    // Only accept OCR-like errors if there was a true digit in the mix.
    if (!sawRealDigit) found = false;

    if (found) asInt[0] = accum;
    return found;
  }

  /**
   * Maps an OCR-confusable character to the digit it likely stands for,
   * or null when no mapping applies (o->0, l/i->1, B->8, b->6, s->5).
   */
  public static Integer interpretMistakenDigit(int c) {
    switch (Character.toLowerCase(c)) {
      case 'o': return 0;
      case 'l':
      case 'i': return 1;
      case 'b': return (c == 'B') ? 8 : 6;
      case 's': return 5;
      default: return null;
    }
  }

  /**
   * Add a feature with the given type and value and P=1.0 to the token.
   */
  public void addFeature(Token token, String type, String value) {
    addFeature(token, type, value, 1.0);
  }

  /**
   * Add a feature with the given type, value, and probability to the token.
   */
  public void addFeature(Token token, String type, String value, double p) {
    token.getFeatures().add(new Feature(type, value, p, this));
  }

  /**
   * Get the token's normalized text according to this Classifier's
   * normalizer, returning null if the MaxWordCount constraint is not met.
   * Uses the normalizer's token feature as a per-token cache when available.
   */
  public String getNormalizedText(Token token) {
    if (maxWordCount > 0 && token.getWordCount() > maxWordCount) return null;

    String result = token.getText();
    if (normalizer == null) return result;

    final String cacheFeature = normalizer.getTokenFeature();

    if (cacheFeature != null) {
      // Reuse a previously computed normalization if cached on the token.
      final Object cached = token.getFeatureValue(cacheFeature, normalizer);
      if (cached != null) return cached.toString();
    }

    result = normalizer.normalize(token.getText());
    if (cacheFeature != null) {
      token.setFeature(cacheFeature, result, normalizer);
    }

    return result;
  }

  /** Normalizes raw text with this classifier's normalizer (identity when none). */
  public String normalize(String text) {
    return (normalizer == null) ? text : normalizer.normalize(text);
  }

  /**
   * Resolves a normalizer for the given config node: first by registry id,
   * then by building a StandardNormalizer from an inline 'options' element.
   */
  private Normalizer loadNormalizer(DomNode domNode, Map<String, Normalizer> id2Normalizer) {
    if (domNode == null) return null;

    final String id = domNode.getAttributeValue("id");
    Normalizer result = id2Normalizer.get(id);

    if (result == null) {
      // Load from 'options'.
      final DomElement optionsElement = (DomElement)domNode.selectSingleNode("options");
      if (optionsElement != null) {
        result = new StandardNormalizer(new StandardNormalizerOptions(optionsElement));
      }
    }

    return result;
  }
}
package com.borismus.webintent;

import java.util.HashMap;
import java.util.Map;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.content.Intent;
import android.net.Uri;
import android.text.Html;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;

/**
 * WebIntent is a PhoneGap plugin that bridges Android intents and web
 * applications:
 *
 * 1. web apps can spawn intents that call native Android applications. 2.
 * (after setting up correct intent filters for PhoneGap applications), Android
 * intents can be handled by PhoneGap web applications.
 *
 * @author boris@borismus.com
 *
 */
public class WebIntent extends CordovaPlugin {

    // Callback registered via the "onNewIntent" action; kept open so that
    // onNewIntent() can deliver data for every subsequent intent.
    private CallbackContext callbackContext = null;

    /**
     * Executes the request and returns PluginResult.
     *
     * @param action
     *            The action to execute.
     * @param args
     *            JSONArray of arguments for the plugin.
     * @param callbackContext
     *            The callback context used when calling back into JavaScript.
     * @return true on success, false on failure.
     * @throws JSONException if the arguments cannot be parsed.
     */
    public boolean execute(String action, JSONArray args, final CallbackContext callbackContext) throws JSONException {
        try {
            this.callbackContext = callbackContext;

            if (action.equals("startActivity")) {
                if (args.length() != 1) {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
                    return false;
                }

                // Parse the arguments
                JSONObject obj = args.getJSONObject(0);
                String type = obj.has("type") ? obj.getString("type") : null;
                Uri uri = obj.has("url") ? Uri.parse(obj.getString("url")) : null;
                JSONObject extras = obj.has("extras") ? obj.getJSONObject("extras") : null;

                startActivity(obj.getString("action"), uri, type, parseExtras(extras));
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK));
                return true;
            } else if (action.equals("hasExtra")) {
                if (args.length() != 1) {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
                    return false;
                }
                Intent i = this.cordova.getActivity().getIntent();
                String extraName = args.getString(0);
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, i.hasExtra(extraName)));
                return true;
            } else if (action.equals("getExtra")) {
                if (args.length() != 1) {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
                    return false;
                }
                Intent i = this.cordova.getActivity().getIntent();
                String extraName = args.getString(0);
                if (i.hasExtra(extraName)) {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, i.getStringExtra(extraName)));
                    return true;
                } else {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR));
                    return false;
                }
            } else if (action.equals("getUri")) {
                if (args.length() != 0) {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
                    return false;
                }
                Intent i = this.cordova.getActivity().getIntent();
                String uri = i.getDataString();
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, uri));
                return true;
            } else if (action.equals("onNewIntent")) {
                if (args.length() != 0) {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
                    return false;
                }

                // Answer with NO_RESULT but keep the callback open so that
                // onNewIntent() can stream data to it later.
                PluginResult result = new PluginResult(PluginResult.Status.NO_RESULT);
                result.setKeepCallback(true);
                callbackContext.sendPluginResult(result);
                return true;
            } else if (action.equals("sendBroadcast")) {
                if (args.length() != 1) {
                    callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
                    return false;
                }

                // Parse the arguments
                JSONObject obj = args.getJSONObject(0);
                JSONObject extras = obj.has("extras") ? obj.getJSONObject("extras") : null;

                sendBroadcast(obj.getString("action"), parseExtras(extras));
                callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.OK));
                return true;
            }
            callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
            return false;
        } catch (JSONException e) {
            e.printStackTrace();
            callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.JSON_EXCEPTION));
            return false;
        }
    }

    @Override
    public void onNewIntent(Intent intent) {
        if (this.callbackContext != null) {
            // Fix: send the keep-callback result. The previous code built this
            // result, set keepCallback, but then called success() instead —
            // which both left the result unused and closed the callback, so
            // any intents after the first were silently dropped.
            PluginResult result = new PluginResult(PluginResult.Status.OK, intent.getDataString());
            result.setKeepCallback(true);
            this.callbackContext.sendPluginResult(result);
        }
    }

    /**
     * Copies the string values of a JSON "extras" object into a map.
     * Extracted to remove the duplicated parsing loop that existed in both
     * the startActivity and sendBroadcast branches.
     *
     * @param extras the JSON object holding the extras; may be null
     * @return a (possibly empty) map from extra name to string value
     * @throws JSONException if a value cannot be read as a string
     */
    private Map<String, String> parseExtras(JSONObject extras) throws JSONException {
        Map<String, String> extrasMap = new HashMap<String, String>();
        if (extras != null) {
            JSONArray extraNames = extras.names();
            for (int i = 0; i < extraNames.length(); i++) {
                String key = extraNames.getString(i);
                extrasMap.put(key, extras.getString(key));
            }
        }
        return extrasMap;
    }

    /**
     * Builds and launches an intent for the given action.
     *
     * @param action the Android intent action
     * @param uri optional data URI
     * @param type optional MIME type
     * @param extras string extras to attach to the intent
     */
    void startActivity(String action, Uri uri, String type, Map<String, String> extras) {
        Intent i = (uri != null ? new Intent(action, uri) : new Intent(action));

        if (type != null && uri != null) {
            i.setDataAndType(uri, type); //Fix the crash problem with android 2.3.6
        } else {
            if (type != null) {
                i.setType(type);
            }
        }

        for (String key : extras.keySet()) {
            String value = extras.get(key);
            // If type is text html, the extra text must sent as HTML.
            // Fix: compare with "text/html".equals(type) — type may be null
            // here, and type.equals(...) previously threw a NullPointerException.
            if (key.equals(Intent.EXTRA_TEXT) && "text/html".equals(type)) {
                i.putExtra(key, Html.fromHtml(value));
            } else if (key.equals(Intent.EXTRA_STREAM)) {
                // allowes sharing of images as attachments.
                // value in this case should be a URI of a file
                i.putExtra(key, Uri.parse(value));
            } else if (key.equals(Intent.EXTRA_EMAIL)) {
                // allows to add the email address of the receiver
                i.putExtra(Intent.EXTRA_EMAIL, new String[] { value });
            } else {
                i.putExtra(key, value);
            }
        }
        this.cordova.getActivity().startActivity(i);
    }

    /**
     * Sends a broadcast intent with the given action and string extras.
     */
    void sendBroadcast(String action, Map<String, String> extras) {
        Intent intent = new Intent();
        intent.setAction(action);
        for (String key : extras.keySet()) {
            intent.putExtra(key, extras.get(key));
        }
        this.cordova.getActivity().sendBroadcast(intent);
    }
}
/* * Copyright 2013-2018 Jonathan Vasquez <jon@xyinn.org> * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation and/or * other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package com.vasquez.Windows;

import com.vasquez.EntryWithModel;
import com.vasquez.Listing;

import java.awt.BorderLayout;
import java.awt.Dialog;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;

import javax.swing.BorderFactory;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JDialog;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.filechooser.FileNameExtensionFilter;

/**
 * Modal dialog for adding a new game-version entry to the entry table.
 */
public class AddWindow extends JDialog {
    protected EntryWithModel tableManager;
    protected JTable entryTable;
    protected JComboBox version;
    protected JTextField path;
    protected JTextField flags;
    protected JTextField label;
    protected JCheckBox expansion;

    public AddWindow(JFrame mainWindow, EntryWithModel tableManager, JTable entryTable) {
        super(mainWindow, "Add Entry", Dialog.ModalityType.DOCUMENT_MODAL);

        // Set window properties
        setDefaultCloseOperation(DISPOSE_ON_CLOSE);
        setResizable(false);

        // Bring in the table manager and entry table resources
        this.tableManager = tableManager;
        this.entryTable = entryTable;

        // Create components and listeners
        JButton find = new JButton("Set Path");
        JButton add = new JButton("Add");
        JButton cancel = new JButton("Cancel");

        find.addActionListener(new FindGameListener(this));
        add.addActionListener(new AddListener());
        cancel.addActionListener(new CancelListener());

        JLabel versionL = new JLabel("Version:");
        JLabel labelL = new JLabel("Label:");
        JLabel pathL = new JLabel("Path (Game.exe):");
        JLabel flagsL = new JLabel("Flags:");

        version = new JComboBox(Listing.classicVersions);
        path = new JTextField();
        flags = new JTextField();
        label = new JTextField();
        expansion = new JCheckBox("Expansion");

        path.setEditable(false);
        // Register the listeners before selecting the initial index; selecting
        // index 0 on a fresh combo box fires no change event, so the label is
        // primed explicitly below.
        version.addItemListener(new ComboBoxListener());
        expansion.addItemListener(new ExpansionListener());
        version.setSelectedIndex(0);
        setLabelToCurrentComboBoxVersion();

        // Create the layout and add the components to their respective places
        JPanel centerPanel = new JPanel(new GridLayout(4, 2));
        JPanel southPanel = new JPanel(new GridLayout(1, 2));

        southPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
        centerPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 0, 10));

        getContentPane().add(BorderLayout.CENTER, centerPanel);
        getContentPane().add(BorderLayout.SOUTH, southPanel);

        southPanel.add(expansion);
        southPanel.add(find);
        southPanel.add(add);
        southPanel.add(cancel);

        centerPanel.add(versionL);
        centerPanel.add(version);
        centerPanel.add(labelL);
        centerPanel.add(label);
        centerPanel.add(pathL);
        centerPanel.add(path);
        centerPanel.add(flagsL);
        centerPanel.add(flags);
    }

    /** Validates the form and, when valid, adds the new entry and closes the dialog. */
    private class AddListener implements ActionListener {
        public void actionPerformed(ActionEvent ev) {
            String currentLabel = label.getText();
            String selectedVersion = version.getSelectedItem().toString();
            String selectedPath = getPathToUse();
            if (selectedPath == null) return;

            // Used to set the row to the new value added to the list
            int result = 0;

            if (!selectedVersion.isEmpty() && !currentLabel.isEmpty()) {
                // Check to see that the path we want to rename to doesn't already exist (if it's a different name)
                if (tableManager.doesEntryExistForThisMode(currentLabel, expansion.isSelected(), selectedVersion, selectedPath)) {
                    // display error message
                    JOptionPane.showMessageDialog(entryTable,
                        "There is an entry that already exists with the name: " + currentLabel + "!",
                        "Entry already exists", JOptionPane.ERROR_MESSAGE);
                    return;
                }

                result = tableManager.addEntry(
                    version.getSelectedItem().toString(),
                    selectedPath,
                    flags.getText(),
                    label.getText(),
                    expansion.isSelected());
                entryTable.setRowSelectionInterval(result, result);
            }

            entryTable.repaint();
            dispose();
        }
    }

    /**
     * Resolves the game path to store for the new entry.
     *
     * @return the path from the text field, the first existing entry's path when
     *         the field is blank, or null (after showing an error) when no path
     *         can be determined.
     */
    private String getPathToUse() {
        String pathToUse = path.getText();

        // Quality of Life. If the user leaves the path blank,
        // use the first one on the list if available. Otherwise,
        // display error message.
        if (pathToUse.isEmpty()) {
            if (tableManager.getSize() == 0) {
                // display error message
                JOptionPane.showMessageDialog(entryTable,
                    "Unable to determine the path since you don't have any entries to automatically pull the path from!",
                    "Unable to deduce Path", JOptionPane.ERROR_MESSAGE);
                return null;
            }
            pathToUse = tableManager.getEntry(0).Path;
        }

        return pathToUse;
    }

    /** Closes the dialog without adding an entry. */
    private class CancelListener implements ActionListener {
        public void actionPerformed(ActionEvent ev) {
            dispose();
        }
    }

    /** Opens a file chooser so the user can point at the Game.exe to use. */
    private class FindGameListener implements ActionListener {
        private JDialog parentWindow;

        public FindGameListener(JDialog window) {
            parentWindow = window;
        }

        public void actionPerformed(ActionEvent ev) {
            JFileChooser fc = new JFileChooser();
            FileNameExtensionFilter filter = new FileNameExtensionFilter("Game.exe", "exe");
            fc.setFileFilter(filter);
            fc.setCurrentDirectory(new File("C:\\"));

            int result = fc.showOpenDialog(parentWindow);
            if (result == JFileChooser.APPROVE_OPTION) {
                File file = fc.getSelectedFile();
                path.setText(file.getAbsolutePath());
            }
        }
    }

    /** Swaps the version list between classic and expansion when the checkbox toggles. */
    private class ExpansionListener implements ItemListener {
        public void itemStateChanged(ItemEvent arg0) {
            DefaultComboBoxModel d = null;
            if (expansion.isSelected()) {
                d = new DefaultComboBoxModel(Listing.expansionVersions);
            } else {
                d = new DefaultComboBoxModel(Listing.classicVersions);
            }
            version.setModel(d);
            version.setSelectedIndex(0);
            setLabelToCurrentComboBoxVersion();
        }
    }

    /** Keeps the label field in sync with the selected version. */
    private class ComboBoxListener implements ItemListener {
        public void itemStateChanged(ItemEvent arg0) {
            setLabelToCurrentComboBoxVersion();
        }
    }

    // Renamed from SetLabelToCurrentComboBoxVersion to follow Java method
    // naming conventions (private helper; all call sites are in this class).
    private void setLabelToCurrentComboBoxVersion() {
        label.setText(version.getSelectedItem().toString());
    }
}
/*
 * Copyright 2012 SURFnet bv, The Netherlands
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package teams.domain;

import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import com.google.common.collect.Lists;

import javax.persistence.Transient;

/**
 * Domain object for a team: identifier, name, description, members,
 * visibility, and the stem it belongs to. Equality is based solely on the id.
 */
@SuppressWarnings("serial")
public class Team implements Serializable {

  private String id;
  private String name;
  private String description;
  private List<Member> members = new ArrayList<>();
  // Role of the person viewing this team; not persisted.
  @Transient
  private Role viewerRole;
  private boolean viewable;
  private int numberOfMembers;
  private Stem stem;

  public Team() {
  }

  public Team(String id) {
    this.id = id;
  }

  /**
   * @param id of the team
   * @param name of the team
   * @param description extra description
   * @param members {@link List} of {@link Member}'s
   */
  public Team(String id, String name, String description, List<Member> members) {
    this.id = id;
    this.name = name;
    this.description = description;
    this.members = Lists.newArrayList(members);
  }

  /**
   * @param id of the team
   * @param name of the team
   * @param description extra description
   * @param members {@link List} of {@link Member}'s
   * @param viewable if {@literal false} then it's a private team
   */
  public Team(String id, String name, String description, List<Member> members,
              boolean viewable) {
    this(id, name, description, members);
    this.viewable = viewable;
  }

  public Team(String id, String name, String description, List<Member> members,
              boolean viewable, int numberOfMembers) {
    this(id, name, description, members);
    this.viewable = viewable;
    this.numberOfMembers = numberOfMembers;
  }

  /**
   * @param id of the team
   * @param name of the team
   * @param description extra description
   * @param members {@link List} of {@link Member}'s
   * @param stem {@link Stem} of this team
   * @param viewable if {@literal false} then it's a private team
   */
  public Team(String id, String name, String description, List<Member> members,
              Stem stem, boolean viewable) {
    this(id, name, description, members, viewable);
    this.stem = stem;
  }

  /**
   * @param id of the team
   * @param name of the team
   * @param description extra description
   */
  public Team(String id, String name, String description) {
    this(id, name, description, new ArrayList<>());
  }

  /**
   * @param id of the team
   * @param name of the team
   * @param description extra description
   * @param viewable if {@literal false} then it's a private team
   */
  public Team(String id, String name, String description, boolean viewable) {
    this(id, name, description, new ArrayList<>());
    this.viewable = viewable;
  }

  /**
   * @param id {@link String the identifier of the team}
   * @param name {@link String} the name of the team
   * @param description {@link String} the description of the team
   * @param stem {@link teams.domain.Stem} the stem that this team belongs to
   * @param viewable if {@literal false} then it's a private team
   */
  public Team(String id, String name, String description, Stem stem, boolean viewable) {
    this(id, name, description, viewable);
    this.stem = stem;
  }

  /**
   * @return the id
   */
  public String getId() {
    return id;
  }

  /**
   * @return the name
   */
  public String getName() {
    return name;
  }

  /**
   * @return the description
   */
  public String getDescription() {
    return description;
  }

  /**
   * Converts all line endings with an HTML line break &lt;br/&gt;
   *
   * @return description with HTML line breaks, can be {@literal null}
   */
  public String getDescriptionAsHtml() {
    if (description == null) {
      return null;
    }
    return StringEscapeUtils.escapeHtml(description).replaceAll("\n", "<br/>");
  }

  /**
   * @return the members, sorted by member name (sorts the internal list in place)
   */
  public List<Member> getMembers() {
    Collections.sort(members, new MemberComparator());
    return members;
  }

  /**
   * Add one or more members.
   *
   * @param member the new member(s)
   */
  public void addMembers(Member... member) {
    // Collections.addAll replaces the previous manual index loop.
    Collections.addAll(members, member);
  }

  /**
   * Remove members
   *
   * @param member varag of {@link Member}
   */
  public void removeMembers(Member... member) {
    members.removeAll(Arrays.asList(member));
  }

  /**
   * @param name the name to set
   */
  public void setName(String name) {
    this.name = name;
  }

  /**
   * @param description the description to set
   */
  public void setDescription(String description) {
    this.description = description;
  }

  /**
   * @param role id of the person to assign the viewerRole to
   */
  public void setViewerRole(Role role) {
    this.viewerRole = role;
  }

  /**
   * @return the viewerRole
   */
  public Role getViewerRole() {
    return viewerRole;
  }

  /**
   * @param viewable the viewable to set
   */
  public void setViewable(boolean viewable) {
    this.viewable = viewable;
  }

  /**
   * @return the viewable
   */
  public boolean isViewable() {
    return viewable;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((id == null) ? 0 : id.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    Team other = (Team) obj;
    if (id == null) {
      if (other.id != null) {
        return false;
      }
    } else if (!id.equals(other.id)) {
      return false;
    }
    return true;
  }

  // Added the previously missing @Override annotation.
  @Override
  public String toString() {
    return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
        .append("id", id)
        .append("name", name)
        .toString();
  }

  /**
   * Get the stem that this team belongs to
   *
   * @return {@link Stem} the stem that this team belongs to
   */
  public Stem getStem() {
    return stem;
  }

  /**
   * Set the stem that this team belongs to
   *
   * @param stem {@link Stem} the stem that this team belongs to
   */
  public void setStem(Stem stem) {
    this.stem = stem;
  }

  /**
   * Comparator to sort members by name
   */
  class MemberComparator implements Comparator<Member> {
    @Override
    public int compare(Member member1, Member member2) {
      return member1.getName().compareToIgnoreCase(member2.getName());
    }
  }

  /**
   * @return the numberOfMembers
   */
  public int getNumberOfMembers() {
    return numberOfMembers;
  }

  /**
   * @param numberOfMembers the numberOfMembers to set
   */
  public void setNumberOfMembers(int numberOfMembers) {
    this.numberOfMembers = numberOfMembers;
  }

  public void setId(String id) {
    this.id = id;
  }

  public void setMembers(List<Member> members) {
    this.members = members;
  }
}
package com.andreashedin.infowallpaper;

import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;

import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlPullParserFactory;

import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

/**
 * Substitutes current/forecast weather values from WeatherHandler into
 * wallpaper info strings containing the "#w..." placeholder tags, and
 * resolves weather condition icons from the app's assets.
 */
public class WeatherDataCollector extends DataCollector {
    private static final String WEATHER_CURRENT_CONDITION = "#wcc";
    private static final String WEATHER_CURRENT_CONDITION_ICON = "#wcci";
    private static final String WEATHER_CURRENT_TEMPERATURE = "#wct";
    private static final String WEATHER_HIGH = "#wh+";
    private static final String WEATHER_LOW = "#wl+";
    private static final String WEATHER_CONDITION = "#wc+";
    private static final String WEATHER_CONDITION_ICON = "#wci+";
    private static final String WEATHER_LOCATION = "#wloc";

    String mLocation = "";

    WeatherDataCollector(LiveInfoWallpaper parent) {
        super(parent);
    }

    // Cached forecast data, one slot per day; filled in update().
    private WeatherHandler.WeatherData[] mDays = new WeatherHandler.WeatherData[WeatherHandler.MAX_DAYS];

    /** True when the string contains any weather placeholder tag. */
    public static boolean contains(String str) {
        if (str.contains(WEATHER_CURRENT_CONDITION)) return true;
        if (str.contains(WEATHER_CURRENT_CONDITION_ICON)) return true;
        if (str.contains(WEATHER_CURRENT_TEMPERATURE)) return true;
        if (str.contains(WEATHER_LOCATION)) return true;
        if (str.contains(WEATHER_HIGH)) return true;
        if (str.contains(WEATHER_LOW)) return true;
        if (str.contains(WEATHER_CONDITION)) return true;
        if (str.contains(WEATHER_CONDITION_ICON)) return true;

        return false;
    }

    /** True when the string contains an icon placeholder (rendered as a bitmap, not text). */
    public static boolean useIcon(String str) {
        if (str.contains(WEATHER_CONDITION_ICON) || str.contains(WEATHER_CURRENT_CONDITION_ICON))
            return true;

        return false;
    }

    /** Returns placeholder sample text for previewing the tags in the settings UI. */
    public static String getSampleText(String in) {
        String out = in;

        if (in.contains(WEATHER_CURRENT_CONDITION_ICON) || in.contains(WEATHER_CONDITION_ICON)) {
            out = "#icon#";
        }
        else {
            out = out.replace(WEATHER_CURRENT_CONDITION, "Condition");
            out = out.replace(WEATHER_CURRENT_TEMPERATURE, "00");
            out = out.replace(WEATHER_HIGH, "0");
            out = out.replace(WEATHER_LOW, "0");
            out = out.replace(WEATHER_CONDITION, "Condition");
            out = out.replace(WEATHER_LOCATION, "City, Country");
        }

        return out;
    }

    @Override
    String updateInfoString(String str, boolean numbersAsText) {
        String out = "";
        String number = "";

        // Icon placeholders are handled by getIcon(); text substitution only here.
        if (!useIcon(str)) {
            if (mDays[0] != null) {
                out = str.replace(WEATHER_CURRENT_CONDITION, mDays[0].condition);

                if (numbersAsText) {
                    int i = Integer.parseInt(mDays[0].getTemp());
                    number = getNumberAsText(i);
                }
                else
                    number = mDays[0].getTemp();

                out = out.replace(WEATHER_CURRENT_TEMPERATURE, number);
                out = out.replace(WEATHER_LOCATION, mLocation);
            }

            out = doTodayPlusN(0, out, numbersAsText);
            out = doTodayPlusN(1, out, numbersAsText);
            out = doTodayPlusN(2, out, numbersAsText);
            out = doTodayPlusN(3, out, numbersAsText);
        }

        return out;
    }

    private String mLastIcon = "";
    private Bitmap mWeatherIcon = null;

    /**
     * Resolves the condition icon for the given placeholder string, decoding
     * it from the "icons/" asset folder and caching the bitmap until the
     * icon filename changes.
     */
    public Bitmap getIcon(String str) {
        String file = "";

        if (str.contains(WEATHER_CURRENT_CONDITION_ICON) && mDays[0] != null) {
            file = getIconFilename(mDays[0].condition);
        }
        else {
            int index = str.indexOf(WEATHER_CONDITION_ICON);
            if (index >= 0) {
                index += WEATHER_CONDITION_ICON.length();

                int day = -1;
                try {
                    // Fix: the parsed value was previously discarded, leaving
                    // day at -1 so per-day icons (#wci+N) never resolved.
                    day = Integer.parseInt(str.substring(index, index + 1));
                }
                catch (NumberFormatException e) {
                    // no digit after the tag; day stays -1 and no icon is chosen
                }

                if (day >= 0 && day < WeatherHandler.MAX_DAYS) {
                    if (mDays[day] != null) {
                        file = getIconFilename(mDays[day].condition);
                    }
                }
            }
        }

        if (file.length() > 0) {
            // Fix: compare contents with equals(); the previous reference
            // comparison (file != mLastIcon) was always true for freshly
            // built strings and defeated the cache.
            if (!file.equals(mLastIcon) && mParent != null) {
                mLastIcon = file;

                try {
                    AssetManager assets = mParent.getAssets();
                    InputStream is = assets.open("icons/" + file);
                    mWeatherIcon = BitmapFactory.decodeStream(is);
                }
                catch (Exception e) {
                    mLastIcon = "";
                    mWeatherIcon = null;
                }
            }
        }

        return mWeatherIcon;
    }

    /** Replaces the day-n placeholders (#wc+n, #wh+n, #wl+n) in str. */
    String doTodayPlusN(int n, String str, boolean numbersAsText) {
        String out = str;
        String number = "";

        if (n >= WeatherHandler.MAX_DAYS || mDays[n] == null)
            return out;

        String toReplace = WEATHER_CONDITION + n;
        out = out.replace(toReplace, mDays[n].condition);

        toReplace = WEATHER_HIGH + n;
        if (numbersAsText) {
            int i = Integer.parseInt(mDays[n].getHighTemp());
            number = getNumberAsText(i);
        }
        else
            number = mDays[n].getHighTemp();
        out = out.replace(toReplace, number);

        toReplace = WEATHER_LOW + n;
        if (numbersAsText) {
            int i = Integer.parseInt(mDays[n].getLowTemp());
            number = getNumberAsText(i);
        }
        else
            number = mDays[n].getLowTemp();
        out = out.replace(toReplace, number);

        return out;
    }

    @Override
    void update(Object object) {
        mLocation = WeatherHandler.instance().getLocation();

        for (int i = 0; i < WeatherHandler.MAX_DAYS; ++i) {
            mDays[i] = WeatherHandler.instance().getData(i);
            mDays[i].originalCondition = mDays[i].condition;
            mDays[i].condition = getLocalizedString(mDays[i].originalCondition);
        }
    }

    /** Maps an English condition name to its localized string resource. */
    String getLocalizedString(String str) {
        String out = str;

        if (str.equals("Clear"))
            out = mParent.getString(R.string.Clear_title);
        else if (str.equals("Cloudy"))
            out = mParent.getString(R.string.Cloudy_title);
        else if (str.equals("Fog"))
            out = mParent.getString(R.string.Fog_title);
        else if (str.equals("Haze"))
            out = mParent.getString(R.string.Haze_title);
        else if (str.equals("Light Rain"))
            out = mParent.getString(R.string.LightRain_title);
        else if (str.equals("Mostly Cloudy"))
            out = mParent.getString(R.string.MostlyCloudy_title);
        else if (str.equals("Mostly Sunny"))
            out = mParent.getString(R.string.MostlySunny_title);
        else if (str.equals("Overcast"))
            out = mParent.getString(R.string.Overcast_title);
        else if (str.equals("Partly Cloudy"))
            out = mParent.getString(R.string.PartlyCloudy_title);
        else if (str.equals("Rain"))
            out = mParent.getString(R.string.Rain_title);
        else if (str.equals("Rain Showers"))
            out = mParent.getString(R.string.RainShowers_title);
        else if (str.equals("Showers"))
            out = mParent.getString(R.string.Showers_title);
        else if (str.equals("Thunderstorm"))
            out = mParent.getString(R.string.Thunderstorm_title);
        else if (str.equals("Chance of Showers"))
            out = mParent.getString(R.string.ChanceofShowers_title);
        else if (str.equals("Chance of Storm"))
            out = mParent.getString(R.string.ChanceofStorm_title);
        else if (str.equals("Chance of Snow"))
            out = mParent.getString(R.string.ChanceofSnow_title);
        else if (str.equals("Chance of Rain"))
            out = mParent.getString(R.string.ChanceofRain_title);
        else if (str.equals("Partly Sunny"))
            out = mParent.getString(R.string.PartlySunny_title);
        else if (str.equals("Scattered Showers"))
            out = mParent.getString(R.string.ScatteredShowers_title);
        else if (str.equals("Sunny"))
            out = mParent.getString(R.string.Sunny_title);

        return out;
    }

    /** Builds the icon asset filename for a condition using the configured icon set pattern. */
    public String getIconFilename(String str) {
        String file = "";
        str = str.toLowerCase().replace(" ", "_");
        file = String.format(WeatherHandler.instance().getIconSet(), str);
        return file;
    }
}
package org.jsonstructure.jackson.validator.simpleregexp;

// Generated from SimpleRegexp.g4 by ANTLR 4.6
// NOTE(review): auto-generated code -- regenerate from SimpleRegexp.g4 rather
// than editing this file by hand; manual changes will be lost.

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.TerminalNode;

/**
 * This class provides an empty implementation of {@link SimpleRegexpListener},
 * which can be extended to create a listener which only needs to handle a subset
 * of the available methods.
 */
public class SimpleRegexpBaseListener implements SimpleRegexpListener {
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterStart(SimpleRegexpParser.StartContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitStart(SimpleRegexpParser.StartContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterCharacterClass(SimpleRegexpParser.CharacterClassContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitCharacterClass(SimpleRegexpParser.CharacterClassContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterCharacterClassExpression(SimpleRegexpParser.CharacterClassExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitCharacterClassExpression(SimpleRegexpParser.CharacterClassExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterCharacterClassRange(SimpleRegexpParser.CharacterClassRangeContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitCharacterClassRange(SimpleRegexpParser.CharacterClassRangeContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterCharacterClassAtom(SimpleRegexpParser.CharacterClassAtomContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitCharacterClassAtom(SimpleRegexpParser.CharacterClassAtomContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterCharacterClassEscaped(SimpleRegexpParser.CharacterClassEscapedContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitCharacterClassEscaped(SimpleRegexpParser.CharacterClassEscapedContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterQuantifier(SimpleRegexpParser.QuantifierContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitQuantifier(SimpleRegexpParser.QuantifierContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterSimpleQuantifier(SimpleRegexpParser.SimpleQuantifierContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitSimpleQuantifier(SimpleRegexpParser.SimpleQuantifierContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterRangeQuantifier(SimpleRegexpParser.RangeQuantifierContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitRangeQuantifier(SimpleRegexpParser.RangeQuantifierContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterRangeQuantifierExpression(SimpleRegexpParser.RangeQuantifierExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitRangeQuantifierExpression(SimpleRegexpParser.RangeQuantifierExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterRangeQuantifierExact(SimpleRegexpParser.RangeQuantifierExactContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitRangeQuantifierExact(SimpleRegexpParser.RangeQuantifierExactContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterRangeQuantifierMinMax(SimpleRegexpParser.RangeQuantifierMinMaxContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitRangeQuantifierMinMax(SimpleRegexpParser.RangeQuantifierMinMaxContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterRangeQuantifierMin(SimpleRegexpParser.RangeQuantifierMinContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitRangeQuantifierMin(SimpleRegexpParser.RangeQuantifierMinContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterNumber(SimpleRegexpParser.NumberContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitNumber(SimpleRegexpParser.NumberContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterTerm(SimpleRegexpParser.TermContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitTerm(SimpleRegexpParser.TermContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterParenExpression(SimpleRegexpParser.ParenExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitParenExpression(SimpleRegexpParser.ParenExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterOrExpression(SimpleRegexpParser.OrExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitOrExpression(SimpleRegexpParser.OrExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterExpression(SimpleRegexpParser.ExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitExpression(SimpleRegexpParser.ExpressionContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterAtomEscaped(SimpleRegexpParser.AtomEscapedContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitAtomEscaped(SimpleRegexpParser.AtomEscapedContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterAtom(SimpleRegexpParser.AtomContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitAtom(SimpleRegexpParser.AtomContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterCharacterClassSimple(SimpleRegexpParser.CharacterClassSimpleContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitCharacterClassSimple(SimpleRegexpParser.CharacterClassSimpleContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void enterEveryRule(ParserRuleContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void exitEveryRule(ParserRuleContext ctx) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void visitTerminal(TerminalNode node) { }
	/** {@inheritDoc} <p>The default implementation does nothing.</p> */
	@Override public void visitErrorNode(ErrorNode node) { }
}
/******************************************************************************* * * Copyright 2015 Impetus Infotech. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. ******************************************************************************/ package com.impetus.client.hbase.generatedId; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.GenerationType; import javax.persistence.Persistence; import junit.framework.Assert; import org.junit.After; import org.junit.Before; import org.junit.Test; import com.impetus.client.hbase.HBaseClient; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdDefault; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdStrategyAuto; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdStrategyIdentity; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdStrategySequence; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdStrategyTable; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdWithOutSequenceGenerator; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdWithOutTableGenerator; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdWithSequenceGenerator; import com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdWithTableGenerator; import 
com.impetus.client.hbase.generatedId.entites.HBaseGeneratedIdWithTableGeneratorWihtoutInit; import com.impetus.client.hbase.testingutil.HBaseTestingUtils; import com.impetus.kundera.KunderaException; /** * The Class HBaseGeneratedIdTest. * * @author Pragalbh Garg */ public class HBaseGeneratedIdTest { /** The Constant SCHEMA. */ private static final String SCHEMA = "HBaseNew"; /** The Constant HBASE_PU. */ private static final String HBASE_PU = "autoIdTest"; /** The emf. */ private EntityManagerFactory emf; /** * Sets the up. * * @throws Exception * the exception */ @Before public void setUp() throws Exception { emf = Persistence.createEntityManagerFactory(HBASE_PU); } /** * Tear down. * * @throws Exception * the exception */ @After public void tearDown() throws Exception { emf.close(); HBaseTestingUtils.dropSchema(SCHEMA); } /** * Test persist. */ @Test public void testPersist() { EntityManager em = emf.createEntityManager(); try { HBaseGeneratedIdDefault idDefault = new HBaseGeneratedIdDefault(); idDefault.setName("kuldeep"); em.persist(idDefault); Assert.fail(); } catch (KunderaException e) { Assert.assertEquals("java.lang.IllegalArgumentException: " + GenerationType.class.getSimpleName() + "." + GenerationType.AUTO + " Strategy not supported by this client :" + HBaseClient.class.getName(), e.getMessage()); } try { HBaseGeneratedIdStrategyAuto strategyAuto = new HBaseGeneratedIdStrategyAuto(); strategyAuto.setName("kuldeep"); em.persist(strategyAuto); Assert.fail(); } catch (KunderaException e) { Assert.assertEquals("java.lang.IllegalArgumentException: " + GenerationType.class.getSimpleName() + "." 
+ GenerationType.AUTO + " Strategy not supported by this client :" + HBaseClient.class.getName(), e.getMessage()); } try { HBaseGeneratedIdStrategyIdentity strategyIdentity = new HBaseGeneratedIdStrategyIdentity(); strategyIdentity.setName("kuldeep"); em.persist(strategyIdentity); Assert.fail(); } catch (KunderaException e) { Assert.assertEquals( "java.lang.UnsupportedOperationException: " + GenerationType.class.getSimpleName() + "." + GenerationType.IDENTITY + " Strategy not supported by this client :" + HBaseClient.class.getName(), e.getMessage()); } HBaseGeneratedIdStrategySequence strategySequence = new HBaseGeneratedIdStrategySequence(); strategySequence.setName("Kuldeep"); try { em.persist(strategySequence); Assert.fail(); } catch (KunderaException e) { Assert.assertEquals( "java.lang.IllegalArgumentException: " + GenerationType.class.getSimpleName() + "." + GenerationType.SEQUENCE + " Strategy not supported by this client :" + HBaseClient.class.getName(), e.getMessage()); } try { HBaseGeneratedIdStrategyTable strategyTable1 = new HBaseGeneratedIdStrategyTable(); strategyTable1.setName("KK"); em.persist(strategyTable1); HBaseGeneratedIdStrategyTable strategyTable2 = new HBaseGeneratedIdStrategyTable(); strategyTable2.setName("vm"); em.persist(strategyTable2); HBaseGeneratedIdStrategyTable strategyTable3 = new HBaseGeneratedIdStrategyTable(); strategyTable3.setName("vs"); em.persist(strategyTable3); HBaseGeneratedIdStrategyTable strategyTable = new HBaseGeneratedIdStrategyTable(); strategyTable.setName("sh"); em.persist(strategyTable); List<HBaseGeneratedIdStrategyTable> list = em.createQuery("Select c from HBaseGeneratedIdStrategyTable c") .getResultList(); Assert.assertNotNull(list); Assert.assertEquals(4, list.size()); for (HBaseGeneratedIdStrategyTable entity : list) { Assert.assertTrue(entity.getId() == 1 || entity.getId() == 51 || entity.getId() == 101 || entity.getId() == 151); } em.clear(); strategyTable = em.find(HBaseGeneratedIdStrategyTable.class, 
strategyTable.getId()); Assert.assertNotNull(strategyTable); Assert.assertEquals("sh", strategyTable.getName()); } catch (KunderaException e) { Assert.fail(); } try { HBaseGeneratedIdWithOutSequenceGenerator withOutSequenceGenerator = new HBaseGeneratedIdWithOutSequenceGenerator(); withOutSequenceGenerator.setName("Kuldeep Kumar"); em.persist(withOutSequenceGenerator); Assert.fail(); } catch (KunderaException e) { Assert.assertEquals( "java.lang.IllegalArgumentException: " + GenerationType.class.getSimpleName() + "." + GenerationType.SEQUENCE + " Strategy not supported by this client :" + HBaseClient.class.getName(), e.getMessage()); } try { HBaseGeneratedIdWithOutTableGenerator withOutTableGenerator = new HBaseGeneratedIdWithOutTableGenerator(); withOutTableGenerator.setName("Kuldeep Mishra"); em.persist(withOutTableGenerator); List<HBaseGeneratedIdWithOutTableGenerator> list = em.createQuery( "Select c from HBaseGeneratedIdWithOutTableGenerator c").getResultList(); Assert.assertNotNull(list); Assert.assertEquals(1, list.size()); Assert.assertEquals("Kuldeep Mishra", list.get(0).getName()); Object id = list.get(0).getId(); em.clear(); withOutTableGenerator = em.find(HBaseGeneratedIdWithOutTableGenerator.class, id); Assert.assertNotNull(withOutTableGenerator); Assert.assertEquals("Kuldeep Mishra", withOutTableGenerator.getName()); } catch (KunderaException e) { Assert.fail(); } try { HBaseGeneratedIdWithSequenceGenerator withSequenceGenerator = new HBaseGeneratedIdWithSequenceGenerator(); withSequenceGenerator.setName("Kuldeep Kumar Mishra"); em.persist(withSequenceGenerator); Assert.fail(); } catch (KunderaException e) { Assert.assertEquals( "java.lang.IllegalArgumentException: " + GenerationType.class.getSimpleName() + "." 
+ GenerationType.SEQUENCE + " Strategy not supported by this client :" + HBaseClient.class.getName(), e.getMessage()); } try { // Test with initValue paramater = 100 allocationsize = 30 HBaseGeneratedIdWithTableGenerator withTableGenerator = new HBaseGeneratedIdWithTableGenerator(); withTableGenerator.setName("Kumar Mishra"); em.persist(withTableGenerator); List<HBaseGeneratedIdWithTableGenerator> list = em.createQuery( "Select c from HBaseGeneratedIdWithTableGenerator c").getResultList(); Assert.assertNotNull(list); Assert.assertEquals(1, list.size()); Assert.assertEquals("Kumar Mishra", list.get(0).getName()); Object id = list.get(0).getId(); Assert.assertEquals(100, id); HBaseGeneratedIdWithTableGenerator withTableGenerator2 = new HBaseGeneratedIdWithTableGenerator(); withTableGenerator2.setName("Kumar Mishra2"); em.persist(withTableGenerator2); list = em.createQuery("Select c from HBaseGeneratedIdWithTableGenerator c").getResultList(); Assert.assertEquals(2, list.size()); id = list.get(1).getId(); Assert.assertEquals(100, list.get(0).getId()); Assert.assertEquals(130, list.get(1).getId()); em.clear(); withTableGenerator = em.find(HBaseGeneratedIdWithTableGenerator.class, id); Assert.assertNotNull(withTableGenerator); Assert.assertEquals("Kumar Mishra2", withTableGenerator.getName()); // Test without initValue paramater allocationsize = 30 HBaseGeneratedIdWithTableGeneratorWihtoutInit person1 = new HBaseGeneratedIdWithTableGeneratorWihtoutInit(); person1.setName("pragalbh"); em.persist(person1); HBaseGeneratedIdWithTableGeneratorWihtoutInit person2 = new HBaseGeneratedIdWithTableGeneratorWihtoutInit(); person2.setName("pragalbh2"); em.persist(person2); List<HBaseGeneratedIdWithTableGeneratorWihtoutInit> results = em.createQuery( "Select c from HBaseGeneratedIdWithTableGeneratorWihtoutInit c").getResultList(); Assert.assertTrue(results.get(0).getId() == 1 || results.get(0).getId() == 51); Assert.assertTrue(results.get(1).getId() == 1 || results.get(1).getId() == 
51); } catch (KunderaException e) { Assert.fail(); } } }
// Copyright (c) Keith D Gregory // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.kdgregory.logging.test; import net.sf.kdgcommons.lang.ClassUtil; import static net.sf.kdgcommons.test.StringAsserts.*; import static org.junit.Assert.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.amazonaws.services.logs.AWSLogs; import com.amazonaws.services.logs.AWSLogsClientBuilder; import com.kdgregory.logging.aws.cloudwatch.CloudWatchLogWriter; import com.kdgregory.logging.aws.cloudwatch.CloudWatchWriterStatistics; import com.kdgregory.logging.aws.facade.CloudWatchFacade; import com.kdgregory.logging.testhelpers.CloudWatchTestHelper; import com.kdgregory.logging.testhelpers.CommonTestHelper; import com.kdgregory.logging.testhelpers.MessageWriter; /** * This class contains all of the actual test code for the CloudWatch * integration tests. Subclass tests initialize the logging framework * and then call the like-named method here. 
*/ public abstract class AbstractCloudWatchAppenderIntegrationTest { // change these if you change the config protected final static String BASE_LOGGROUP_NAME = "AppenderIntegrationTest"; protected final static String LOGSTREAM_BASE = "AppenderTest"; // initialized here, and again by init() after the logging framework has been initialized protected Logger localLogger = LoggerFactory.getLogger(getClass()); // this client is shared by all tests protected static AWSLogs helperClient; // this one is used solely by the static factory test protected static AWSLogs factoryClient; // this one is used by the alternate region test protected AWSLogs altClient; protected CloudWatchTestHelper testHelper; //---------------------------------------------------------------------------- // Helpers //---------------------------------------------------------------------------- /** * Subclasses must implement this to give the common tests access to the * logger components. */ public interface LoggerAccessor { /** * Creates a new Messagewriter that will log to the tested appender. */ MessageWriter newMessageWriter(int numMessages); /** * Retrieves the current writer from the tested appender. */ CloudWatchLogWriter getWriter() throws Exception; /** * Returns the statistics object associated with the tested appender. */ CloudWatchWriterStatistics getStats(); /** * Identifies whether the appender supports post-creation config changes * (used in the smoketest). */ boolean supportsConfigurationChanges(); /** * Changes the appender's batch delay, iff if supports such changes. */ void setBatchDelay(long value); } /** * This function is used by testFactoryMethod(). 
*/ public static AWSLogs createClient() { factoryClient = AWSLogsClientBuilder.defaultClient(); return factoryClient; } //---------------------------------------------------------------------------- // JUnit Scaffolding -- must be overridden by subclasses (I'm assuming that // JUnit doesn't go out of its way to find annotations on superclasses) //---------------------------------------------------------------------------- public static void beforeClass() throws Exception { helperClient = AWSLogsClientBuilder.defaultClient(); } public void tearDown() throws Exception { // set by single test, but easier to reset always (if needed) if (factoryClient != null) { factoryClient.shutdown(); factoryClient = null; } // set by single test, but easier to reset always (if needed) if (altClient != null) { altClient.shutdown(); altClient = null; } localLogger.info("finished"); } public static void afterClass() throws Exception { if (helperClient != null) { helperClient.shutdown(); } } //---------------------------------------------------------------------------- // Test Bodies //---------------------------------------------------------------------------- protected void smoketest(LoggerAccessor accessor) throws Exception { // configured values; should be the same for all frameworks final int numMessages = 1001; MessageWriter messageWriter = accessor.newMessageWriter(numMessages); messageWriter.run(); localLogger.info("waiting for logger"); CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages, 30000); assertEquals("stats: actual log group name", "AppenderIntegrationTest-smoketest", accessor.getStats().getActualLogGroupName()); assertRegex("stats: actual log stream name", LOGSTREAM_BASE + "-\\d{12}", accessor.getStats().getActualLogStreamName()); assertEquals("stats: messages written", numMessages, accessor.getStats().getMessagesSent()); // while we're here, verify some more of the plumbing assertNull("factory should not have been used to create client", 
factoryClient); assertEquals("retention period", 7, testHelper.describeLogGroup().getRetentionInDays().intValue()); if (accessor.supportsConfigurationChanges()) { accessor.setBatchDelay(1234L); assertEquals("batch delay", 1234L, accessor.getWriter().getBatchDelay()); } testHelper.deleteLogGroupIfExists(); } protected void testMultipleThreadsSingleAppender(LoggerAccessor accessor) throws Exception { // configured values; should be the same for all frameworks final int messagesPerThread = 200; final int numMessages = messagesPerThread * 5; MessageWriter[] messageWriters = new MessageWriter[] { accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread) }; MessageWriter.runOnThreads(messageWriters); localLogger.info("waiting for logger"); CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages, 30000); testHelper.assertMessages(LOGSTREAM_BASE, numMessages); testHelper.deleteLogGroupIfExists(); } protected void testMultipleThreadsMultipleAppendersDifferentDestinations(LoggerAccessor... 
accessors) throws Exception { // configured values; should be the same for all frameworks final int messagesPerThread = 1000; MessageWriter.runOnThreads( accessors[0].newMessageWriter(messagesPerThread), accessors[1].newMessageWriter(messagesPerThread), accessors[2].newMessageWriter(messagesPerThread)); localLogger.info("waiting for loggers"); CommonTestHelper.waitUntilMessagesSent(accessors[0].getStats(), messagesPerThread, 30000); CommonTestHelper.waitUntilMessagesSent(accessors[1].getStats(), messagesPerThread, 30000); CommonTestHelper.waitUntilMessagesSent(accessors[2].getStats(), messagesPerThread, 30000); testHelper.assertMessages(LOGSTREAM_BASE + "-1", messagesPerThread); testHelper.assertMessages(LOGSTREAM_BASE + "-2", messagesPerThread); testHelper.assertMessages(LOGSTREAM_BASE + "-3", messagesPerThread); testHelper.deleteLogGroupIfExists(); } @SuppressWarnings("unused") protected void testMultipleThreadsMultipleAppendersSameDestination(LoggerAccessor... accessors) throws Exception { // configured values; should be the same for all frameworks final int messagesPerThread = 1000; MessageWriter.runOnThreads( accessors[0].newMessageWriter(messagesPerThread), accessors[1].newMessageWriter(messagesPerThread), accessors[2].newMessageWriter(messagesPerThread), accessors[3].newMessageWriter(messagesPerThread), accessors[4].newMessageWriter(messagesPerThread), accessors[0].newMessageWriter(messagesPerThread), accessors[1].newMessageWriter(messagesPerThread), accessors[2].newMessageWriter(messagesPerThread), accessors[3].newMessageWriter(messagesPerThread), accessors[4].newMessageWriter(messagesPerThread), accessors[0].newMessageWriter(messagesPerThread), accessors[1].newMessageWriter(messagesPerThread), accessors[2].newMessageWriter(messagesPerThread), accessors[3].newMessageWriter(messagesPerThread), accessors[4].newMessageWriter(messagesPerThread), accessors[0].newMessageWriter(messagesPerThread), accessors[1].newMessageWriter(messagesPerThread), 
accessors[2].newMessageWriter(messagesPerThread), accessors[3].newMessageWriter(messagesPerThread), accessors[4].newMessageWriter(messagesPerThread)); localLogger.info("waiting for loggers"); for (LoggerAccessor accessor : accessors) { CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), messagesPerThread * 4, 30000); } // even after waiting until the stats say we've written everything, the read won't succeed // if we try it immediately ... so we sleep, while CloudWatch puts everything in its place Thread.sleep(10000); testHelper.assertMessages(LOGSTREAM_BASE, messagesPerThread * 20); int messageCountFromStats = 0; int messagesDiscardedFromStats = 0; int raceRetriesFromStats = 0; int unrecoveredRaceRetriesFromStats = 0; boolean raceReportedInStats = false; String lastNonRaceErrorFromStats = null; for (LoggerAccessor accessor : accessors) { CloudWatchWriterStatistics stats = accessor.getStats(); messageCountFromStats += stats.getMessagesSent(); messagesDiscardedFromStats += stats.getMessagesDiscarded(); raceRetriesFromStats += stats.getWriterRaceRetries(); unrecoveredRaceRetriesFromStats += stats.getUnrecoveredWriterRaceRetries(); String lastErrorMessage = stats.getLastErrorMessage(); if (lastErrorMessage != null) { if (lastErrorMessage.contains("InvalidSequenceTokenException")) raceReportedInStats = true; else lastNonRaceErrorFromStats = lastErrorMessage; } } assertEquals("stats: message count", messagesPerThread * 20, messageCountFromStats); assertEquals("stats: messages discarded", 0, messagesDiscardedFromStats); // manually enable these two assertions -- this test does not reliably create a race retry since 2.0.2 // assertTrue("stats: race retries", raceRetriesFromStats > 0); // assertEquals("stats: all race retries recovered", 0, unrecoveredRaceRetriesFromStats); assertNull("stats: last error (was: " + lastNonRaceErrorFromStats + ")", lastNonRaceErrorFromStats); testHelper.deleteLogGroupIfExists(); } protected void 
testLogstreamDeletionAndRecreation(LoggerAccessor accessor) throws Exception { final String streamName = LOGSTREAM_BASE; final int numMessages = 100; localLogger.info("writing first batch"); accessor.newMessageWriter(numMessages).run(); CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages, 30000); testHelper.assertMessages(streamName, numMessages); localLogger.info("deleting stream"); testHelper.deleteLogStream(streamName); localLogger.info("writing second batch (framework may report error)"); accessor.newMessageWriter(numMessages).run(); // the original batch of messages will be gone, so we can assert the new batch was written // however, the writer doesn't change so the stats will keep increasing CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages * 2, 30000); testHelper.assertMessages(streamName, numMessages); assertEquals("all messages reported in stats", numMessages * 2, accessor.getStats().getMessagesSent()); testHelper.deleteLogGroupIfExists(); } protected void testFactoryMethod(LoggerAccessor accessor) throws Exception { final int numMessages = 1001; localLogger.info("writing messages"); accessor.newMessageWriter(numMessages).run(); localLogger.info("waiting for logger"); CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages, 30000); testHelper.assertMessages(LOGSTREAM_BASE, numMessages); CloudWatchFacade facade = ClassUtil.getFieldValue(accessor.getWriter(), "facade", CloudWatchFacade.class); AWSLogs client = ClassUtil.getFieldValue(facade, "client", AWSLogs.class); assertSame("factory should have been used to create client", factoryClient, client); testHelper.deleteLogGroupIfExists(); } protected void testAlternateRegion(LoggerAccessor accessor, CloudWatchTestHelper altTestHelper) throws Exception { final int numMessages = 1001; localLogger.info("writing messages"); accessor.newMessageWriter(numMessages).run(); localLogger.info("waiting for logger"); 
CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages, 30000); altTestHelper.assertMessages(LOGSTREAM_BASE, numMessages); assertFalse("logstream does not exist in default region", testHelper.isLogStreamAvailable(LOGSTREAM_BASE)); altTestHelper.deleteLogGroupIfExists(); } protected void testAlternateEndpoint(LoggerAccessor accessor, CloudWatchTestHelper altTestHelper) throws Exception { final int numMessages = 1001; localLogger.info("writing messages"); accessor.newMessageWriter(numMessages).run(); localLogger.info("waiting for logger"); CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages, 30000); altTestHelper.assertMessages(LOGSTREAM_BASE, numMessages); assertFalse("logstream does not exist in default region", testHelper.isLogStreamAvailable(LOGSTREAM_BASE)); altTestHelper.deleteLogGroupIfExists(); } protected void testAssumedRole(LoggerAccessor accessor) throws Exception { final int numMessages = 1001; localLogger.info("writing messages"); accessor.newMessageWriter(numMessages).run(); localLogger.info("waiting for logger"); CommonTestHelper.waitUntilMessagesSent(accessor.getStats(), numMessages, 30000); testHelper.assertMessages(LOGSTREAM_BASE, numMessages); assertEquals("credentials provider", STSAssumeRoleSessionCredentialsProvider.class, CommonTestHelper.getCredentialsProviderClass(accessor.getWriter()) ); testHelper.deleteLogGroupIfExists(); } protected void testSynchronousModeSingleThread(LoggerAccessor accessor) throws Exception { localLogger.info("writing message"); accessor.newMessageWriter(1).run(); assertEquals("number of messages recorded in stats", 1, accessor.getStats().getMessagesSent()); testHelper.assertMessages(LOGSTREAM_BASE, 1); testHelper.deleteLogGroupIfExists(); } protected void testSynchronousModeMultiThread(LoggerAccessor accessor) throws Exception { // if we do too many messages we get throttled ... 
this will be a problem for real-world use final int messagesPerThread = 5; MessageWriter[] writers = new MessageWriter[] { accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread), accessor.newMessageWriter(messagesPerThread) }; MessageWriter.runOnThreads(writers); assertEquals("number of messages recorded in stats", messagesPerThread * 5, accessor.getStats().getMessagesSent()); testHelper.assertMessages(LOGSTREAM_BASE, messagesPerThread * 5); testHelper.deleteLogGroupIfExists(); } }
/* * FILE: ShapefileRDDTest * Copyright (c) 2015 - 2018 GeoSpark Development Team * * MIT License * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
* */ package org.datasyslab.geospark.formatMapper.shapefileParser.shapes; import com.vividsolutions.jts.geom.Envelope; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.MultiPolygon; import com.vividsolutions.jts.geom.Polygon; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaSparkContext; import org.datasyslab.geospark.formatMapper.shapefileParser.ShapefileRDD; import org.datasyslab.geospark.formatMapper.shapefileParser.boundary.BoundBox; import org.datasyslab.geospark.spatialOperator.RangeQuery; import org.datasyslab.geospark.spatialRDD.LineStringRDD; import org.datasyslab.geospark.spatialRDD.PointRDD; import org.datasyslab.geospark.spatialRDD.PolygonRDD; import org.geotools.data.DataStore; import org.geotools.data.DataStoreFinder; import org.geotools.data.FeatureSource; import org.geotools.data.shapefile.files.ShpFiles; import org.geotools.data.shapefile.shp.ShapefileReader; import org.geotools.feature.FeatureCollection; import org.geotools.feature.FeatureIterator; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.opengis.feature.simple.SimpleFeature; import org.opengis.feature.simple.SimpleFeatureType; import org.opengis.filter.Filter; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Map; public class ShapefileRDDTest implements Serializable { /** * The sc. */ public static JavaSparkContext sc; /** * The Input location. 
*/ public static String InputLocation; @BeforeClass public static void onceExecutedBeforeAll() { SparkConf conf = new SparkConf().setAppName("ShapefileRDDTest").setMaster("local[2]").set("spark.executor.cores", "2"); sc = new JavaSparkContext(conf); Logger.getLogger("org").setLevel(Level.WARN); Logger.getLogger("akka").setLevel(Level.WARN); //Hard code to a file in resource folder. But you can replace it later in the try-catch field in your hdfs system. } @AfterClass public static void tearDown() throws Exception { sc.stop(); } /** * Test if shapeRDD get correct number of shapes from .shp file * * @throws IOException */ @Test public void testLoadShapeFile() throws IOException { // load shape with geotool.shapefile InputLocation = ShapefileRDDTest.class.getClassLoader().getResource("shapefiles/polygon").getPath(); File file = new File(InputLocation); Map<String, Object> map = new HashMap<String, Object>(); map.put("url", file.toURI().toURL()); DataStore dataStore = DataStoreFinder.getDataStore(map); String typeName = dataStore.getTypeNames()[0]; FeatureSource<SimpleFeatureType, SimpleFeature> source = dataStore .getFeatureSource(typeName); Filter filter = Filter.INCLUDE; FeatureCollection<SimpleFeatureType, SimpleFeature> collection = source.getFeatures(filter); // load shapes with our tool ShapefileRDD shapefileRDD = new ShapefileRDD(sc, InputLocation); Assert.assertEquals(shapefileRDD.getShapeRDD().collect().size(), collection.size()); dataStore.dispose(); } /** * test if shapeRDD load .shp fie with shape type = Polygon correctly. 
* * @throws IOException */ @Test public void testLoadShapeFilePolygon() throws IOException { InputLocation = ShapefileRDDTest.class.getClassLoader().getResource("shapefiles/polygon").getPath(); // load shape with geotool.shapefile File file = new File(InputLocation); Map<String, Object> map = new HashMap<String, Object>(); map.put("url", file.toURI().toURL()); DataStore dataStore = DataStoreFinder.getDataStore(map); String typeName = dataStore.getTypeNames()[0]; FeatureSource<SimpleFeatureType, SimpleFeature> source = dataStore .getFeatureSource(typeName); Filter filter = Filter.INCLUDE; FeatureCollection<SimpleFeatureType, SimpleFeature> collection = source.getFeatures(filter); FeatureIterator<SimpleFeature> features = collection.features(); ArrayList<String> featureTexts = new ArrayList<String>(); while (features.hasNext()) { SimpleFeature feature = features.next(); Object geometry = feature.getDefaultGeometry(); if (geometry instanceof MultiPolygon) { MultiPolygon multiPolygon = (MultiPolygon) geometry; if (multiPolygon.getNumGeometries() == 1) { geometry = multiPolygon.getGeometryN(0); } } featureTexts.add(String.valueOf(geometry)); } features.close(); final Iterator<String> featureIterator = featureTexts.iterator(); ShapefileRDD shapefileRDD = new ShapefileRDD(sc, InputLocation); PolygonRDD spatialRDD = new PolygonRDD(shapefileRDD.getPolygonRDD()); try { RangeQuery.SpatialRangeQuery(spatialRDD, new Envelope(-180, 180, -90, 90), false, false).count(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } for (Geometry geometry : shapefileRDD.getShapeRDD().collect()) { Assert.assertEquals(featureIterator.next(), geometry.toText()); } dataStore.dispose(); } /** * test if shapeRDD load .shp fie with shape type = PolyLine correctly. 
* * @throws IOException */ @Test public void testLoadShapeFilePolyLine() throws IOException { InputLocation = ShapefileRDDTest.class.getClassLoader().getResource("shapefiles/polyline").getPath(); // load shape with geotool.shapefile File file = new File(InputLocation); Map<String, Object> map = new HashMap<String, Object>(); map.put("url", file.toURI().toURL()); DataStore dataStore = DataStoreFinder.getDataStore(map); String typeName = dataStore.getTypeNames()[0]; FeatureSource<SimpleFeatureType, SimpleFeature> source = dataStore .getFeatureSource(typeName); Filter filter = Filter.INCLUDE; FeatureCollection<SimpleFeatureType, SimpleFeature> collection = source.getFeatures(filter); FeatureIterator<SimpleFeature> features = collection.features(); ArrayList<String> featureTexts = new ArrayList<String>(); while (features.hasNext()) { SimpleFeature feature = features.next(); featureTexts.add(String.valueOf(feature.getDefaultGeometry())); } features.close(); final Iterator<String> featureIterator = featureTexts.iterator(); ShapefileRDD shapefileRDD = new ShapefileRDD(sc, InputLocation); LineStringRDD spatialRDD = new LineStringRDD(shapefileRDD.getLineStringRDD()); try { RangeQuery.SpatialRangeQuery(spatialRDD, new Envelope(-180, 180, -90, 90), false, false).count(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } for (Geometry geometry : shapefileRDD.getShapeRDD().collect()) { Assert.assertEquals(featureIterator.next(), geometry.toText()); } dataStore.dispose(); } /** * Test if shapeRDD load shape type = MultiPoint correctly. 
* * @throws IOException */ @Test public void testLoadShapeFileMultiPoint() throws IOException { InputLocation = ShapefileRDDTest.class.getClassLoader().getResource("shapefiles/multipoint").getPath(); // load shape with geotool.shapefile File file = new File(InputLocation); Map<String, Object> map = new HashMap<String, Object>(); map.put("url", file.toURI().toURL()); DataStore dataStore = DataStoreFinder.getDataStore(map); String typeName = dataStore.getTypeNames()[0]; FeatureSource<SimpleFeatureType, SimpleFeature> source = dataStore .getFeatureSource(typeName); Filter filter = Filter.INCLUDE; FeatureCollection<SimpleFeatureType, SimpleFeature> collection = source.getFeatures(filter); FeatureIterator<SimpleFeature> features = collection.features(); ArrayList<String> featureTexts = new ArrayList<String>(); while (features.hasNext()) { SimpleFeature feature = features.next(); featureTexts.add(String.valueOf(feature.getDefaultGeometry())); } features.close(); final Iterator<String> featureIterator = featureTexts.iterator(); ShapefileRDD shapefileRDD = new ShapefileRDD(sc, InputLocation); for (Geometry geometry : shapefileRDD.getShapeRDD().collect()) { Assert.assertEquals(featureIterator.next(), geometry.toText()); } dataStore.dispose(); } /** * Test if shapeRDD load shape type = Point correctly. 
* * @throws IOException */ @Test public void testLoadShapeFilePoint() throws IOException { InputLocation = ShapefileRDDTest.class.getClassLoader().getResource("shapefiles/point").getPath(); // load shape with geotool.shapefile File file = new File(InputLocation); Map<String, Object> map = new HashMap<String, Object>(); map.put("url", file.toURI().toURL()); DataStore dataStore = DataStoreFinder.getDataStore(map); String typeName = dataStore.getTypeNames()[0]; FeatureSource<SimpleFeatureType, SimpleFeature> source = dataStore .getFeatureSource(typeName); Filter filter = Filter.INCLUDE; FeatureCollection<SimpleFeatureType, SimpleFeature> collection = source.getFeatures(filter); FeatureIterator<SimpleFeature> features = collection.features(); ArrayList<String> featureTexts = new ArrayList<String>(); while (features.hasNext()) { SimpleFeature feature = features.next(); featureTexts.add(String.valueOf(feature.getDefaultGeometry())); } features.close(); final Iterator<String> featureIterator = featureTexts.iterator(); ShapefileRDD shapefileRDD = new ShapefileRDD(sc, InputLocation); PointRDD spatialRDD = new PointRDD(shapefileRDD.getPointRDD()); try { RangeQuery.SpatialRangeQuery(spatialRDD, new Envelope(-180, 180, -90, 90), false, false).count(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } for (Geometry geometry : shapefileRDD.getShapeRDD().collect()) { Assert.assertEquals(featureIterator.next(), geometry.toText()); } dataStore.dispose(); } /** * Test if shapeRDD load .dbf file correctly * * @throws IOException */ @Test public void testLoadDbfFile() throws IOException { InputLocation = ShapefileRDDTest.class.getClassLoader().getResource("shapefiles/dbf").getPath(); // load shape with geotool.shapefile File file = new File(InputLocation); Map<String, Object> map = new HashMap<String, Object>(); map.put("url", file.toURI().toURL()); DataStore dataStore = DataStoreFinder.getDataStore(map); String typeName = dataStore.getTypeNames()[0]; 
FeatureSource<SimpleFeatureType, SimpleFeature> source = dataStore .getFeatureSource(typeName); Filter filter = Filter.INCLUDE; FeatureCollection<SimpleFeatureType, SimpleFeature> collection = source.getFeatures(filter); FeatureIterator<SimpleFeature> features = collection.features(); ArrayList<String> featureTexts = new ArrayList<String>(); while (features.hasNext()) { SimpleFeature feature = features.next(); Object geometry = feature.getDefaultGeometry(); if (geometry instanceof MultiPolygon) { MultiPolygon multiPolygon = (MultiPolygon) geometry; if (multiPolygon.getNumGeometries() == 1) { geometry = multiPolygon.getGeometryN(0); } } featureTexts.add(String.valueOf(geometry)); } features.close(); final Iterator<String> featureIterator = featureTexts.iterator(); ShapefileRDD shapefileRDD = new ShapefileRDD(sc, InputLocation); PolygonRDD spatialRDD = new PolygonRDD(shapefileRDD.getPolygonRDD()); try { RangeQuery.SpatialRangeQuery(spatialRDD, new Envelope(-180, 180, -90, 90), false, false).count(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } assert ((Polygon) spatialRDD.rawSpatialRDD.take(1).get(0)).getUserData().equals("20\t175\t00485050\t0500000US20175\t20175\tSeward\t06\t1655865960\t2777350"); for (Geometry geometry : shapefileRDD.getShapeRDD().collect()) { Assert.assertEquals(featureIterator.next(), geometry.toText()); } dataStore.dispose(); } /** * Test if parse the boundary in header correctly * * @throws IOException */ @Test public void testParseBoundary() throws IOException { InputLocation = ShapefileRDDTest.class.getClassLoader().getResource("shapefiles/dbf").getPath(); // load shapefile with geotools's reader ShpFiles shpFile = new ShpFiles(InputLocation + "/map.shp"); GeometryFactory geometryFactory = new GeometryFactory(); ShapefileReader gtlReader = new ShapefileReader(shpFile, false, true, geometryFactory); String gtlbounds = gtlReader.getHeader().minX() + ":" + gtlReader.getHeader().minY() + ":" + 
gtlReader.getHeader().maxX() + ":" + gtlReader.getHeader().maxY(); // read shapefile by our reader ShapefileRDD shapefileRDD = new ShapefileRDD(sc, InputLocation); shapefileRDD.count(); BoundBox bounds = shapefileRDD.getBoundBox(sc, InputLocation); String myBounds = bounds.getXMin() + ":" + bounds.getYMin() + ":" + bounds.getXMax() + ":" + bounds.getYMax(); Assert.assertEquals(gtlbounds, myBounds); gtlReader.close(); } }
/* * Copyright 2017 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jobExecutor; import static azkaban.Constants.ConfigurationKeys.AZKABAN_SERVER_GROUP_NAME; import static azkaban.Constants.ConfigurationKeys.AZKABAN_SERVER_NATIVE_LIB_FOLDER; import static azkaban.ServiceProvider.SERVICE_PROVIDER; import azkaban.Constants; import azkaban.Constants.JobProperties; import azkaban.flow.CommonJobProperties; import azkaban.jobExecutor.utils.process.AzkabanProcess; import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder; import azkaban.metrics.CommonMetrics; import azkaban.utils.ExecuteAsUser; import azkaban.utils.Pair; import azkaban.utils.Props; import azkaban.utils.SystemMemoryInfo; import azkaban.utils.Utils; import com.google.common.annotations.VisibleForTesting; import java.io.File; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.log4j.Logger; /** * A job that runs a simple unix command */ public class ProcessJob extends AbstractProcessJob { public static final String COMMAND = "command"; public static final String AZKABAN_MEMORY_CHECK = "azkaban.memory.check"; // Use azkaban.Constants.ConfigurationKeys.AZKABAN_SERVER_NATIVE_LIB_FOLDER instead @Deprecated public static final String NATIVE_LIB_FOLDER = "azkaban.native.lib"; public static 
final String EXECUTE_AS_USER = "execute.as.user"; public static final String KRB5CCNAME = "KRB5CCNAME"; private static final Duration KILL_TIME = Duration.ofSeconds(30); private static final String MEMCHECK_ENABLED = "memCheck.enabled"; private static final String CHOWN = "/bin/chown"; private static final String CREATE_FILE = "touch"; private static final int SUCCESSFUL_EXECUTION = 0; private static final String TEMP_FILE_NAME = "user_can_write"; private final CommonMetrics commonMetrics; private volatile AzkabanProcess process; private volatile boolean killed = false; // For testing only. True if the job process exits successfully. private volatile boolean success; public ProcessJob(final String jobId, final Props sysProps, final Props jobProps, final Logger log) { super(jobId, sysProps, jobProps, log); // TODO: reallocf fully guicify CommonMetrics through ProcessJob dependents this.commonMetrics = SERVICE_PROVIDER.getInstance(CommonMetrics.class); } public ProcessJob(final String jobId, final Props sysProps, final Props jobProps, final Props privateProps, final Logger log) { super(jobId, sysProps, jobProps, privateProps, log); // TODO: reallocf fully guicify CommonMetrics through ProcessJob dependents this.commonMetrics = SERVICE_PROVIDER.getInstance(CommonMetrics.class); } /** * Splits the command into a unix like command line structure. Quotes and single quotes are * treated as nested strings. 
*/ public static String[] partitionCommandLine(final String command) { final ArrayList<String> commands = new ArrayList<>(); int index = 0; StringBuffer buffer = new StringBuffer(command.length()); boolean isApos = false; boolean isQuote = false; while (index < command.length()) { final char c = command.charAt(index); switch (c) { case ' ': if (!isQuote && !isApos) { final String arg = buffer.toString(); buffer = new StringBuffer(command.length() - index); if (arg.length() > 0) { commands.add(arg); } } else { buffer.append(c); } break; case '\'': if (!isQuote) { isApos = !isApos; } else { buffer.append(c); } break; case '"': if (!isApos) { isQuote = !isQuote; } else { buffer.append(c); } break; default: buffer.append(c); } index++; } if (buffer.length() > 0) { final String arg = buffer.toString(); commands.add(arg); } return commands.toArray(new String[commands.size()]); } @Override public void run() throws Exception { try { resolveProps(); } catch (final Exception e) { handleError("Bad property definition! " + e.getMessage(), e); } if (this.getSysProps().getBoolean(MEMCHECK_ENABLED, true) && this.getJobProps().getBoolean(AZKABAN_MEMORY_CHECK, true)) { final Pair<Long, Long> memPair = getProcMemoryRequirement(); final long xms = memPair.getFirst(); final long xmx = memPair.getSecond(); // retry backoff in ms final String oomMsg = String .format("Cannot request memory (Xms %d kb, Xmx %d kb) from system for job %s", xms, xmx, getId()); int attempt; boolean isMemGranted = true; //todo HappyRay: move to proper Guice after this class is refactored. 
final SystemMemoryInfo memInfo = SERVICE_PROVIDER.getInstance(SystemMemoryInfo.class); for (attempt = 1; attempt <= Constants.MEMORY_CHECK_RETRY_LIMIT; attempt++) { isMemGranted = memInfo.canSystemGrantMemory(xmx); if (isMemGranted) { info(String.format("Memory granted for job %s", getId())); if (attempt > 1) { this.commonMetrics.decrementOOMJobWaitCount(); } break; } if (attempt < Constants.MEMORY_CHECK_RETRY_LIMIT) { info(String.format(oomMsg + ", sleep for %s secs and retry, attempt %s of %s", TimeUnit.MILLISECONDS.toSeconds( Constants.MEMORY_CHECK_INTERVAL_MS), attempt, Constants.MEMORY_CHECK_RETRY_LIMIT)); if (attempt == 1) { this.commonMetrics.incrementOOMJobWaitCount(); } synchronized (this) { try { this.wait(Constants.MEMORY_CHECK_INTERVAL_MS); } catch (final InterruptedException e) { info(String .format("Job %s interrupted while waiting for memory check retry", getId())); } } if (this.killed) { this.commonMetrics.decrementOOMJobWaitCount(); info(String.format("Job %s was killed while waiting for memory check retry", getId())); return; } } } if (!isMemGranted) { this.commonMetrics.decrementOOMJobWaitCount(); handleError(oomMsg, null); } } List<String> commands = null; try { commands = getCommandList(); } catch (final Exception e) { handleError("Job set up failed: " + e.getMessage(), e); } final long startMs = System.currentTimeMillis(); if (commands == null) { handleError("There are no commands to execute", null); } info(commands.size() + " commands to execute."); final File[] propFiles = initPropsFiles(); // change krb5ccname env var so that each job execution gets its own cache final Map<String, String> envVars = getEnvironmentVariables(); envVars.put(KRB5CCNAME, getKrb5ccname(this.getJobProps())); // determine whether to run as Azkaban or run as effectiveUser, // by default, run as effectiveUser String executeAsUserBinaryPath = null; String effectiveUser = null; final boolean isExecuteAsUser = this.getSysProps().getBoolean(EXECUTE_AS_USER, true); //Get 
list of users we never execute flows as. (ie: root, azkaban) final Set<String> blackListedUsers = new HashSet<>( Arrays.asList( this.getSysProps() .getString(Constants.ConfigurationKeys.BLACK_LISTED_USERS, "root,azkaban") .split(",") ) ); // nativeLibFolder specifies the path for execute-as-user file, // which will change user from Azkaban to effectiveUser if (isExecuteAsUser) { final String nativeLibFolder = this.getSysProps().getString(AZKABAN_SERVER_NATIVE_LIB_FOLDER); executeAsUserBinaryPath = String.format("%s/%s", nativeLibFolder, "execute-as-user"); effectiveUser = getEffectiveUser(this.getJobProps()); // Throw exception if Azkaban tries to run flow as a prohibited user if (blackListedUsers.contains(effectiveUser)) { throw new RuntimeException( String.format("Not permitted to proxy as '%s' through Azkaban", effectiveUser) ); } // Set parent directory permissions to <uid>:azkaban so user can write in their execution directory // if the directory is not permissioned correctly already (should happen once per execution) if (!canWriteInCurrentWorkingDirectory(effectiveUser)) { info("Changing current working directory ownership"); assignUserFileOwnership(effectiveUser, getWorkingDirectory()); } // Set property file permissions to <uid>:azkaban so user can write to their prop files // in order to pass properties from one job to another, except the last one for (int i = 0; i < 2; i++) { info("Changing properties files ownership"); assignUserFileOwnership(effectiveUser, propFiles[i].getAbsolutePath()); } } for (String command : commands) { AzkabanProcessBuilder builder = null; if (isExecuteAsUser) { command = String.format("%s %s %s", executeAsUserBinaryPath, effectiveUser, command); info("Command: " + command); builder = new AzkabanProcessBuilder(partitionCommandLine(command)) .setEnv(envVars).setWorkingDir(getCwd()).setLogger(getLog()) .enableExecuteAsUser().setExecuteAsUserBinaryPath(executeAsUserBinaryPath) .setEffectiveUser(effectiveUser); } else { 
info("Command: " + command); builder = new AzkabanProcessBuilder(partitionCommandLine(command)) .setEnv(envVars).setWorkingDir(getCwd()).setLogger(getLog()); } if (builder.getEnv().size() > 0) { info("Environment variables: " + builder.getEnv()); } info("Working directory: " + builder.getWorkingDir()); // print out the Job properties to the job log. this.logJobProperties(); synchronized (this) { // Make sure that checking if the process job is killed and creating an AzkabanProcess // object are atomic. The cancel method relies on this to make sure that if this.process is // not null, this block of code which includes checking if the job is killed has not been // executed yet. if (this.killed) { info("The job is killed. Abort. No job process created."); return; } this.process = builder.build(); } try { this.process.run(); this.success = true; } catch (final Throwable e) { for (final File file : propFiles) { if (file != null && file.exists()) { file.delete(); } } throw new RuntimeException(e); } finally { info("Process with id " + this.process.getProcessId() + " completed " + (this.success ? "successfully" : "unsuccessfully") + " in " + ((System.currentTimeMillis() - startMs) / 1000) + " seconds."); } } // Get the output properties from this job. generateProperties(propFiles[1]); } /** * <pre> * This method extracts the kerberos ticket cache file name from the jobprops. * This method will ensure that each job execution will have its own kerberos ticket cache file * Given that the code only sets an environmental variable, the number of files created * corresponds * to the number of processes that are doing kinit in their flow, which should not be an * inordinately * high number. 
* </pre> * * @return file name: the kerberos ticket cache file to use */ private String getKrb5ccname(final Props jobProps) { final String effectiveUser = getEffectiveUser(jobProps); final String projectName = jobProps.getString(CommonJobProperties.PROJECT_NAME).replace(" ", "_"); final String flowId = jobProps.getString(CommonJobProperties.FLOW_ID).replace(" ", "_"); final String jobId = jobProps.getString(CommonJobProperties.JOB_ID).replace(" ", "_"); // execId should be an int and should not have space in it, ever final String execId = jobProps.getString(CommonJobProperties.EXEC_ID); final String krb5ccname = String.format("/tmp/krb5cc__%s__%s__%s__%s__%s", projectName, flowId, jobId, execId, effectiveUser); return krb5ccname; } /** * <pre> * Determines what user id should the process job run as, in the following order of precedence: * 1. USER_TO_PROXY * 2. SUBMIT_USER * </pre> * * @return the user that Azkaban is going to execute as */ private String getEffectiveUser(final Props jobProps) { String effectiveUser = null; if (jobProps.containsKey(JobProperties.USER_TO_PROXY)) { effectiveUser = jobProps.getString(JobProperties.USER_TO_PROXY); } else if (jobProps.containsKey(CommonJobProperties.SUBMIT_USER)) { effectiveUser = jobProps.getString(CommonJobProperties.SUBMIT_USER); } else { throw new RuntimeException( "Internal Error: No user.to.proxy or submit.user in the jobProps"); } info("effective user is: " + effectiveUser); return effectiveUser; } /** * Checks to see if user has write access to current working directory which many users need for * their jobs to store temporary data/jars on the executor. * * Accomplishes this by using execute-as-user to try to create an empty file in the cwd. 
* * @param effectiveUser user/proxy user running the job * @return true if user has write permissions in current working directory otherwise false */ private boolean canWriteInCurrentWorkingDirectory(final String effectiveUser) throws IOException { final ExecuteAsUser executeAsUser = new ExecuteAsUser( this.getSysProps().getString(AZKABAN_SERVER_NATIVE_LIB_FOLDER)); final List<String> checkIfUserCanWriteCommand = Arrays .asList(CREATE_FILE, getWorkingDirectory() + "/" + TEMP_FILE_NAME); final int result = executeAsUser.execute(effectiveUser, checkIfUserCanWriteCommand); return result == SUCCESSFUL_EXECUTION; } /** * Changes permissions on file/directory so that the file/directory is owned by the user and the * group remains the azkaban service account name. * * Leverages execute-as-user with "root" as the user to run the command. * * @param effectiveUser user/proxy user running the job * @param fileName the name of the file whose permissions will be changed */ private void assignUserFileOwnership(final String effectiveUser, final String fileName) throws Exception { final ExecuteAsUser executeAsUser = new ExecuteAsUser( this.getSysProps().getString(AZKABAN_SERVER_NATIVE_LIB_FOLDER)); final String groupName = this.getSysProps().getString(AZKABAN_SERVER_GROUP_NAME, "azkaban"); final List<String> changeOwnershipCommand = Arrays .asList(CHOWN, effectiveUser + ":" + groupName, fileName); info("Change ownership of " + fileName + " to " + effectiveUser + ":" + groupName + "."); final int result = executeAsUser.execute("root", changeOwnershipCommand); if (result != 0) { handleError("Failed to change current working directory ownership. Error code: " + Integer .toString(result), null); } } /** * This is used to get the min/max memory size requirement by processes. SystemMemoryInfo can use * the info to determine if the memory request can be fulfilled. For Java process, this should be * Xms/Xmx setting. 
* * @return pair of min/max memory size */ protected Pair<Long, Long> getProcMemoryRequirement() throws Exception { return new Pair<>(0L, 0L); } protected void handleError(final String errorMsg, final Exception e) throws Exception { error(errorMsg); if (e != null) { throw new Exception(errorMsg, e); } else { throw new Exception(errorMsg); } } protected List<String> getCommandList() { final List<String> commands = new ArrayList<>(); commands.add(this.getJobProps().getString(COMMAND)); for (int i = 1; this.getJobProps().containsKey(COMMAND + "." + i); i++) { commands.add(this.getJobProps().getString(COMMAND + "." + i)); } return commands; } @Override public void cancel() throws InterruptedException { // in case the job is waiting synchronized (this) { this.killed = true; this.notify(); if (this.process == null) { // The job thread has not checked if the job is killed yet. // setting the killed flag should be enough to abort the job. // There is no job process to kill. return; } } this.process.awaitStartup(); final boolean processkilled = this.process .softKill(KILL_TIME.toMillis(), TimeUnit.MILLISECONDS); if (!processkilled) { warn("Kill with signal TERM failed. Killing with KILL signal."); this.process.hardKill(); } } @Override public double getProgress() { return this.process != null && this.process.isComplete() ? 1.0 : 0.0; } public int getProcessId() { return this.process.getProcessId(); } @VisibleForTesting boolean isSuccess() { return this.success; } @VisibleForTesting AzkabanProcess getProcess() { return this.process; } public String getPath() { return Utils.ifNull(this.getJobPath(), ""); } }
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.java.sip.communicator.plugin.addrbook.macosx; import java.util.*; import net.java.sip.communicator.service.contactsource.*; /** * The editable detail, change get changed and in addressbook. * * @author Lyubomir Marinov */ public class MacOSXAddrBookContactDetail extends EditableContactDetail { /** * The property index for this detail. */ private final int property; /** * The id of the detail. */ private String id; private String subPropertyLabel; /** * Initializes a new <tt>ContactDetail</tt> instance which is to represent a * specific contact address and which is to be optionally labeled with a * specific set of labels. * * @param contactDetailValue the contact detail value to be represented by * the new <tt>ContactDetail</tt> instance * @param category * @param subCategories the set of sub categories with which the new * <tt>ContactDetail</tt> instance is to be labeled. * @param id The id of the detail. */ public MacOSXAddrBookContactDetail( int property, String contactDetailValue, Category category, SubCategory[] subCategories, String subPropertyLabel, String id) { super(contactDetailValue, category, subCategories); this.property = property; this.subPropertyLabel = subPropertyLabel; this.id = id; } /** * Whether the value for the category are multiline. 
* @param category * @return */ public static boolean isMultiline(Category category) { switch(category) { case Personal: return false; case Organization: return false; case Email: return true; case InstantMessaging: return true; case Phone: return true; case Address: return true; default: return false; } } /** * Sets the given detail value. * * @param value the new value of the detail */ @Override public void setDetail(String value) { //let's save in addressbook if(isMultiline(getCategory())) { // get others EditableSourceContact sourceContact = getSourceContact(); if(sourceContact != null && sourceContact instanceof MacOSXAddrBookSourceContact) { List<ContactDetail> details = ((MacOSXAddrBookSourceContact) sourceContact) .getContactDetails(getCategory()); boolean isAddress = property == MacOSXAddrBookContactQuery.kABAddressProperty; boolean isHomeAddress = containsSubCategory(SubCategory.Home); // For an address, we must check that the current detail is the // modified one. For all other properties than address, this // boolean must always be true. boolean isModifiedAddressOrGenericDetail; // first add existing one List<String> values = new ArrayList<String>(); for(ContactDetail cd : details) { isModifiedAddressOrGenericDetail = true; if(isAddress) { // lets check home and work details if((isHomeAddress && !cd.containsSubCategory(SubCategory.Home) ) || (!isHomeAddress && !cd.containsSubCategory(SubCategory.Work) )) { isModifiedAddressOrGenericDetail = false; } } String det = cd.getDetail(); for(SubCategory sub : cd.getSubCategories()) { String label = MacOSXAddrBookContactQuery. getLabel(property, sub, subPropertyLabel); if(label != null) { if(getSubCategories().contains(sub) && isModifiedAddressOrGenericDetail) values.add(value); else values.add(det); values.add(label); // For an address adds a third item for the tuple: // value, label, sub-property label. 
if(isAddress && cd instanceof MacOSXAddrBookContactDetail ) { values.add( ((MacOSXAddrBookContactDetail) cd) .getSubPropertyLabel()); } } } } // now the real edit MacOSXAddrBookContactQuery.setProperty( id, MacOSXAddrBookContactQuery.ABPERSON_PROPERTIES[ property], subPropertyLabel, values.toArray(new Object[values.size()])); } } else { MacOSXAddrBookContactQuery.setProperty( id, MacOSXAddrBookContactQuery.ABPERSON_PROPERTIES[ property], null, value); } super.setDetail(value); } /** * Returns the sub property. * @return */ public String getSubPropertyLabel() { return subPropertyLabel; } /** * Returns the property index for this detail. * * @return The property index for this detail. */ public final int getProperty() { return this.property; } }
package io.realm.objectserver;

import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.support.test.runner.AndroidJUnit4;

import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import io.realm.AuthenticationListener;
import io.realm.ErrorCode;
import io.realm.ObjectServerError;
import io.realm.Realm;
import io.realm.RealmConfiguration;
import io.realm.StandardIntegrationTest;
import io.realm.SyncConfiguration;
import io.realm.SyncCredentials;
import io.realm.SyncManager;
import io.realm.SyncSession;
import io.realm.SyncUser;
import io.realm.SyncUserInfo;
import io.realm.TestHelper;
import io.realm.entities.StringOnly;
import io.realm.internal.async.RealmAsyncTaskImpl;
import io.realm.internal.objectserver.Token;
import io.realm.objectserver.utils.Constants;
import io.realm.objectserver.utils.StringOnlyModule;
import io.realm.objectserver.utils.UserFactory;
import io.realm.rule.RunTestInLooperThread;
import io.realm.util.SyncTestUtils;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;
import static junit.framework.Assert.fail;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Integration tests for authentication against a running Realm Object Server:
 * login (sync and async), password changes, logout propagation, token refresh
 * and admin user lookups. Requires the test server configured in {@link Constants}.
 */
@RunWith(AndroidJUnit4.class)
public class AuthTests extends StandardIntegrationTest {

    // Logging in with credentials for a non-existing user must be rejected.
    @Test
    public void login_userNotExist() {
        SyncCredentials credentials = SyncCredentials.usernamePassword("IWantToHackYou", "GeneralPassword", false);
        try {
            SyncUser.login(credentials, Constants.AUTH_URL);
            fail();
        } catch (ObjectServerError expected) {
            assertEquals(ErrorCode.INVALID_CREDENTIALS, expected.getErrorCode());
        }
    }

    // Same as above, but through the async API: the error callback must fire.
    @Test
    @RunTestInLooperThread
    public void loginAsync_userNotExist() {
        SyncCredentials credentials = SyncCredentials.usernamePassword("IWantToHackYou", "GeneralPassword", false);
        SyncUser.loginAsync(credentials, Constants.AUTH_URL, new SyncUser.Callback<SyncUser>() {
            @Override
            public void onSuccess(SyncUser user) {
                fail();
            }

            @Override
            public void onError(ObjectServerError error) {
                assertEquals(ErrorCode.INVALID_CREDENTIALS, error.getErrorCode());
                looperThread.testComplete();
            }
        });
    }

    // Registering a brand-new user must succeed and report a non-admin user.
    @Test
    @RunTestInLooperThread
    public void login_newUser() {
        String userId = UUID.randomUUID().toString();
        SyncCredentials credentials = SyncCredentials.usernamePassword(userId, "password", true);
        SyncUser.loginAsync(credentials, Constants.AUTH_URL, new SyncUser.Callback<SyncUser>() {
            @Override
            public void onSuccess(SyncUser user) {
                assertFalse(user.isAdmin());
                try {
                    assertEquals(new URL(Constants.AUTH_URL), user.getAuthenticationUrl());
                } catch (MalformedURLException e) {
                    fail(e.toString());
                }
                looperThread.testComplete();
            }

            @Override
            public void onError(ObjectServerError error) {
                fail(error.toString());
            }
        });
    }

    // An admin refresh token can be used directly as an access-token credential.
    @Test
    @RunTestInLooperThread
    public void login_withAccessToken() {
        SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);
        SyncCredentials credentials = SyncCredentials.accessToken(
                SyncTestUtils.getRefreshToken(adminUser).value(), "custom-admin-user", adminUser.isAdmin());
        SyncUser.loginAsync(credentials, Constants.AUTH_URL, new SyncUser.Callback<SyncUser>() {
            @Override
            public void onSuccess(SyncUser user) {
                assertTrue(user.isAdmin());
                final SyncConfiguration config = new SyncConfiguration.Builder(user, Constants.SYNC_SERVER_URL)
                        .errorHandler(new SyncSession.ErrorHandler() {
                            @Override
                            public void onError(SyncSession session, ObjectServerError error) {
                                fail("Session failed: " + error);
                            }
                        })
                        .build();
                final Realm realm = Realm.getInstance(config);
                looperThread.addTestRealm(realm);
                assertTrue(config.getUser().isValid());
                looperThread.testComplete();
            }

            @Override
            public void onError(ObjectServerError error) {
                fail("Login failed: " + error);
            }
        });
    }

    // An exception thrown from the user's error handler must propagate out of
    // the Looper, not be swallowed by the async login machinery.
    @Test
    public void loginAsync_errorHandlerThrows() throws InterruptedException {
        final AtomicBoolean errorThrown = new AtomicBoolean(false);

        // Create custom Looper thread to be able to check for errors thrown when processing Looper events.
        Thread t = new Thread(new Runnable() {
            private volatile Handler handler;

            @Override
            public void run() {
                Looper.prepare();
                try {
                    handler = new Handler();
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            SyncCredentials credentials
                                    = SyncCredentials.usernamePassword("IWantToHackYou", "GeneralPassword", false);
                            SyncUser.loginAsync(credentials, Constants.AUTH_URL, new SyncUser.Callback<SyncUser>() {
                                @Override
                                public void onSuccess(SyncUser user) {
                                    fail();
                                }

                                @Override
                                public void onError(ObjectServerError error) {
                                    assertEquals(ErrorCode.INVALID_CREDENTIALS, error.getErrorCode());
                                    throw new IllegalArgumentException("BOOM");
                                }
                            });
                        }
                    });
                    Looper.loop();
                } catch (IllegalArgumentException e) {
                    // The "BOOM" above escapes Looper.loop() and lands here.
                    errorThrown.set(true);
                }
            }
        });
        t.start();
        t.join(TimeUnit.SECONDS.toMillis(10));
        assertTrue(errorThrown.get());
    }

    // Changing one's own password invalidates the old one and allows login with the new one.
    @Test
    public void changePassword() {
        String username = UUID.randomUUID().toString();
        String originalPassword = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, originalPassword, true);
        SyncUser userOld = SyncUser.login(credentials, Constants.AUTH_URL);
        assertTrue(userOld.isValid());

        // Change password and try to log in with new password
        String newPassword = "new-password";
        userOld.changePassword(newPassword);
        userOld.logout();

        // Make sure old password doesn't work
        try {
            SyncUser.login(SyncCredentials.usernamePassword(username, originalPassword, false), Constants.AUTH_URL);
            fail();
        } catch (ObjectServerError e) {
            assertEquals(ErrorCode.INVALID_CREDENTIALS, e.getErrorCode());
        }

        // Then login with new password
        credentials = SyncCredentials.usernamePassword(username, newPassword, false);
        SyncUser userNew = SyncUser.login(credentials, Constants.AUTH_URL);

        assertTrue(userNew.isValid());
        assertEquals(userOld.getIdentity(), userNew.getIdentity());
    }

    // An admin may change another user's password.
    @Test
    public void changePassword_using_admin() {
        String username = UUID.randomUUID().toString();
        String originalPassword = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, originalPassword, true);
        SyncUser userOld = SyncUser.login(credentials, Constants.AUTH_URL);
        assertTrue(userOld.isValid());

        // Login an admin user
        SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);
        assertTrue(adminUser.isValid());
        assertTrue(adminUser.isAdmin());

        // Change password using admin user
        String newPassword = "new-password";
        adminUser.changePassword(userOld.getIdentity(), newPassword);

        // Try to log in with new password
        userOld.logout();
        credentials = SyncCredentials.usernamePassword(username, newPassword, false);
        SyncUser userNew = SyncUser.login(credentials, Constants.AUTH_URL);

        assertTrue(userNew.isValid());
        assertEquals(userOld.getIdentity(), userNew.getIdentity());
    }

    // Async variant of the admin password change.
    @Test
    @RunTestInLooperThread
    public void changePassword_using_admin_async() {
        final String username = UUID.randomUUID().toString();
        final String originalPassword = "password";
        final SyncCredentials credentials = SyncCredentials.usernamePassword(username, originalPassword, true);
        final SyncUser userOld = SyncUser.login(credentials, Constants.AUTH_URL);
        assertTrue(userOld.isValid());

        // Login an admin user
        final SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);
        assertTrue(adminUser.isValid());
        assertTrue(adminUser.isAdmin());

        // Change password using admin user
        final String newPassword = "new-password";
        adminUser.changePasswordAsync(userOld.getIdentity(), newPassword, new SyncUser.Callback<SyncUser>() {
            @Override
            public void onSuccess(SyncUser administratorUser) {
                assertEquals(adminUser, administratorUser);

                // Try to log in with new password
                userOld.logout();
                SyncCredentials credentials = SyncCredentials.usernamePassword(username, newPassword, false);
                SyncUser userNew = SyncUser.login(credentials, Constants.AUTH_URL);

                assertTrue(userNew.isValid());
                assertEquals(userOld.getIdentity(), userNew.getIdentity());
                looperThread.testComplete();
            }

            @Override
            public void onError(ObjectServerError error) {
                fail(error.getErrorMessage());
            }
        });
    }

    // Once logged out, a user may no longer change its own password.
    @Test
    @RunTestInLooperThread
    public void changePassword_throwWhenUserIsLoggedOut() {
        String username = UUID.randomUUID().toString();
        String password = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);

        SyncManager.addAuthenticationListener(new AuthenticationListener() {
            @Override
            public void loggedIn(SyncUser user) {
                SyncManager.removeAuthenticationListener(this);
                // callback is happening on different thread, all assertions needs to be done on looper thread
                looperThread.postRunnable(new Runnable() {
                    @Override
                    public void run() {
                        fail("loggedIn should not be invoked");
                    }
                });
            }

            @Override
            public void loggedOut(SyncUser user) {
                SyncManager.removeAuthenticationListener(this);
                try {
                    user.changePassword("new-password");
                    looperThread.postRunnable(new Runnable() {
                        @Override
                        public void run() {
                            fail("changePassword should throw ObjectServerError (INVALID CREDENTIALS)");
                        }
                    });
                } catch (ObjectServerError expected) {
                    // Expected: the refresh token was revoked by the logout.
                }
                looperThread.testComplete();
            }
        });

        user.logout();
    }

    @Test
    public void cachedInstanceShouldNotThrowIfRefreshTokenExpires() throws InterruptedException {
        String username = UUID.randomUUID().toString();
        String password = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);

        // Spy the user so isValid() reports true once (during the first open), then false.
        final SyncUser user = spy(SyncUser.login(credentials, Constants.AUTH_URL));
        when(user.isValid()).thenReturn(true, false);

        final RealmConfiguration configuration = new SyncConfiguration.Builder(user, Constants.USER_REALM).build();
        Realm realm = Realm.getInstance(configuration);

        assertFalse(user.isValid());
        verify(user, times(2)).isValid();

        final CountDownLatch backgroundThread = new CountDownLatch(1);
        // Should not throw when using an expired refresh_token form a different thread
        // It should be able to open a Realm with an expired token
        new Thread() {
            @Override
            public void run() {
                Realm instance = Realm.getInstance(configuration);
                instance.close();
                backgroundThread.countDown();
            }
        }.start();
        backgroundThread.await();

        // It should be possible to open a cached Realm with expired token
        Realm cachedInstance = Realm.getInstance(configuration);
        assertNotNull(cachedInstance);

        realm.close();
        cachedInstance.close();
        user.logout();
    }

    @Test
    public void buildingSyncConfigurationShouldThrowIfInvalidUser() {
        String username = UUID.randomUUID().toString();
        String password = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        SyncUser currentUser = SyncUser.currentUser();
        user.logout();

        assertFalse(user.isValid());

        try {
            // We should not be able to build a configuration with an invalid/logged out user
            new SyncConfiguration.Builder(user, Constants.USER_REALM).build();
            fail("Invalid user, it should not be possible to create a SyncConfiguration");
        } catch (IllegalArgumentException expected) {
            // User not authenticated or authentication expired.
        }

        try {
            // We should not be able to build a configuration with an invalid/logged out user
            new SyncConfiguration.Builder(currentUser, Constants.USER_REALM).build();
            fail("Invalid currentUser, it should not be possible to create a SyncConfiguration");
        } catch (IllegalArgumentException expected) {
            // User not authenticated or authentication expired.
        }
    }

    // using a logout user should not throw
    // NOTE(review): the method name says "ShouldThrow" but, per the comment above and the
    // body, the intent is that opening an existing configuration with a logged-out user
    // does NOT throw. Name kept to avoid breaking test references.
    @Test
    public void usingConfigurationWithInvalidUserShouldThrow() {
        String username = UUID.randomUUID().toString();
        String password = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        RealmConfiguration configuration = new SyncConfiguration.Builder(user, Constants.USER_REALM).build();
        user.logout();
        assertFalse(user.isValid());

        Realm instance = Realm.getInstance(configuration);
        instance.close();
    }

    // After logging out the current user, the next logged-in user becomes current.
    @Test
    public void logout_currentUserMoreThanOne() {
        UserFactory.createUniqueUser(Constants.AUTH_URL);
        SyncUser.currentUser().logout();
        SyncUser user = UserFactory.createUniqueUser(Constants.AUTH_URL);
        assertEquals(user, SyncUser.currentUser());
    }

    // logging out 'user' should have the same impact on other instance(s) of the same user
    @Test
    public void loggingOutUserShouldImpactOtherInstances() throws InterruptedException {
        String username = UUID.randomUUID().toString();
        String password = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        SyncUser currentUser = SyncUser.currentUser();

        assertTrue(user.isValid());
        assertEquals(user, currentUser);

        user.logout();

        assertFalse(user.isValid());
        assertFalse(currentUser.isValid());
    }

    // logging out 'currentUser' should have the same impact on other instance(s) of the user
    @Test
    public void loggingOutCurrentUserShouldImpactOtherInstances() throws InterruptedException {
        String username = UUID.randomUUID().toString();
        String password = "password";
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        SyncUser currentUser = SyncUser.currentUser();

        assertTrue(user.isValid());
        assertEquals(user, currentUser);

        SyncUser.currentUser().logout();

        assertFalse(user.isValid());
        assertFalse(currentUser.isValid());
        assertNull(SyncUser.currentUser());
    }

    // verify that multiple users can be logged in at the same time
    @Test
    public void multipleUsersCanBeLoggedInSimultaneously() {
        final String password = "password";
        final SyncUser[] users = new SyncUser[3];
        for (int i = 0; i < users.length; i++) {
            SyncCredentials credentials = SyncCredentials.usernamePassword(
                    UUID.randomUUID().toString(), password, true);
            users[i] = SyncUser.login(credentials, Constants.AUTH_URL);
        }

        for (int i = 0; i < users.length; i++) {
            assertTrue(users[i].isValid());
        }

        for (int i = 0; i < users.length; i++) {
            users[i].logout();
        }

        for (int i = 0; i < users.length; i++) {
            assertFalse(users[i].isValid());
        }
    }

    // verify that a single user can be logged out and back in.
    @Test
    public void singleUserCanBeLoggedInAndOutRepeatedly() {
        final String username = UUID.randomUUID().toString();
        final String password = "password";

        // register the user the first time
        SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        assertTrue(user.isValid());
        user.logout();
        assertFalse(user.isValid());

        // on subsequent logins, the user is already registered.
        // Fixed: was a duplicated assignment ("credentials = credentials = ...").
        credentials = SyncCredentials.usernamePassword(username, password, false);
        for (int i = 0; i < 3; i++) {
            user = SyncUser.login(credentials, Constants.AUTH_URL);
            assertTrue(user.isValid());
            user.logout();
            assertFalse(user.isValid());
        }
    }

    // After logout + login, the same identity must be issued a different refresh token.
    @Test
    public void revokedRefreshTokenIsNotSameAfterLogin() throws InterruptedException {
        final CountDownLatch userLoggedInAgain = new CountDownLatch(1);
        final String uniqueName = UUID.randomUUID().toString();
        final SyncCredentials credentials = SyncCredentials.usernamePassword(uniqueName, "password", true);
        SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        final Token revokedRefreshToken = SyncTestUtils.getRefreshToken(user);

        SyncManager.addAuthenticationListener(new AuthenticationListener() {
            @Override
            public void loggedIn(SyncUser user) {
            }

            @Override
            public void loggedOut(SyncUser user) {
                SyncCredentials credentials = SyncCredentials.usernamePassword(uniqueName, "password", false);
                SyncUser loggedInUser = SyncUser.login(credentials, Constants.AUTH_URL);
                Token token = SyncTestUtils.getRefreshToken(loggedInUser);
                // still comparing the same user
                assertEquals(revokedRefreshToken.identity(), token.identity());
                // different tokens
                assertNotEquals(revokedRefreshToken.value(), token.value());
                SyncManager.removeAuthenticationListener(this);
                userLoggedInAgain.countDown();
            }
        });

        user.logout();
        TestHelper.awaitOrFail(userLoggedInAgain);
    }

    // The pre-emptive token refresh subsystem should function, and properly refresh the access token.
    // WARNING: this test can fail if there's a difference between the server's and device's clock, causing the
    // refresh access token to be too far in time.
    @Test(timeout = 30000)
    public void preemptiveTokenRefresh()
            throws NoSuchFieldException, IllegalAccessException, InterruptedException {
        SyncUser user = UserFactory.createUniqueUser(Constants.AUTH_URL);

        // make the access tokens map accessible
        Field realmsField = SyncUser.class.getDeclaredField("realms");
        realmsField.setAccessible(true);
        @SuppressWarnings("unchecked") // using reflection
        Map<SyncConfiguration, Token> accessTokens = (Map<SyncConfiguration, Token>) realmsField.get(user);

        final SyncConfiguration syncConfiguration = configurationFactory
                .createSyncConfigurationBuilder(user, Constants.SYNC_SERVER_URL)
                .modules(new StringOnlyModule())
                .errorHandler(new SyncSession.ErrorHandler() {
                    @Override
                    public void onError(SyncSession session, ObjectServerError error) {
                        fail(error.getErrorMessage());
                    }
                })
                .build();
        Realm realm = Realm.getInstance(syncConfiguration);

        // create and wait for a transaction to be uploaded,
        // this guarantees that an accessToken is available
        realm.executeTransaction(new Realm.Transaction() {
            @Override
            public void execute(Realm realm) {
                realm.createObject(StringOnly.class).setChars("1");
            }
        });
        SyncSession session = SyncManager.getSession(syncConfiguration);
        session.uploadAllLocalChanges();

        assertFalse(accessTokens.isEmpty());
        Assert.assertEquals(1, accessTokens.size());
        Map.Entry<SyncConfiguration, Token> entry = accessTokens.entrySet().iterator().next();
        Assert.assertEquals(syncConfiguration, entry.getKey());
        final Token accessToken = entry.getValue();
        Assert.assertNotNull(accessToken);

        // getting refresh token delay (internal scheduling state, read via reflection)
        Field refreshTokenTaskField = SyncSession.class.getDeclaredField("refreshTokenTask");
        refreshTokenTaskField.setAccessible(true);
        RealmAsyncTaskImpl task = (RealmAsyncTaskImpl) refreshTokenTaskField.get(session);

        Field pendingTaskField = RealmAsyncTaskImpl.class.getDeclaredField("pendingTask");
        pendingTaskField.setAccessible(true);
        ScheduledFuture<?> pendingTask = (ScheduledFuture<?>) pendingTaskField.get(task);

        long nextRefreshTokenRefreshQueryDelay = pendingTask.getDelay(TimeUnit.MILLISECONDS);
        // current configuration 'realm-java/tools/sync_test_server/configuration.yml'
        // is setting the access token to expire every 20 seconds 'access_token: 20'
        // we wait approximately actually 10 seconds since the SyncSession.REFRESH_MARGIN_DELAY is 10s
        SystemClock.sleep(nextRefreshTokenRefreshQueryDelay);
        // allow 3 seconds for the query to perform and complete
        SystemClock.sleep(TimeUnit.SECONDS.toMillis(3));

        Token newAccessToken = accessTokens.get(syncConfiguration);
        assertThat("new Token expires after the old one",
                newAccessToken.expiresMs(), greaterThan(accessToken.expiresMs()));
        assertNotEquals(accessToken, newAccessToken);
        // refresh_token identity is the same
        assertEquals(SyncTestUtils.getRefreshToken(user).identity(), newAccessToken.identity());
        assertEquals(accessToken.identity(), newAccessToken.identity());

        realm.close();
    }

    // An admin can look up account info for another (logged-in) user.
    @Test
    public void retrieve() {
        final SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);

        final String username = UUID.randomUUID().toString();
        final String password = "password";
        final SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        final SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        assertTrue(user.isValid());
        String identity = user.getIdentity();

        SyncUserInfo userInfo = adminUser.retrieveInfoForUser(
                username, SyncCredentials.IdentityProvider.USERNAME_PASSWORD);
        assertNotNull(userInfo);
        assertEquals(identity, userInfo.getIdentity());
        assertFalse(userInfo.isAdmin());
        assertTrue(userInfo.getMetadata().isEmpty());
        assertEquals(username, userInfo.getAccounts().get(SyncCredentials.IdentityProvider.USERNAME_PASSWORD));
    }

    // retrieving a logged out user
    @Test
    @RunTestInLooperThread
    public void retrieve_logout() {
        final SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);

        final String username = UUID.randomUUID().toString();
        final String password = "password";
        final SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        final SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        final String identity = user.getIdentity();

        // unless the refresh_token is revoked (via logout) the admin user can still retrieve the user
        // we make sure the token is revoked before trying to retrieve the user
        SyncManager.addAuthenticationListener(new AuthenticationListener() {
            @Override
            public void loggedIn(SyncUser user) {
                SyncManager.removeAuthenticationListener(this);
                looperThread.postRunnable(new Runnable() {
                    @Override
                    public void run() {
                        fail("loggedIn should not be invoked");
                    }
                });
            }

            @Override
            public void loggedOut(final SyncUser user) {
                SyncManager.removeAuthenticationListener(this);
                looperThread.postRunnable(new Runnable() {
                    @Override
                    public void run() {
                        assertFalse(user.isValid());
                        SyncUserInfo userInfo = adminUser.retrieveInfoForUser(
                                username, SyncCredentials.IdentityProvider.USERNAME_PASSWORD);
                        assertNotNull(userInfo);
                        assertEquals(identity, userInfo.getIdentity());
                        assertFalse(userInfo.isAdmin());
                        assertTrue(userInfo.getMetadata().isEmpty());
                        assertEquals(username,
                                userInfo.getAccounts().get(SyncCredentials.IdentityProvider.USERNAME_PASSWORD));
                        looperThread.testComplete();
                    }
                });
            }
        });

        user.logout();
    }

    // Looking up an unknown provider id must return null, not throw.
    @Test
    public void retrieve_unknownProviderId() {
        final SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);
        SyncUserInfo userInfo = adminUser.retrieveInfoForUser(
                "doesNotExist", SyncCredentials.IdentityProvider.USERNAME_PASSWORD);
        assertNull(userInfo);
    }

    // Looking up via an unknown provider must return null, not throw.
    @Test
    public void retrieve_invalidProvider() {
        final SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);

        final String username = UUID.randomUUID().toString();
        final String password = "password";
        final SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        final SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        assertTrue(user.isValid());

        SyncUserInfo userInfo = adminUser.retrieveInfoForUser("username", "invalid");
        assertNull(userInfo);
    }

    @Test
    public void retrieve_notAdmin() {
        final String username1 = UUID.randomUUID().toString();
        final String password1 = "password";
        final SyncCredentials credentials1 = SyncCredentials.usernamePassword(username1, password1, true);
        final SyncUser user1 = SyncUser.login(credentials1, Constants.AUTH_URL);
        assertTrue(user1.isValid());

        final String username2 = UUID.randomUUID().toString();
        final String password2 = "password";
        final SyncCredentials credentials2 = SyncCredentials.usernamePassword(username2, password2, true);
        final SyncUser user2 = SyncUser.login(credentials2, Constants.AUTH_URL);
        assertTrue(user2.isValid());

        // trying to lookup user2 using user1 should not work (requires admin token)
        // Fixed: arguments were swapped (provider, username) relative to the
        // (username, provider) order used by every other call site in this file.
        try {
            user1.retrieveInfoForUser(username2, SyncCredentials.IdentityProvider.USERNAME_PASSWORD);
            fail("It should not be possible to lookup a user using non admin token");
        } catch (IllegalArgumentException expected) {
            // Expected: lookups require an admin token.
        }
    }

    @Test
    @RunTestInLooperThread
    public void retrieve_async() {
        final String username = UUID.randomUUID().toString();
        final String password = "password";
        final SyncCredentials credentials = SyncCredentials.usernamePassword(username, password, true);
        final SyncUser user = SyncUser.login(credentials, Constants.AUTH_URL);
        assertTrue(user.isValid());

        // Login an admin user
        final SyncUser adminUser = UserFactory.createAdminUser(Constants.AUTH_URL);
        assertTrue(adminUser.isValid());
        assertTrue(adminUser.isAdmin());

        final String identity = user.getIdentity();
        adminUser.retrieveInfoForUserAsync(username, SyncCredentials.IdentityProvider.USERNAME_PASSWORD,
                new SyncUser.Callback<SyncUserInfo>() {
                    @Override
                    public void onSuccess(SyncUserInfo userInfo) {
                        assertNotNull(userInfo);
                        assertEquals(identity, userInfo.getIdentity());
                        assertFalse(userInfo.isAdmin());
                        assertTrue(userInfo.getMetadata().isEmpty());
                        assertEquals(username,
                                userInfo.getAccounts().get(SyncCredentials.IdentityProvider.USERNAME_PASSWORD));
                        looperThread.testComplete();
                    }

                    @Override
                    public void onError(ObjectServerError error) {
                        fail(error.getErrorMessage());
                    }
                });
    }
}
package com.insightfullogic.honest_profiler.ports.javafx.view.flame;

import static java.lang.Math.abs;
import static java.lang.Math.exp;
import static javafx.scene.paint.Color.WHITE;
import static javafx.scene.paint.Color.hsb;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import com.insightfullogic.honest_profiler.core.aggregation.result.Aggregation;
import com.insightfullogic.honest_profiler.core.aggregation.result.Keyed;
import com.insightfullogic.honest_profiler.core.aggregation.result.Parent;
import com.insightfullogic.honest_profiler.core.aggregation.result.diff.DiffNode;
import com.insightfullogic.honest_profiler.core.aggregation.result.diff.TreeDiff;
import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Node;
import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Tree;
import com.insightfullogic.honest_profiler.core.profiles.lean.info.MethodInfo;
import com.insightfullogic.honest_profiler.ports.javafx.model.ApplicationContext;

import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.Tooltip;
import javafx.scene.input.MouseEvent;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;

/**
 * Abstract class with common code for rendering a FlameGraph.
 *
 * @param <T> the type of {@link Aggregation} being rendered
 * @param <U> the type of data items in the aggregation
 */
public abstract class AbstractFlameCanvas<T, U extends Keyed<String> & Parent<U>> extends Canvas
{
    // Class Properties

    // The colour palettes used are in fact discrete gradients, "bucketized" with this number of buckets
    protected static final int BUCKETS = 50;

    // Start colour for interpolation of non-diff method flame blocks
    // NOTE(review): identifier is missing a "T" (DEFAUL_START); it is part of the
    // protected interface for subclasses, so it cannot be renamed here safely.
    protected static final Color DEFAUL_START = hsb(28, 1., .5);
    // End colour for interpolation of non-diff method flame blocks
    protected static final Color DEFAULT_END = hsb(39, .22, 1.);

    // Approximate rendered width, in pixels, assumed per character when deciding
    // whether a label fits inside a block
    private static final int TEXT_WIDTH = 7;

    // Instance Properties

    private ApplicationContext appCtx;
    // Hit-test rectangles for the blocks rendered in the last render pass
    private List<FlameBlock<U>> flameBlocks;
    private Tooltip tooltip;
    private final ScrollPane scrollPane;

    // Instance Constructors

    /**
     * Simple constructor.
     *
     * @param applicationContext the {@link ApplicationContext} for the application
     */
    protected AbstractFlameCanvas(ApplicationContext applicationContext)
    {
        this.appCtx = applicationContext;
        this.flameBlocks = new ArrayList<>();
        this.tooltip = new Tooltip();

        // Wrap the canvas in a vertically scrollable pane sized to its width
        final ScrollPane sp = new ScrollPane();
        sp.setHbarPolicy(ScrollPane.ScrollBarPolicy.NEVER);
        sp.setVbarPolicy(ScrollPane.ScrollBarPolicy.ALWAYS);
        sp.setFitToWidth(true);
        sp.setContent(this);
        this.scrollPane = sp;

        // Show tooltip / InfoBar details for the block under the mouse
        setOnMouseMoved(this::displayMethodName);
    }

    // Instance Accessors

    /**
     * Returns the {@link ApplicationContext} for the application.
     *
     * @return the {@link ApplicationContext} for the application.
     */
    protected ApplicationContext appCtx()
    {
        return appCtx;
    }

    // Abstract Methods

    /**
     * Returns the {@link MethodInfo} object for the method aggregated in the specified {@link Node} or
     * {@link DiffNode}. Returns null if the node is a thread-level aggregation.
     *
     * @param node the {@link Node} or {@link DiffNode}
     * @return the {@link MethodInfo} for the method aggregated in the specified node
     */
    protected abstract MethodInfo methodForNode(U node);

    /**
     * Renders the specified {@link Tree} or {@link TreeDiff}.
     *
     * @param tree the {@link Tree} or {@link TreeDiff} to be rendered
     */
    public abstract void render(T tree);

    /**
     * Returns the {@link ScrollPane} wrapping this canvas, for embedding in the UI.
     *
     * @return the {@link ScrollPane} wrapping this canvas
     */
    public ScrollPane getScrollPane()
    {
        return scrollPane;
    }

    /**
     * Returns the total sample count for the specified {@link Node} or {@link DiffNode}.
     *
     * @param node the input {@link Node} or {@link DiffNode}
     * @return the total sample count for the specified node
     */
    protected abstract int getTotalCount(U node);

    /**
     * Returns the information about the node to be shown in the InfoBar when the corresponding {@link FlameBlock} is
     * hovered over.
     *
     * @param node the {@link Node} or {@link DiffNode}
     * @return the information about the node to be shown in the InfoBar.
     */
    protected abstract String getNodeInfo(U node);

    // FlameBlock Management Methods

    /**
     * Clears the internal list of {@link FlameBlock}s.
     */
    protected void clearBlocks()
    {
        flameBlocks.clear();
    }

    /**
     * Add a {@link FlameBlock} with the specified parameters for the specified {@link Node} or {@link DiffNode}.
     *
     * @param x the x coordinate at which the block has been rendered
     * @param y the y coordinate at which the block has been rendered
     * @param width the width of the block
     * @param height the height of the block
     * @param node the node whose information is rendered in the block
     */
    protected void addBlock(double x, double y, double width, double height, U node)
    {
        flameBlocks.add(new FlameBlock<U>(new Rectangle(x, y, width, height), node));
    }

    // UI Helper Methods

    /**
     * Returns the pre-configured {@link GraphicsContext} for the {@link Canvas}.
     *
     * @return the pre-configured {@link GraphicsContext} for the {@link Canvas}
     */
    protected GraphicsContext getGraphics()
    {
        GraphicsContext ctx = getGraphicsContext2D();
        // Start from a blank canvas with a white stroke colour
        ctx.clearRect(0, 0, getWidth(), getHeight());
        ctx.setStroke(WHITE);
        return ctx;
    }

    /**
     * Renders the {@link Node} or {@link DiffNode}, and recursively renders the descendant nodes.
     *
     * @param ctx the {@link GraphicsContext} in which the node is rendered
     * @param node the node to be rendered
     * @param row the row in which the node is rendered
     * @param columnWidth the width for rendering the node box
     * @param rowHeight the height for rendering the node box
     * @param startX the x coordinate where to render the node
     * @param startY the y coordinate where the entire graph is rendered
     * @return the new x coordinate for rendering the next node in the same row
     */
    protected double renderNode(GraphicsContext ctx, U node, int row, double columnWidth,
        double rowHeight, double startX, double startY)
    {
        // Colour based on diff pct or row index for (nearly) no difference.
        Color fill = colorFor(node, row);
        ctx.setFill(fill);

        double x = startX;
        // Rows grow upwards from startY: deeper rows sit higher on the canvas
        double y = startY - ((row + 1) * rowHeight);
        // Block width is proportional to the node's total sample count
        double width = getTotalCount(node) * columnWidth;

        ctx.fillRect(x, y, width, rowHeight);
        // Remember the rectangle for mouse hit-testing in displayMethodName()
        addBlock(x, y, width, rowHeight, node);
        renderNodeText(ctx, fill.invert().darker(), x, y, width, rowHeight, node);

        // Children are laid out left-to-right in the row above; each recursive call
        // returns the child's width, which advances the running x offset
        for (U child : node.getChildren())
        {
            x += renderNode(ctx, child, row + 1, columnWidth, rowHeight, x, startY);
        }

        return width;
    }

    /**
     * Returns the {@link Color} in the default gradient palette based on the default start and end {@link Color}s, and
     * interpolating based on the row number. If the row number is larger than {@link #BUCKETS}, the gradient wraps
     * round.
     * <p>
     * This is the default colouring scheme for non-diff FlameGraph nodes.
     *
     * @param node the {@link Node} or {@link DiffNode} for which the {@link Color} is being picked
     * @param row the row on which the node is situated
     * @return the corresponding {@link Color} from the default palette
     */
    protected Color colorFor(U node, int row)
    {
        return DEFAUL_START.interpolate(DEFAULT_END, (row % BUCKETS) / (double)BUCKETS);
    }

    /**
     * Converts a percentage to an integer between 0 and {@link #BUCKETS}, with the bucket boundaries being determined
     * by {@link #limit(int)}.
     *
     * @param pct the percentage to be converted
     * @return an integer between 0 and {@link #BUCKETS}
     */
    protected int convertPercent(double pct)
    {
        double absPct = abs(pct);
        // Return the first bucket whose upper boundary exceeds the percentage
        for (int i = 1; i < BUCKETS; i++)
        {
            if (absPct < limit(i))
            {
                return i - 1;
            }
        }
        return BUCKETS;
    }

    // Internal Implementation Methods

    /**
     * Renders the textual information for the {@link Node} or {@link DiffNode}.
     *
     * @param ctx the {@link GraphicsContext} in which the text is rendered
     * @param color the {@link Color} for rendering the text
     * @param x the x coordinate where the text should be rendered
     * @param y the y coordinate where the text should be rendered
     * @param width the width within which the text should fit
     * @param height the height within which the text should fit
     * @param node the node whose information is to be rendered
     */
    private void renderNodeText(final GraphicsContext ctx, Color color, final double x,
        final double y, double width, double height, U node)
    {
        // Candidate labels, from longest/most descriptive to shortest; the first
        // one that fits the block width is rendered
        List<String> titles = new ArrayList<>();
        titles.add(node.getKey());

        MethodInfo method = methodForNode(node);
        if (method != null)
        {
            titles.add(method.getFqmn());
            titles.add(method.getCompactName());
            titles.add(method.getMethodName());
        }

        renderText(ctx, color, x, y, width, height, titles.toArray(new String[titles.size()]));
    }

    /**
     * Examine the width of the provided Strings, which should be specified in decreasing length, and render the longest
     * one which can be rendered in a rectangle with the specified width, or do not render anything.
     *
     * @param ctx the {@link GraphicsContext} for the rendition
     * @param color the color for the rendition
     * @param x the x coordinate where the text should be rendered
     * @param y the y coordinate where the text should be rendered
     * @param width the maximum width of the rectangle within which the text should be rendered
     * @param height the height of the rectangle within which the text should be rendered
     * @param titles the alternative labels to be rendered
     */
    private void renderText(final GraphicsContext ctx, Color color, final double x,
        final double y, double width, double height, String... titles)
    {
        for (String title : titles)
        {
            // Fit test uses a fixed per-character width estimate (TEXT_WIDTH)
            if (title.length() * TEXT_WIDTH < width)
            {
                ctx.setFill(color);
                ctx.fillText(title, x, y + (0.75 * height));
                return;
            }
        }
    }

    /**
     * Returns the upper boundary for the specified bucket number. The boundaries increase logarithmically.
     *
     * @param i the bucket number
     * @return the upper boundary
     */
    private double limit(int i)
    {
        // works well for range 1-20, so rescale from there
        return (exp(i * 0.3) / 1000.) * BUCKETS / 20;
    }

    /**
     * When the mouse enters the region of a {@link FlameBlock}, this method displays a tooltip containing the method
     * name of the {@link Node} or {@link DiffNode} represented by the {@link FlameBlock} being hovered over, and
     * displays more detailed information in the InfoBar.
     * <p>
     * If the mouse leaves a region, the information is hidden or cleared.
     *
     * @param mouseEvent the {@link MouseEvent} triggering the display
     */
    private void displayMethodName(final MouseEvent mouseEvent)
    {
        double x = mouseEvent.getX();
        double y = mouseEvent.getY();

        // Hit-test the rectangles recorded during the last render pass
        Optional<FlameBlock<U>> flameBlock = flameBlocks.stream()
            .filter(location -> location.contains(x, y)).findFirst();

        if (flameBlock.isPresent())
        {
            U node = flameBlock.get().getNode();
            tooltip.setText(node.getKey());
            // NOTE(review): (x, y) are canvas-local coordinates being used as
            // window-relative anchor offsets — verify tooltip placement is intended
            tooltip.show(getScene().getWindow(), x, y);
            appCtx.setRawInfo(getNodeInfo(node));
        }
        else
        {
            tooltip.hide();
            appCtx.clearInfo();
        }
    }
}
/*
 * Copyright 2015 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.javascript.jscomp;

import static com.google.common.base.Preconditions.checkState;

import com.google.common.collect.Iterables;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.parsing.parser.trees.Comment;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Set;
import java.util.regex.Pattern;
import javax.annotation.Nullable;

/**
 * Checks for misplaced, misused or deprecated JSDoc annotations.
 *
 * <p>This is a post-order AST traversal: every node's attached {@link JSDocInfo} is run through a
 * battery of {@code validate*} methods, each of which reports a diagnostic when an annotation
 * appears somewhere it has no effect or is outright invalid.
 */
final class CheckJSDoc extends AbstractPostOrderCallback implements CompilerPass {

  public static final DiagnosticType MISPLACED_MSG_ANNOTATION =
      DiagnosticType.disabled(
          "JSC_MISPLACED_MSG_ANNOTATION",
          "Misplaced message annotation. @desc, @hidden, @meaning, and @alternateMessageId"
              + " annotations should be only on message nodes."
              + "\nMessage constants must be prefixed with 'MSG_'.");

  public static final DiagnosticType MISPLACED_ANNOTATION =
      DiagnosticType.warning("JSC_MISPLACED_ANNOTATION", "Misplaced {0} annotation. {1}");

  public static final DiagnosticType ANNOTATION_DEPRECATED =
      DiagnosticType.warning("JSC_ANNOTATION_DEPRECATED", "The {0} annotation is deprecated. {1}");

  public static final DiagnosticType DISALLOWED_MEMBER_JSDOC =
      DiagnosticType.warning("JSC_DISALLOWED_MEMBER_JSDOC",
          "Class level JSDocs (@interface, @extends, etc.) are not allowed on class members");

  static final DiagnosticType ARROW_FUNCTION_AS_CONSTRUCTOR = DiagnosticType.error(
      "JSC_ARROW_FUNCTION_AS_CONSTRUCTOR",
      "Arrow functions cannot be used as constructors");

  // NOTE(review): this diagnostic key lacks the usual "JSC_" prefix used by every other
  // diagnostic in this file — confirm whether that is intentional (renaming it could break
  // existing suppressions, so it is only flagged here).
  static final DiagnosticType BAD_REST_PARAMETER_ANNOTATION = DiagnosticType.warning(
      "BAD_REST_PARAMETER_ANNOTATION",
      "Missing \"...\" in type annotation for rest parameter.");

  static final DiagnosticType DEFAULT_PARAM_MUST_BE_MARKED_OPTIONAL = DiagnosticType.error(
      "JSC_DEFAULT_PARAM_MUST_BE_MARKED_OPTIONAL",
      "Inline JSDoc on default parameters must be marked as optional");

  public static final DiagnosticType INVALID_NO_SIDE_EFFECT_ANNOTATION = DiagnosticType.error(
      "JSC_INVALID_NO_SIDE_EFFECT_ANNOTATION",
      "@nosideeffects may only appear in externs files.");

  public static final DiagnosticType INVALID_MODIFIES_ANNOTATION = DiagnosticType.error(
      "JSC_INVALID_MODIFIES_ANNOTATION",
      "@modifies may only appear in externs files.");

  public static final DiagnosticType INVALID_DEFINE_ON_LET = DiagnosticType.error(
      "JSC_INVALID_DEFINE_ON_LET",
      "variables annotated with @define may only be declared with VARs, ASSIGNs, or CONSTs");

  public static final DiagnosticType MISPLACED_SUPPRESS =
      DiagnosticType.warning(
          "JSC_MISPLACED_SUPPRESS",
          "@suppress annotation not allowed here. See"
              + " https://github.com/google/closure-compiler/wiki/@suppress-annotations");

  public static final DiagnosticType JSDOC_IN_BLOCK_COMMENT =
      DiagnosticType.warning(
          "JSC_JSDOC_IN_BLOCK_COMMENT",
          "Non-JSDoc comment has annotations. Did you mean to start it with '/**'?");

  public static final DiagnosticType JSDOC_ON_RETURN =
      DiagnosticType.warning(
          "JSC_JSDOC_ON_RETURN", "JSDoc annotations are not supported on return.");

  // Matches an "@word" annotation either right after the opening "/" of a comment or at the
  // start of a comment line, i.e. a block comment that *looks* like JSDoc.
  private static final Pattern COMMENT_PATTERN =
      Pattern.compile("(/|(\n[ \t]*))\\*[ \t]*@[a-zA-Z]+[ \t\n{]");

  private final AbstractCompiler compiler;
  // True while the externs tree is being traversed; some annotations are only valid there.
  private boolean inExterns;

  CheckJSDoc(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  @Override
  public void process(Node externs, Node root) {
    // Externs and source are traversed separately so validateImplicitCast() can tell
    // which tree the current node belongs to.
    inExterns = true;
    NodeTraversal.traverse(compiler, externs, this);
    inExterns = false;
    NodeTraversal.traverse(compiler, root, this);
  }

  /**
   * Checks for block comments (e.g. starting with /*) that look like they are JsDoc, and thus
   * should start with /**.
   */
  private void checkJsDocInBlockComments(String fileName) {
    if (!compiler.getOptions().preservesDetailedSourceInfo()) {
      // Comments only available if preservesDetailedSourceInfo is true.
      return;
    }

    for (Comment comment : compiler.getComments(fileName)) {
      if (comment.type == Comment.Type.BLOCK) {
        if (COMMENT_PATTERN.matcher(comment.value).find()) {
          compiler.report(
              JSError.make(
                  fileName,
                  comment.location.start.line + 1,
                  comment.location.start.column,
                  JSDOC_IN_BLOCK_COMMENT));
        }
      }
    }
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    if (n.isScript()) {
      checkJsDocInBlockComments(n.getSourceFileName());
    }
    JSDocInfo info = n.getJSDocInfo();
    // Each validator is a no-op when its annotation is absent, so they are all run
    // unconditionally on every node.
    validateTypeAnnotations(n, info);
    validateFunctionJsDoc(n, info);
    validateMsgJsDoc(n, info);
    validateNoCollapse(n, info);
    validateClassLevelJsDoc(n, info);
    validateArrowFunction(n);
    validateRestParameter(n);
    validateDefaultValue(n);
    validateTemplates(n, info);
    validateTypedefs(n, info);
    validateNoSideEffects(n, info);
    validateAbstractJsDoc(n, info);
    validateDefinesDeclaration(n, info);
    validateSuppress(n, info);
    validateImplicitCast(n, info);
    validateClosurePrimitive(n, info);
    validateReturnJsDoc(n, info);
    validateLocaleFile(n, info);
  }

  /** Checks that {@code @localeFile} only appears in a fileoverview (i.e. on a SCRIPT node). */
  private void validateLocaleFile(Node n, JSDocInfo info) {
    if (info == null || !info.isLocaleFile()) {
      return;
    }

    if (!n.isScript()) {
      reportMisplaced(n, "localeFile", "localeFile must be in the fileoverview");
    }
  }

  /** Checks that {@code @suppress} is attached to a node where suppressions take effect. */
  private void validateSuppress(Node n, JSDocInfo info) {
    if (info == null || info.getSuppressions().isEmpty()) {
      return;
    }
    switch (n.getToken()) {
      case FUNCTION:
      case CLASS:
      case VAR:
      case LET:
      case CONST:
      case SCRIPT:
      case MEMBER_FUNCTION_DEF:
      case GETTER_DEF:
      case SETTER_DEF:
      case MEMBER_FIELD_DEF:
      case COMPUTED_FIELD_DEF:
        // Suppressions are always valid here.
        return;

      case COMPUTED_PROP:
        if (n.getLastChild().isFunction()) {
          return; // Suppressions are valid on computed properties that declare functions.
        }
        break;

      case STRING_KEY:
        if (n.getParent().isObjectLit()) {
          return;
        }
        break;

      case ASSIGN:
      case ASSIGN_BITOR:
      case ASSIGN_BITXOR:
      case ASSIGN_BITAND:
      case ASSIGN_LSH:
      case ASSIGN_RSH:
      case ASSIGN_URSH:
      case ASSIGN_ADD:
      case ASSIGN_SUB:
      case ASSIGN_MUL:
      case ASSIGN_DIV:
      case ASSIGN_MOD:
      case ASSIGN_EXPONENT:
      case GETPROP:
        if (n.getParent().isExprResult()) {
          return;
        }
        break;

      case CALL:
        // TODO(blickly): Stop ignoring no-op extraProvide suppression.
        // We don't actually support extraProvide, but if we did, it would go on a CALL.
        if (containsOnlySuppressionFor(info, "extraRequire")
            || containsOnlySuppressionFor(info, "extraProvide")) {
          return;
        }
        break;

      case WITH:
        if (containsOnlySuppressionFor(info, "with")) {
          return;
        }
        break;

      default:
        break;
    }
    // "missingRequire" is tolerated anywhere as a last resort.
    if (containsOnlySuppressionFor(info, "missingRequire")) {
      return;
    }
    compiler.report(JSError.make(n, MISPLACED_SUPPRESS));
  }

  /** Returns whether the JSDoc contains exactly one suppression, and it is the given one. */
  private static boolean containsOnlySuppressionFor(JSDocInfo jsdoc, String allowedSuppression) {
    Set<String> suppressions = jsdoc.getSuppressions();
    return suppressions.size() == 1
        && Iterables.getOnlyElement(suppressions).equals(allowedSuppression);
  }

  /** Checks that {@code @typedef} appears only on qualified-name declarations. */
  private void validateTypedefs(Node n, JSDocInfo info) {
    if (info == null || !info.hasTypedefType()) {
      return;
    }
    if (isClassDecl(n)) {
      reportMisplaced(n, "typedef", "@typedef is not allowed on a class declaration.");
      return;
    }
    // The typedef lives on the lvalue: the NAME of a declaration, the LHS of an assignment,
    // or the node itself otherwise.
    Node lvalue = NodeUtil.isNameDeclaration(n) || n.isAssign() ? n.getFirstChild() : n;
    // Static properties for goog.defineClass are rewritten to qualified names before typechecking
    // runs and are valid as @typedefs.
    if (!lvalue.isQualifiedName() && !isGoogDefineClassStatic(lvalue)) {
      reportMisplaced(
          n,
          "typedef",
          "@typedef is only allowed on qualified name declarations. Did you mean @type?");
    } else if (isPrototypeOrInstanceDecl(lvalue)) {
      reportMisplaced(
          n,
          "typedef",
          "@typedef is not allowed on instance or prototype properties. Did you mean @type?");
    }
  }

  /** Whether this is a property in this object: {@code goog.defineClass(superClass, {statics: {} */
  private boolean isGoogDefineClassStatic(Node n) {
    return n.isStringKey()
        && n.getParent().isObjectLit()
        && n.getGrandparent().isStringKey()
        && n.getGrandparent().getString().equals("statics");
  }

  /** Checks that {@code @template} appears only on declarations that can introduce type params. */
  private void validateTemplates(Node n, JSDocInfo info) {
    if (info != null
        && !info.getTemplateTypeNames().isEmpty()
        && !info.isConstructorOrInterface()
        && !isClassDecl(n)
        && !info.containsFunctionDeclaration()
        && getFunctionDecl(n) == null) {
      reportMisplaced(n, "template",
          "@template is only allowed in class, constructor, interface, function "
          + "or method declarations");
    }
  }

  /**
   * @return The function node associated with the function declaration associated with the
   *     specified node, or null if no such function exists.
   */
  @Nullable
  private static Node getFunctionDecl(Node n) {
    if (n.isFunction()) {
      return n;
    }

    if (n.isMemberFunctionDef()) {
      return n.getFirstChild();
    }

    // var f = function() {...};
    if (NodeUtil.isNameDeclaration(n)
        && n.getFirstFirstChild() != null
        && n.getFirstFirstChild().isFunction()) {
      return n.getFirstFirstChild();
    }

    // a.b.c = function() {...};
    if (n.isAssign() && n.getFirstChild().isQualifiedName() && n.getLastChild().isFunction()) {
      return n.getLastChild();
    }

    // a method inside a goog.defineClass() descriptor object literal
    if (n.isStringKey()
        && n.getGrandparent() != null
        && ClosureRewriteClass.isGoogDefineClass(n.getGrandparent())
        && n.getFirstChild().isFunction()) {
      return n.getFirstChild();
    }

    if (n.isGetterDef() || n.isSetterDef()) {
      return n.getFirstChild();
    }

    if (n.isComputedProp() && n.getLastChild().isFunction()) {
      return n.getLastChild();
    }

    return null;
  }

  /** Whether the node declares a class (directly, via assignment, or via name initialization). */
  private boolean isClassDecl(Node n) {
    return isClass(n)
        || (n.isAssign() && isClass(n.getLastChild()))
        || (NodeUtil.isNameDeclaration(n) && isNameInitializeWithClass(n.getFirstChild()))
        || isNameInitializeWithClass(n);
  }

  private boolean isNameInitializeWithClass(Node n) {
    return n != null && n.isName() && n.hasChildren() && isClass(n.getFirstChild());
  }

  /** A CLASS node, or a call the coding convention recognizes as a class factory. */
  private boolean isClass(Node n) {
    return n.isClass()
        || (n.isCall() && compiler.getCodingConvention().isClassFactoryCall(n));
  }

  /** Whether the lvalue targets a prototype property or a this/super property. */
  private static boolean isPrototypeOrInstanceDecl(Node n) {
    if (n.isStringKey()) {
      return false;
    }
    if (NodeUtil.isPrototypeProperty(n)) {
      return true;
    }
    Node receiver = NodeUtil.getRootOfQualifiedName(n);
    return receiver.isThis() || receiver.isSuper();
  }

  /**
   * Checks that class-level annotations like @interface/@extends are not used on member functions.
   */
  private void validateClassLevelJsDoc(Node n, JSDocInfo info) {
    if (info != null && n.isMemberFunctionDef() && hasClassLevelJsDoc(info)) {
      report(n, DISALLOWED_MEMBER_JSDOC);
    }
  }

  /** Checks that {@code @abstract} appears only where an abstract member is legal. */
  private void validateAbstractJsDoc(Node n, JSDocInfo info) {
    if (info == null || !info.isAbstract()) {
      return;
    }
    if (isClassDecl(n)) {
      // @abstract is always fine on a class declaration itself.
      return;
    }

    Node functionNode = getFunctionDecl(n);
    if (functionNode == null) {
      // @abstract annotation on a non-function
      report(
          n,
          MISPLACED_ANNOTATION,
          "@abstract",
          "only functions or non-static methods can be abstract");
      return;
    }

    if (!info.isConstructor() && NodeUtil.getFunctionBody(functionNode).hasChildren()) {
      // @abstract annotation on a function with a non-empty body
      report(n, MISPLACED_ANNOTATION, "@abstract",
          "function with a non-empty body cannot be abstract");
      return;
    }

    // TODO(b/124020008): Delete this case when `goog.defineClass` is dropped.
    boolean isGoogDefineClassConstructor =
        n.getParent().isObjectLit()
            && (n.isMemberFunctionDef() || n.isStringKey())
            && "constructor".equals(n.getString());

    if (NodeUtil.isEs6ConstructorMemberFunctionDef(n) || isGoogDefineClassConstructor) {
      // @abstract annotation on an ES6 or goog.defineClass constructor
      report(n, MISPLACED_ANNOTATION, "@abstract", "constructors cannot be abstract");
      return;
    }

    if (!info.isConstructor()
        && !n.isMemberFunctionDef()
        && !n.isStringKey()
        && !n.isComputedProp()
        && !n.isGetterDef()
        && !n.isSetterDef()
        && !NodeUtil.isPrototypeMethod(functionNode)) {
      // @abstract annotation on a non-method (or static method) in ES5
      report(
          n,
          MISPLACED_ANNOTATION,
          "@abstract",
          "only functions or non-static methods can be abstract");
      return;
    }

    if (n.isStaticMember()) {
      // @abstract annotation on a static method in ES6
      report(n, MISPLACED_ANNOTATION, "@abstract", "static methods cannot be abstract");
      return;
    }
  }

  private static boolean hasClassLevelJsDoc(JSDocInfo info) {
    return info.isConstructorOrInterface()
        || info.hasBaseType()
        || info.getImplementedInterfaceCount() != 0
        || info.getExtendedInterfacesCount() != 0;
  }

  /**
   * Warns when nocollapse annotations are present on nodes
   * which are not eligible for property collapsing.
   */
  private void validateNoCollapse(Node n, JSDocInfo info) {
    if (info == null || !info.isNoCollapse()) {
      return;
    }
    if (n.isFromExterns()) {
      // @nocollapse has no effect in externs
      reportMisplaced(n, "nocollapse", "This JSDoc has no effect in externs.");
      return;
    }
    // Prototype and non-static class-member properties are never collapsed anyway.
    if (NodeUtil.isPrototypePropertyDeclaration(n.getParent())
        || (n.getParent().isClassMembers() && !n.isStaticMember())) {
      reportMisplaced(n, "nocollapse", "This JSDoc has no effect on prototype properties.");
    }
  }

  /**
   * Checks that JSDoc intended for a function is actually attached to a
   * function.
   */
  private void validateFunctionJsDoc(Node n, JSDocInfo info) {
    if (info == null) {
      return;
    }

    if (info.containsFunctionDeclaration() && !info.hasType() && !isJSDocOnFunctionNode(n, info)) {
      // This JSDoc should be attached to a FUNCTION node, or an assignment
      // with a function as the RHS, etc.
      reportMisplaced(
          n,
          "function",
          "This JSDoc is not attached to a function node. " + "Are you missing parentheses?");
    }
  }

  /**
   * Whether this node's JSDoc may apply to a function
   *
   * <p>This has some false positive cases, to allow for patterns like goog.abstractMethod.
   */
  private boolean isJSDocOnFunctionNode(Node n, JSDocInfo info) {
    switch (n.getToken()) {
      case FUNCTION:
      case GETTER_DEF:
      case SETTER_DEF:
      case MEMBER_FUNCTION_DEF:
      case STRING_KEY:
      case COMPUTED_PROP:
      case EXPORT:
      case MEMBER_FIELD_DEF:
      case COMPUTED_FIELD_DEF:
        return true;
      case GETELEM:
      case GETPROP:
        if (n.getFirstChild().isQualifiedName()) {
          // assume qualified names may be function declarations
          return true;
        }
        return false;
      case VAR:
      case LET:
      case CONST:
      case ASSIGN:
        {
          Node lhs = n.getFirstChild();
          Node rhs = NodeUtil.getRValueOfLValue(lhs);
          if (rhs != null && isClass(rhs) && !info.isConstructor()) {
            return false;
          }

          // TODO(b/124081098): Check that the RHS of the assignment is a
          // function. Note that it can be a FUNCTION node, but it can also be
          // a call to goog.abstractMethod, goog.functions.constant, etc.
          return true;
        }
      default:
        return false;
    }
  }

  /**
   * Checks that annotations for messages ({@code @desc}, {@code @hidden}, {@code @meaning} and
   * {@code @alternateMessageId}) are in the proper place, namely on names starting with MSG_ which
   * indicates they should be extracted for translation. A later pass checks that the right side is
   * a call to goog.getMsg.
   */
  private void validateMsgJsDoc(Node n, JSDocInfo info) {
    if (info == null) {
      return;
    }

    if (info.getDescription() != null
        || info.isHidden()
        || info.getMeaning() != null
        || info.getAlternateMessageId() != null) {
      boolean descOkay = false;
      switch (n.getToken()) {
        case ASSIGN:
        case VAR:
        case LET:
        case CONST:
          descOkay = isValidMsgName(n.getFirstChild());
          break;
        case STRING_KEY:
          descOkay = isValidMsgName(n);
          break;
        case GETPROP:
          if (n.isFromExterns() && n.isQualifiedName()) {
            descOkay = isValidMsgName(n);
          }
          break;
        default:
          break;
      }
      if (!descOkay) {
        report(n, MISPLACED_MSG_ANNOTATION);
      }
    }
  }

  /** Returns whether or not the given name is a valid target for the result of goog.getMsg */
  private static boolean isValidMsgName(Node nameNode) {
    if (nameNode.isName() || nameNode.isStringKey()) {
      return nameNode.getString().startsWith("MSG_");
    } else if (nameNode.isQualifiedName()) {
      return nameNode.getString().startsWith("MSG_");
    } else {
      return false;
    }
  }

  /**
   * Check that JSDoc with a {@code @type} annotation is in a valid place.
   */
  private void validateTypeAnnotations(Node n, JSDocInfo info) {
    if (info != null && info.hasType()) {
      boolean valid = false;
      switch (n.getToken()) {
          // Function declarations are valid
        case FUNCTION:
          valid = NodeUtil.isFunctionDeclaration(n);
          break;
          // Object literal properties, catch declarations and variable
          // initializers are valid.
        case NAME:
          valid = isTypeAnnotationAllowedForName(n);
          break;
        case ARRAY_PATTERN:
        case OBJECT_PATTERN:
          // allow JSDoc like
          //   function f(/** !Object */ {x}) {}
          //   function f(/** !Array */ [x]) {}
          valid = n.getParent().isParamList();
          break;
          // Casts, exports, and Object literal properties are valid.
        case CAST:
        case EXPORT:
        case STRING_KEY:
        case GETTER_DEF:
        case SETTER_DEF:
        case MEMBER_FIELD_DEF:
        case COMPUTED_FIELD_DEF:
          valid = true;
          break;
          // Declarations are valid iff they only contain simple names
          //   /** @type {number} */ var x = 3; // ok
          //   /** @type {number} */ var {x} = obj; // forbidden
        case VAR:
        case LET:
        case CONST:
          valid = !NodeUtil.isDestructuringDeclaration(n);
          break;
          // Property assignments are valid, if at the root of an expression.
        case ASSIGN:
          {
            Node lvalue = n.getFirstChild();
            valid =
                n.getParent().isExprResult()
                    && (lvalue.isGetProp() || lvalue.isGetElem() || lvalue.matchesName("exports"));
            break;
          }
        case GETPROP:
          valid = n.getParent().isExprResult() && n.isQualifiedName();
          break;
        case CALL:
          // A @type on a CALL is only meaningful for @define overrides.
          valid = info.isDefine();
          break;
        default:
          break;
      }

      if (!valid) {
        reportMisplaced(n, "type", "Type annotations are not allowed here. "
            + "Are you missing parentheses?");
      }
    }
  }

  /** Is it valid to have a type annotation on the given NAME node? */
  private static boolean isTypeAnnotationAllowedForName(Node n) {
    checkState(n.isName(), n);
    // Only allow type annotations on nodes used as an lvalue.
    if (!NodeUtil.isLValue(n)) {
      return false;
    }
    // Don't allow JSDoc on a name in an assignment. Simple names should only have JSDoc on them
    // when originally declared.
    Node rootTarget = NodeUtil.getRootTarget(n);
    return !NodeUtil.isLhsOfAssign(rootTarget);
  }

  private void reportMisplaced(Node n, String annotationName, String note) {
    compiler.report(JSError.make(n, MISPLACED_ANNOTATION, annotationName, note));
  }

  private void report(Node n, DiagnosticType type, String... arguments) {
    compiler.report(JSError.make(n, type, arguments));
  }

  /**
   * Check that an arrow function is not annotated with {@code @constructor}.
   */
  private void validateArrowFunction(Node n) {
    if (n.isArrowFunction()) {
      JSDocInfo info = NodeUtil.getBestJSDocInfo(n);
      if (info != null && info.isConstructorOrInterface()) {
        report(n, ARROW_FUNCTION_AS_CONSTRUCTOR);
      }
    }
  }

  /** Check that a rest parameter has JSDoc marked as variadic. */
  private void validateRestParameter(Node restParam) {
    if (!restParam.isRest() || !restParam.getParent().isParamList()) {
      return;
    }
    Node paramList = restParam.getParent();
    // Inline JSDoc on the parameter wins over the function-level JSDoc.
    JSDocInfo inlineInfo = restParam.getFirstChild().getJSDocInfo();
    JSDocInfo functionInfo = NodeUtil.getBestJSDocInfo(paramList.getParent());
    final JSTypeExpression paramTypeAnnotation;
    if (inlineInfo != null) {
      paramTypeAnnotation = inlineInfo.getType();
    } else if (functionInfo != null) {
      if (restParam.getFirstChild().isName()) {
        String paramName = restParam.getFirstChild().getString();
        paramTypeAnnotation = functionInfo.getParameterType(paramName);
      } else {
        // destructuring rest param. use the nth JSDoc parameter if present. the name will not match
        int indexOfRest = paramList.getIndexOfChild(restParam);
        // NOTE(review): getParameterNameAt presumably requires index < getParameterCount();
        // the ">=" comparison looks off-by-one when the JSDoc documents exactly indexOfRest
        // parameters (it would ask for a name one past the end) — confirm against JSDocInfo.
        paramTypeAnnotation =
            functionInfo.getParameterCount() >= indexOfRest
                ? functionInfo.getParameterType(functionInfo.getParameterNameAt(indexOfRest))
                : null;
      }
    } else {
      paramTypeAnnotation = null;
    }

    if (paramTypeAnnotation != null
        && paramTypeAnnotation.getRoot().getToken() != Token.ITER_REST) {
      compiler.report(JSError.make(restParam, BAD_REST_PARAMETER_ANNOTATION));
    }
  }

  /**
   * Check that a parameter with a default value is marked as optional.
   * TODO(bradfordcsmith): This is redundant. We shouldn't require it.
   */
  private void validateDefaultValue(Node n) {
    if (n.isDefaultValue() && n.getParent().isParamList()) {
      Node targetNode = n.getFirstChild();
      JSDocInfo info = targetNode.getJSDocInfo();
      if (info == null) {
        return;
      }

      JSTypeExpression typeExpr = info.getType();
      if (typeExpr == null) {
        return;
      }

      // An optional inline type ("{number=}") parses to an EQUALS root.
      Node typeNode = typeExpr.getRoot();
      if (typeNode.getToken() != Token.EQUALS) {
        report(typeNode, DEFAULT_PARAM_MUST_BE_MARKED_OPTIONAL);
      }
    }
  }

  /**
   * Check that @nosideeffects annotations are only present in externs.
   */
  private void validateNoSideEffects(Node n, JSDocInfo info) {
    // Cannot have @modifies or @nosideeffects in regular (non externs) js. Report errors.
    if (info == null) {
      return;
    }

    if (n.isFromExterns()) {
      return;
    }

    if (info.hasSideEffectsArgumentsAnnotation() || info.modifiesThis()) {
      report(n, INVALID_MODIFIES_ANNOTATION);
    }
    if (info.isNoSideEffects()) {
      report(n, INVALID_NO_SIDE_EFFECT_ANNOTATION);
    }
  }

  /**
   * Check that a let declaration is not used with {@code @define}
   */
  private void validateDefinesDeclaration(Node n, JSDocInfo info) {
    if (info != null && info.isDefine() && n.isLet()) {
      report(n, INVALID_DEFINE_ON_LET);
    }
  }

  /** Checks that an @implicitCast annotation is in the externs */
  private void validateImplicitCast(Node n, JSDocInfo info) {
    if (!inExterns && info != null && info.isImplicitCast()) {
      report(n, TypeCheck.ILLEGAL_IMPLICIT_CAST);
    }
  }

  /** Checks that a @closurePrimitive {id} is on a function */
  private void validateClosurePrimitive(Node n, JSDocInfo info) {
    if (info == null || !info.hasClosurePrimitiveId()) {
      return;
    }

    if (!isJSDocOnFunctionNode(n, info)) {
      report(n, MISPLACED_ANNOTATION, "closurePrimitive", "must be on a function node");
    }
  }

  /** Checks that there are no annotations on return. */
  private void validateReturnJsDoc(Node n, JSDocInfo info) {
    if (!n.isReturn() || info == null) {
      return;
    }
    // @type and @typedef are handled separately
    if (info.containsDeclaration() && !info.hasType() && !info.hasTypedefType()) {
      report(n, JSDOC_ON_RETURN);
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.geobounds;

import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalMetricsAggregation;

import java.io.IOException;
import java.util.Map;

/**
 * Result of the {@code geo_bounds} metrics aggregation: the smallest bounding box that
 * encloses all matching geo points, optionally allowing the box to wrap the antimeridian.
 */
public class InternalGeoBounds extends InternalMetricsAggregation implements GeoBounds {

    public final static Type TYPE = new Type("geo_bounds");

    public final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
        @Override
        public InternalGeoBounds readResult(StreamInput in) throws IOException {
            InternalGeoBounds result = new InternalGeoBounds();
            result.readFrom(in);
            return result;
        }
    };

    // Latitude extremes: top is the maximum, bottom the minimum (see reduce()).
    private double top;
    private double bottom;
    // Longitude extremes, tracked separately per hemisphere sign so the box can
    // optionally wrap the antimeridian. posLeft/negLeft are minima, posRight/negRight
    // are maxima (see reduce()). Presumably pos* covers points with positive longitude
    // and neg* points with negative longitude — set by the collecting aggregator,
    // which is outside this file.
    private double posLeft;
    private double posRight;
    private double negLeft;
    private double negRight;
    // Whether resolveBoundingBox() may produce a box crossing the +/-180 meridian.
    private boolean wrapLongitude;

    InternalGeoBounds() {
    }

    InternalGeoBounds(String name, double top, double bottom, double posLeft, double posRight,
            double negLeft, double negRight, boolean wrapLongitude, Map<String, Object> metaData) {
        super(name, metaData);
        this.top = top;
        this.bottom = bottom;
        this.posLeft = posLeft;
        this.posRight = posRight;
        this.negLeft = negLeft;
        this.negRight = negRight;
        this.wrapLongitude = wrapLongitude;
    }

    @Override
    public Type type() {
        return TYPE;
    }

    /**
     * Merges the per-shard bounds into global extremes. The accumulators start at the
     * opposite infinity so that any real bound replaces them; a shard that saw no points
     * (all fields still infinite) therefore contributes nothing.
     */
    @Override
    public InternalAggregation reduce(ReduceContext reduceContext) {
        double top = Double.NEGATIVE_INFINITY;
        double bottom = Double.POSITIVE_INFINITY;
        double posLeft = Double.POSITIVE_INFINITY;
        double posRight = Double.NEGATIVE_INFINITY;
        double negLeft = Double.POSITIVE_INFINITY;
        double negRight = Double.NEGATIVE_INFINITY;
        for (InternalAggregation aggregation : reduceContext.aggregations()) {
            InternalGeoBounds bounds = (InternalGeoBounds) aggregation;

            if (bounds.top > top) {
                top = bounds.top;
            }
            if (bounds.bottom < bottom) {
                bottom = bounds.bottom;
            }
            if (bounds.posLeft < posLeft) {
                posLeft = bounds.posLeft;
            }
            if (bounds.posRight > posRight) {
                posRight = bounds.posRight;
            }
            if (bounds.negLeft < negLeft) {
                negLeft = bounds.negLeft;
            }
            if (bounds.negRight > negRight) {
                negRight = bounds.negRight;
            }
        }
        // wrapLongitude is taken from this (the reducing) instance; it is the same
        // request-level setting on every shard result.
        return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight,
                wrapLongitude, getMetaData());
    }

    @Override
    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        GeoPoint topLeft = topLeft();
        GeoPoint bottomRight = bottomRight();
        // topLeft and bottomRight are both null or both non-null (same resolveBoundingBox()
        // result), so checking one suffices; no "bounds" object is emitted when empty.
        if (topLeft != null) {
            builder.startObject("bounds");
            builder.startObject("top_left");
            builder.field("lat", topLeft.lat());
            builder.field("lon", topLeft.lon());
            builder.endObject();
            builder.startObject("bottom_right");
            builder.field("lat", bottomRight.lat());
            builder.field("lon", bottomRight.lon());
            builder.endObject();
            builder.endObject();
        }
        return builder;
    }

    // Field order here must stay in sync with doWriteTo().
    @Override
    protected void doReadFrom(StreamInput in) throws IOException {
        top = in.readDouble();
        bottom = in.readDouble();
        posLeft = in.readDouble();
        posRight = in.readDouble();
        negLeft = in.readDouble();
        negRight = in.readDouble();
        wrapLongitude = in.readBoolean();
    }

    // Field order here must stay in sync with doReadFrom().
    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeDouble(top);
        out.writeDouble(bottom);
        out.writeDouble(posLeft);
        out.writeDouble(posRight);
        out.writeDouble(negLeft);
        out.writeDouble(negRight);
        out.writeBoolean(wrapLongitude);
    }

    public static void registerStream() {
        AggregationStreams.registerStream(STREAM, TYPE.stream());
    }

    /** Immutable pair of corner points describing a resolved bounding box. */
    private static class BoundingBox {
        private final GeoPoint topLeft;
        private final GeoPoint bottomRight;

        public BoundingBox(GeoPoint topLeft, GeoPoint bottomRight) {
            this.topLeft = topLeft;
            this.bottomRight = bottomRight;
        }

        public GeoPoint topLeft() {
            return topLeft;
        }

        public GeoPoint bottomRight() {
            return bottomRight;
        }
    }

    /**
     * Collapses the per-hemisphere longitude extremes into a single bounding box, or returns
     * null when no points were collected (top is still at its infinite sentinel).
     */
    private BoundingBox resolveBoundingBox() {
        if (Double.isInfinite(top)) {
            // No points at all.
            return null;
        } else if (Double.isInfinite(posLeft)) {
            // Only negative-longitude points were seen.
            return new BoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, negRight));
        } else if (Double.isInfinite(negLeft)) {
            // Only positive-longitude points were seen.
            return new BoundingBox(new GeoPoint(top, posLeft), new GeoPoint(bottom, posRight));
        } else if (wrapLongitude) {
            // Points on both sides: pick whichever box is narrower — the conventional one
            // spanning the prime meridian, or the one wrapping the +/-180 antimeridian.
            double unwrappedWidth = posRight - negLeft;
            double wrappedWidth = (180 - posLeft) - (-180 - negRight);
            if (unwrappedWidth <= wrappedWidth) {
                return new BoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, posRight));
            } else {
                return new BoundingBox(new GeoPoint(top, posLeft), new GeoPoint(bottom, negRight));
            }
        } else {
            // Wrapping disabled: always span the prime meridian.
            return new BoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, posRight));
        }
    }

    @Override
    public GeoPoint topLeft() {
        BoundingBox boundingBox = resolveBoundingBox();
        if (boundingBox == null) {
            return null;
        } else {
            return boundingBox.topLeft();
        }
    }

    @Override
    public GeoPoint bottomRight() {
        BoundingBox boundingBox = resolveBoundingBox();
        if (boundingBox == null) {
            return null;
        } else {
            return boundingBox.bottomRight();
        }
    }
}
/* Launch4j (http://launch4j.sourceforge.net/) Cross-platform Java application wrapper for creating Windows native executables. Copyright (c) 2004, 2007 Grzegorz Kowal All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Launch4j nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

/*
 * Created on 2005-05-09
 */
package net.sf.launch4j.formimpl;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;

import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JToolBar;
import javax.swing.UIManager;

import com.jgoodies.looks.Options;
import com.jgoodies.looks.plastic.PlasticXPLookAndFeel;
import com.jgoodies.looks.windows.WindowsLookAndFeel;

import foxtrot.Task;
import foxtrot.Worker;

import net.sf.launch4j.Builder;
import net.sf.launch4j.BuilderException;
import net.sf.launch4j.ExecException;
import net.sf.launch4j.FileChooserFilter;
import net.sf.launch4j.Log;
import net.sf.launch4j.Main;
import net.sf.launch4j.Util;
import net.sf.launch4j.binding.Binding;
import net.sf.launch4j.binding.BindingException;
import net.sf.launch4j.binding.InvariantViolationException;
import net.sf.launch4j.config.Config;
import net.sf.launch4j.config.ConfigPersister;
import net.sf.launch4j.config.ConfigPersisterException;

/**
 * Main application window: hosts the toolbar (new / open / save / build /
 * run / about) and the configuration form, and drives loading, saving,
 * building and test-running of wrapper configurations.
 *
 * @author Copyright (C) 2005 Grzegorz Kowal
 */
public class MainFrame extends JFrame {
	private static MainFrame _instance;

	private final JToolBar _toolBar;
	private final JButton _runButton;
	private final ConfigFormImpl _configForm;
	private final JFileChooser _fileChooser = new FileChooser(MainFrame.class);
	// Executable produced by the last successful build; null disables "run".
	private File _outfile;
	// True once the current configuration has been persisted to disk.
	private boolean _saved = false;

	/**
	 * Creates the singleton frame, installing a platform look &amp; feel
	 * first (Windows L&amp;F on Windows, PlasticXP elsewhere).
	 */
	public static void createInstance() {
		try {
			Toolkit.getDefaultToolkit().setDynamicLayout(true);
			System.setProperty("sun.awt.noerasebackground","true");
			// JGoodies
			Options.setDefaultIconSize(new Dimension(16, 16));	// menu icons
			Options.setUseNarrowButtons(false);
			Options.setPopupDropShadowEnabled(true);
			UIManager.setLookAndFeel(System.getProperty("os.name").toLowerCase()
					.startsWith("windows")
							? new WindowsLookAndFeel()
							: new PlasticXPLookAndFeel());
			_instance = new MainFrame();
		} catch (Exception e) {
			System.err.println(e);
		}
	}

	/** @return the singleton created by {@link #createInstance()}, or null. */
	public static MainFrame getInstance() {
		return _instance;
	}

	public MainFrame() {
		showConfigName(null);
		setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
		addWindowListener(new MainFrameListener());
		setGlassPane(new GlassPane(this));
		_fileChooser.setFileFilter(new FileChooserFilter(
				Messages.getString("MainFrame.config.files"),
				new String[] {".xml", ".cfg"}));

		_toolBar = new JToolBar();
		_toolBar.setFloatable(false);
		_toolBar.setRollover(true);
		addButton("images/new.png", Messages.getString("MainFrame.new.config"),
				new NewActionListener());
		addButton("images/open.png", Messages.getString("MainFrame.open.config"),
				new OpenActionListener());
		addButton("images/save.png", Messages.getString("MainFrame.save.config"),
				new SaveActionListener());
		_toolBar.addSeparator();
		addButton("images/build.png", Messages.getString("MainFrame.build.wrapper"),
				new BuildActionListener());
		_runButton = addButton("images/run.png",
				Messages.getString("MainFrame.test.wrapper"),
				new RunActionListener());
		setRunEnabled(false);
		_toolBar.addSeparator();
		addButton("images/info.png", Messages.getString("MainFrame.about.launch4j"),
				new AboutActionListener());

		_configForm = new ConfigFormImpl();
		getContentPane().setLayout(new BorderLayout());
		getContentPane().add(_toolBar, BorderLayout.NORTH);
		getContentPane().add(_configForm, BorderLayout.CENTER);
		pack();
		// Center the frame, slightly enlarged to leave room for the log area.
		Dimension scr = Toolkit.getDefaultToolkit().getScreenSize();
		Dimension fr = getSize();
		fr.width += 25;
		fr.height += 100;
		setBounds((scr.width - fr.width) / 2, (scr.height - fr.height) / 2,
				fr.width, fr.height);
		setVisible(true);
	}

	/** Adds a toolbar button with the given icon, tooltip and action. */
	private JButton addButton(String iconPath, String tooltip, ActionListener l) {
		ImageIcon icon = new ImageIcon(MainFrame.class.getClassLoader()
				.getResource(iconPath));
		JButton b = new JButton(icon);
		b.setToolTipText(tooltip);
		b.addActionListener(l);
		_toolBar.add(b);
		return b;
	}

	/** Shows an informational message dialog. */
	public void info(String text) {
		JOptionPane.showMessageDialog(this,
				text,
				Main.getName(),
				JOptionPane.INFORMATION_MESSAGE);
	}

	/** Shows a warning message dialog. */
	public void warn(String text) {
		JOptionPane.showMessageDialog(this,
				text,
				Main.getName(),
				JOptionPane.WARNING_MESSAGE);
	}

	/**
	 * Shows a warning for a violated form invariant, temporarily marking the
	 * offending binding invalid so the user can locate the field.
	 */
	public void warn(InvariantViolationException e) {
		Binding b = e.getBinding();
		if (b != null) {
			b.markInvalid();
		}
		warn(e.getMessage());
		if (b != null) {
			b.markValid();	// reuse the null-checked local, not e.getBinding()
		}
	}

	/** Asks a yes/no question; returns true if the user confirmed. */
	public boolean confirm(String text) {
		return JOptionPane.showConfirmDialog(MainFrame.this,
				text,
				Messages.getString("MainFrame.confirm"),
				JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION;
	}

	/** True when the form is unmodified or the user agreed to discard edits. */
	private boolean canDiscardChanges() {
		return (!_configForm.isModified())
				|| confirm(Messages.getString("MainFrame.discard.changes"));
	}

	/**
	 * Saves the current configuration, prompting for a target file and
	 * appending a ".xml" extension when missing.
	 *
	 * @return true if the configuration was written, false on cancel or error
	 */
	private boolean save() {
		try {
			_configForm.get(ConfigPersister.getInstance().getConfig());
			// FIX: compare against JFileChooser.APPROVE_OPTION — the documented
			// return value for an accepted file dialog. JOptionPane.YES_OPTION
			// only happens to share the numeric value 0.
			if (_fileChooser.showSaveDialog(MainFrame.this)
					== JFileChooser.APPROVE_OPTION) {
				File f = _fileChooser.getSelectedFile();
				if (!f.getPath().endsWith(".xml")) {
					f = new File(f.getPath() + ".xml");
				}
				ConfigPersister.getInstance().save(f);
				_saved = true;
				showConfigName(f);
				return true;
			}
			return false;
		} catch (InvariantViolationException ex) {
			warn(ex);
			return false;
		} catch (BindingException ex) {
			warn(ex.getMessage());
			return false;
		} catch (ConfigPersisterException ex) {
			warn(ex.getMessage());
			return false;
		}
	}

	/** Updates the window title with the loaded config name (or "untitled"). */
	private void showConfigName(File config) {
		setTitle(Main.getName() + " - " + (config != null
				? config.getName()
				: Messages.getString("MainFrame.untitled")));
	}

	/** Enables/disables the "run" button; disabling forgets the built file. */
	private void setRunEnabled(boolean enabled) {
		if (!enabled) {
			_outfile = null;
		}
		_runButton.setEnabled(enabled);
	}

	/** Resets the persisted configuration and clears the form. */
	private void clearConfig() {
		ConfigPersister.getInstance().createBlank();
		_configForm.clear(ConfigPersister.getInstance().getConfig());
	}

	private class MainFrameListener extends WindowAdapter {
		public void windowOpened(WindowEvent e) {
			clearConfig();
		}

		public void windowClosing(WindowEvent e) {
			if (canDiscardChanges()) {
				System.exit(0);
			}
		}
	}

	private class NewActionListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			if (canDiscardChanges()) {
				clearConfig();
				_saved = false;
				showConfigName(null);
				setRunEnabled(false);
			}
		}
	}

	private class OpenActionListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			try {
				// FIX: JFileChooser.APPROVE_OPTION, not JOptionPane.YES_OPTION
				// (coincidentally equal constants — see save()).
				if (canDiscardChanges()
						&& _fileChooser.showOpenDialog(MainFrame.this)
								== JFileChooser.APPROVE_OPTION) {
					final File f = _fileChooser.getSelectedFile();
					if (f.getPath().endsWith(".xml")) {
						ConfigPersister.getInstance().load(f);
						_saved = true;
					} else {
						// Legacy 1.x configs are imported but not yet saved.
						ConfigPersister.getInstance().loadVersion1(f);
						_saved = false;
					}
					_configForm.put(ConfigPersister.getInstance().getConfig());
					showConfigName(f);
					setRunEnabled(false);
				}
			} catch (ConfigPersisterException ex) {
				warn(ex.getMessage());
			} catch (BindingException ex) {
				warn(ex.getMessage());
			}
		}
	}

	private class SaveActionListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			save();
		}
	}

	private class BuildActionListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			final Log log = Log.getSwingLog(_configForm.getLogTextArea());
			try {
				// Persist unsaved edits before building; abort on cancel.
				if ((!_saved || _configForm.isModified()) && !save()) {
					return;
				}
				log.clear();
				ConfigPersister.getInstance().getConfig().checkInvariants();
				Builder b = new Builder(log);
				_outfile = b.build();
				setRunEnabled(ConfigPersister.getInstance().getConfig()
						.getHeaderType() == Config.GUI_HEADER	// TODO fix console app test
						&& (Util.WINDOWS_OS || !ConfigPersister.getInstance()
								.getConfig().isDontWrapJar()));
			} catch (InvariantViolationException ex) {
				setRunEnabled(false);
				ex.setBinding(_configForm.getBinding(ex.getProperty()));
				warn(ex);
			} catch (BuilderException ex) {
				setRunEnabled(false);
				log.append(ex.getMessage());
			}
		}
	}

	private class RunActionListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			try {
				// Block input with the glass pane while the wrapped app runs.
				getGlassPane().setVisible(true);
				Worker.post(new Task() {
					public Object run() throws ExecException {
						Log log = Log.getSwingLog(_configForm.getLogTextArea());
						log.clear();
						String path = _outfile.getPath();
						if (Util.WINDOWS_OS) {
							log.append(Messages.getString("MainFrame.executing") + path);
							Util.exec(new String[] { path, "--l4j-debug" }, log);
						} else {
							// No native launcher off Windows: verify the jar instead.
							log.append(Messages.getString("MainFrame.jar.integrity.test") + path);
							Util.exec(new String[] { "java", "-jar", path }, log);
						}
						return null;
					}
				});
			} catch (Exception ex) {
				// XXX errors logged by exec
			} finally {
				getGlassPane().setVisible(false);
			}
		}
	}

	private class AboutActionListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			info(Main.getDescription());
		}
	}
}
/**
 * Copyright 2011 Google Inc.
 * Copyright 2014 Andreas Schildbach
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.fuelcoinj.core;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

import static com.fuelcoinj.core.Utils.*;

/**
 * <p>Methods to serialize and de-serialize messages to the Peercoin network format as defined in
 * <a href="https://en.peercoin.it/wiki/Protocol_specification">the protocol specification</a>.</p>
 *
 * <p>To be able to serialize and deserialize new Message subclasses the following criteria needs to be met.</p>
 *
 * <ul>
 * <li>The proper Class instance needs to be mapped to its message name in the names variable below</li>
 * <li>There needs to be a constructor matching: NetworkParameters params, byte[] payload</li>
 * <li>Message.peercoinSerializeToStream() needs to be properly subclassed</li>
 * </ul>
 */
public class PeercoinSerializer {
    private static final Logger log = LoggerFactory.getLogger(PeercoinSerializer.class);

    // Wire format: the command field of a packet header is 12 bytes,
    // ASCII, NULL-padded.
    private static final int COMMAND_LEN = 12;

    private NetworkParameters params;
    private boolean parseLazy = false;
    private boolean parseRetain = false;

    // Maps each serializable Message subclass to its on-wire command name.
    private static Map<Class<? extends Message>, String> names =
            new HashMap<Class<? extends Message>, String>();

    static {
        names.put(VersionMessage.class, "version");
        names.put(InventoryMessage.class, "inv");
        names.put(Block.class, "block");
        names.put(GetDataMessage.class, "getdata");
        names.put(Transaction.class, "tx");
        names.put(AddressMessage.class, "addr");
        names.put(Ping.class, "ping");
        names.put(Pong.class, "pong");
        names.put(VersionAck.class, "verack");
        names.put(GetBlocksMessage.class, "getblocks");
        names.put(GetHeadersMessage.class, "getheaders");
        names.put(GetAddrMessage.class, "getaddr");
        names.put(HeadersMessage.class, "headers");
        names.put(BloomFilter.class, "filterload");
        names.put(FilteredBlock.class, "merkleblock");
        names.put(NotFoundMessage.class, "notfound");
        names.put(MemoryPoolMessage.class, "mempool");
    }

    /**
     * Constructs a PeercoinSerializer with the given behavior.
     *
     * @param params networkParams used to create Messages instances and determining packetMagic
     */
    public PeercoinSerializer(NetworkParameters params) {
        this(params, false, false);
    }

    /**
     * Constructs a PeercoinSerializer with the given behavior.
     *
     * @param params networkParams used to create Messages instances and determining packetMagic
     * @param parseLazy deserialize messages in lazy mode.
     * @param parseRetain retain the backing byte array of a message for fast reserialization.
     */
    public PeercoinSerializer(NetworkParameters params, boolean parseLazy, boolean parseRetain) {
        this.params = params;
        this.parseLazy = parseLazy;
        this.parseRetain = parseRetain;
    }

    /**
     * Writes the message (header + payload) to the output stream.
     *
     * @param name on-wire command name, truncated/padded to 12 bytes
     * @param message raw payload bytes
     * @param out destination stream
     */
    public void serialize(String name, byte[] message, OutputStream out) throws IOException {
        byte[] header = new byte[4 + COMMAND_LEN + 4 + 4 /* checksum */];
        uint32ToByteArrayBE(params.getPacketMagic(), header, 0);

        // The header array is initialized to zero by Java so we don't have to worry about
        // NULL terminating the string here.
        for (int i = 0; i < name.length() && i < COMMAND_LEN; i++) {
            header[4 + i] = (byte) (name.codePointAt(i) & 0xFF);
        }

        Utils.uint32ToByteArrayLE(message.length, header, 4 + COMMAND_LEN);

        // Checksum: first 4 bytes of the double-SHA256 of the payload.
        byte[] hash = doubleDigest(message);
        System.arraycopy(hash, 0, header, 4 + COMMAND_LEN + 4, 4);
        out.write(header);
        out.write(message);

        if (log.isDebugEnabled())
            log.debug("Sending {} message: {}", name, HEX.encode(header) + HEX.encode(message));
    }

    /**
     * Writes the message to the output stream, looking up its wire name
     * from the {@code names} registry.
     *
     * @throws Error if the message class has no registered name
     */
    public void serialize(Message message, OutputStream out) throws IOException {
        String name = names.get(message.getClass());
        if (name == null) {
            // FIX: error message referenced "FuelcoinSerializer"; keep it
            // consistent with the actual class name.
            throw new Error("PeercoinSerializer doesn't currently know how to serialize " + message.getClass());
        }
        serialize(name, message.peercoinSerialize(), out);
    }

    /**
     * Reads a message from the given ByteBuffer and returns it.
     */
    public Message deserialize(ByteBuffer in) throws ProtocolException, IOException {
        // A Fuelcoin protocol message has the following format.
        //
        //   - 4 byte magic number: 0xfabfb5da for the testnet or
        //     0xf9beb4d9 for production
        //   - 12 byte command in ASCII
        //   - 4 byte payload size
        //   - 4 byte checksum
        //   - Payload data
        //
        // The checksum is the first 4 bytes of a SHA256 hash of the message payload. It isn't
        // present for all messages, notably, the first one on a connection.
        //
        // Satoshi's implementation ignores garbage before the magic header bytes. We have to do the same because
        // sometimes it sends us stuff that isn't part of any message.
        seekPastMagicBytes(in);
        PeercoinPacketHeader header = new PeercoinPacketHeader(in);
        // Now try to read the whole message.
        return deserializePayload(header, in);
    }

    /**
     * Deserializes only the header in case packet meta data is needed before decoding
     * the payload. This method assumes you have already called seekPastMagicBytes()
     */
    public PeercoinPacketHeader deserializeHeader(ByteBuffer in) throws ProtocolException, IOException {
        return new PeercoinPacketHeader(in);
    }

    /**
     * Deserialize payload only. You must provide a header, typically obtained by calling
     * {@link PeercoinSerializer#deserializeHeader}.
     *
     * @throws ProtocolException if the checksum does not match the payload
     */
    public Message deserializePayload(PeercoinPacketHeader header, ByteBuffer in)
            throws ProtocolException, BufferUnderflowException {
        byte[] payloadBytes = new byte[header.size];
        in.get(payloadBytes, 0, header.size);

        // Verify the checksum (first 4 bytes of the double-SHA256 of the payload).
        byte[] hash;
        hash = doubleDigest(payloadBytes);
        if (header.checksum[0] != hash[0] || header.checksum[1] != hash[1] ||
                header.checksum[2] != hash[2] || header.checksum[3] != hash[3]) {
            throw new ProtocolException("Checksum failed to verify, actual " +
                    HEX.encode(hash) +
                    " vs " + HEX.encode(header.checksum));
        }

        if (log.isDebugEnabled()) {
            log.debug("Received {} byte '{}' message: {}", header.size, header.command,
                    HEX.encode(payloadBytes));
        }

        try {
            return makeMessage(header.command, header.size, payloadBytes, hash, header.checksum);
        } catch (Exception e) {
            throw new ProtocolException("Error deserializing message " + HEX.encode(payloadBytes) + "\n", e);
        }
    }

    /** Instantiates the concrete Message subclass for a wire command. */
    private Message makeMessage(String command, int length, byte[] payloadBytes,
            byte[] hash, byte[] checksum) throws ProtocolException {
        // We use an if ladder rather than reflection because reflection is very slow on Android.
        Message message;
        if (command.equals("version")) {
            return new VersionMessage(params, payloadBytes);
        } else if (command.equals("inv")) {
            message = new InventoryMessage(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("block")) {
            message = new Block(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("merkleblock")) {
            message = new FilteredBlock(params, payloadBytes);
        } else if (command.equals("getdata")) {
            message = new GetDataMessage(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("getblocks")) {
            message = new GetBlocksMessage(params, payloadBytes);
        } else if (command.equals("getheaders")) {
            message = new GetHeadersMessage(params, payloadBytes);
        } else if (command.equals("tx")) {
            Transaction tx = new Transaction(params, payloadBytes, null, parseLazy, parseRetain, length);
            // The message checksum is a double-SHA256 of the payload, so the
            // tx hash is just its byte-reversal — reuse it instead of rehashing.
            if (hash != null)
                tx.setHash(new Sha256Hash(Utils.reverseBytes(hash)));
            message = tx;
        } else if (command.equals("addr")) {
            message = new AddressMessage(params, payloadBytes, parseLazy, parseRetain, length);
        } else if (command.equals("ping")) {
            message = new Ping(params, payloadBytes);
        } else if (command.equals("pong")) {
            message = new Pong(params, payloadBytes);
        } else if (command.equals("verack")) {
            return new VersionAck(params, payloadBytes);
        } else if (command.equals("headers")) {
            return new HeadersMessage(params, payloadBytes);
        } else if (command.equals("alert")) {
            return new AlertMessage(params, payloadBytes);
        } else if (command.equals("filterload")) {
            return new BloomFilter(params, payloadBytes);
        } else if (command.equals("notfound")) {
            return new NotFoundMessage(params, payloadBytes);
        } else if (command.equals("mempool")) {
            return new MemoryPoolMessage();
        } else {
            log.warn("No support for deserializing message with name {}", command);
            return new UnknownMessage(params, command, payloadBytes);
        }
        if (checksum != null)
            message.setChecksum(checksum);
        return message;
    }

    /**
     * Advances the buffer past any garbage until the 4 packet-magic bytes
     * have been consumed.
     *
     * @throws BufferUnderflowException if the buffer ends before the magic is found
     */
    public void seekPastMagicBytes(ByteBuffer in) throws BufferUnderflowException {
        int magicCursor = 3;  // Which byte of the magic we're looking for currently.
        while (true) {
            byte b = in.get();
            // We're looking for a run of bytes that is the same as the packet magic but we want to ignore partial
            // magics that aren't complete. So we keep track of where we're up to with magicCursor.
            byte expectedByte = (byte)(0xFF & params.getPacketMagic() >>> (magicCursor * 8));
            if (b == expectedByte) {
                magicCursor--;
                if (magicCursor < 0) {
                    // We found the magic sequence.
                    return;
                } else {
                    // We still have further to go to find the next message.
                }
            } else {
                magicCursor = 3;
            }
        }
    }

    /**
     * Whether the serializer will produce lazy parse mode Messages
     */
    public boolean isParseLazyMode() {
        return parseLazy;
    }

    /**
     * Whether the serializer will produce cached mode Messages
     */
    public boolean isParseRetainMode() {
        return parseRetain;
    }

    /** Parsed representation of a packet header (everything after the magic). */
    public static class PeercoinPacketHeader {
        /** The largest number of bytes that a header can represent */
        public static final int HEADER_LENGTH = COMMAND_LEN + 4 + 4;

        public final byte[] header;
        public final String command;
        public final int size;
        public final byte[] checksum;

        public PeercoinPacketHeader(ByteBuffer in) throws ProtocolException, BufferUnderflowException {
            header = new byte[HEADER_LENGTH];
            in.get(header, 0, header.length);

            int cursor = 0;

            // The command is a NULL terminated string, unless the command fills all twelve bytes
            // in which case the termination is implicit.
            // FIX: check the bound BEFORE reading header[cursor]. The previous
            // order read header[12] first, so a full 12-byte command followed by
            // a non-zero size byte made the command run past COMMAND_LEN.
            for (; cursor < COMMAND_LEN && header[cursor] != 0; cursor++) ;
            byte[] commandBytes = new byte[cursor];
            System.arraycopy(header, 0, commandBytes, 0, cursor);
            try {
                command = new String(commandBytes, "US-ASCII");
            } catch (UnsupportedEncodingException e) {
                throw new RuntimeException(e);  // Cannot happen.
            }
            cursor = COMMAND_LEN;

            size = (int) readUint32(header, cursor);
            cursor += 4;

            if (size > Message.MAX_SIZE)
                throw new ProtocolException("Message size too large: " + size);

            // Old clients don't send the checksum.
            checksum = new byte[4];
            // Note that the size read above includes the checksum bytes.
            System.arraycopy(header, cursor, checksum, 0, 4);
            cursor += 4;
        }
    }
}
/*PLEASE DO NOT EDIT THIS CODE*/
/*This code was generated using the UMPLE 1.21.0.4666 modeling language!*/

import java.util.*;

// line 30 "VendingMachineClassDiagram.ump"
// line 77 "VendingMachineClassDiagram.ump"
/**
 * One denomination of coin accepted by the vending machine, identified by
 * its value, weight and diameter, with a 0..* association to the
 * {@code CoinHolder}s that store coins of this type.
 *
 * NOTE(review): UMPLE-generated code — change the .ump model and regenerate
 * rather than editing by hand. The association-maintenance logic below
 * assumes CoinHolder.setCanHold(CoinType) keeps both sides of the link in
 * sync (not visible here — confirm against CoinHolder).
 */
public class CoinType
{

  //------------------------
  // MEMBER VARIABLES
  //------------------------

  //CoinType Attributes
  private int value;     // denomination value in cents (see getValue)
  private int weight;    // coin weight in grams (see getWeight)
  private int diameter;  // coin diameter in micrometers (see getDiameter)

  //CoinType Associations
  private List<CoinHolder> coinHolders;  // holders storing this coin type (mandatory canHold on the holder side)

  //------------------------
  // CONSTRUCTOR
  //------------------------

  /** Creates a coin type with the given value, weight and diameter and no holders. */
  public CoinType(int aValue, int aWeight, int aDiameter)
  {
    value = aValue;
    weight = aWeight;
    diameter = aDiameter;
    coinHolders = new ArrayList<CoinHolder>();
  }

  //------------------------
  // INTERFACE
  //------------------------

  /** Sets the value attribute; always succeeds (returns true). */
  public boolean setValue(int aValue)
  {
    boolean wasSet = false;
    value = aValue;
    wasSet = true;
    return wasSet;
  }

  /** Sets the weight attribute; always succeeds (returns true). */
  public boolean setWeight(int aWeight)
  {
    boolean wasSet = false;
    weight = aWeight;
    wasSet = true;
    return wasSet;
  }

  /** Sets the diameter attribute; always succeeds (returns true). */
  public boolean setDiameter(int aDiameter)
  {
    boolean wasSet = false;
    diameter = aDiameter;
    wasSet = true;
    return wasSet;
  }

  /**
   * cents
   */
  public int getValue()
  {
    return value;
  }

  /**
   * grams
   */
  public int getWeight()
  {
    return weight;
  }

  /**
   * micrometers
   */
  public int getDiameter()
  {
    return diameter;
  }

  /** Returns the holder at the given index (throws IndexOutOfBoundsException if invalid). */
  public CoinHolder getCoinHolder(int index)
  {
    CoinHolder aCoinHolder = coinHolders.get(index);
    return aCoinHolder;
  }

  /** Returns an unmodifiable view of the associated holders. */
  public List<CoinHolder> getCoinHolders()
  {
    List<CoinHolder> newCoinHolders = Collections.unmodifiableList(coinHolders);
    return newCoinHolders;
  }

  /** Returns the number of associated holders. */
  public int numberOfCoinHolders()
  {
    int number = coinHolders.size();
    return number;
  }

  /** Returns true if at least one holder is associated. */
  public boolean hasCoinHolders()
  {
    boolean has = coinHolders.size() > 0;
    return has;
  }

  /** Returns the index of the given holder, or -1 if not associated. */
  public int indexOfCoinHolder(CoinHolder aCoinHolder)
  {
    int index = coinHolders.indexOf(aCoinHolder);
    return index;
  }

  /** Multiplicity lower bound of the coinHolders association (0..*). */
  public static int minimumNumberOfCoinHolders()
  {
    return 0;
  }

  /**
   * Creates a new CoinHolder linked to this coin type.
   * The CoinHolder constructor is expected to call back into
   * addCoinHolder(CoinHolder) to register itself here — TODO confirm
   * against CoinHolder.
   */
  public CoinHolder addCoinHolder(int aNumCoinsCapacity, int aCurrentNumberOfCoins, VendingMachine aVendingMachine)
  {
    return new CoinHolder(aNumCoinsCapacity, aCurrentNumberOfCoins, this, aVendingMachine);
  }

  /**
   * Registers an existing holder with this coin type.
   * If the holder currently belongs to a different coin type, the transfer is
   * delegated to setCanHold(this), which presumably removes it from the old
   * owner and adds it here; otherwise it is added to this list directly.
   * Returns false only when the holder is already present.
   */
  public boolean addCoinHolder(CoinHolder aCoinHolder)
  {
    boolean wasAdded = false;
    if (coinHolders.contains(aCoinHolder)) { return false; }
    CoinType existingCanHold = aCoinHolder.getCanHold();
    boolean isNewCanHold = existingCanHold != null && !this.equals(existingCanHold);
    if (isNewCanHold)
    {
      aCoinHolder.setCanHold(this);
    }
    else
    {
      coinHolders.add(aCoinHolder);
    }
    wasAdded = true;
    return wasAdded;
  }

  /**
   * Removes the holder from this association, but only when this coin type is
   * no longer the holder's canHold (the holder must always have a canHold).
   */
  public boolean removeCoinHolder(CoinHolder aCoinHolder)
  {
    boolean wasRemoved = false;
    //Unable to remove aCoinHolder, as it must always have a canHold
    if (!this.equals(aCoinHolder.getCanHold()))
    {
      coinHolders.remove(aCoinHolder);
      wasRemoved = true;
    }
    return wasRemoved;
  }

  /**
   * Adds the holder (via addCoinHolder) and then moves it to the requested
   * index, clamped to the valid range.
   */
  public boolean addCoinHolderAt(CoinHolder aCoinHolder, int index)
  {
    boolean wasAdded = false;
    if(addCoinHolder(aCoinHolder))
    {
      if(index < 0 ) { index = 0; }
      // Clamp to last valid position; the holder is already in the list here,
      // so numberOfCoinHolders() counts it.
      if(index > numberOfCoinHolders()) { index = numberOfCoinHolders() - 1; }
      coinHolders.remove(aCoinHolder);
      coinHolders.add(index, aCoinHolder);
      wasAdded = true;
    }
    return wasAdded;
  }

  /**
   * Moves an already-associated holder to the requested (clamped) index, or
   * adds it at that index when not yet associated.
   */
  public boolean addOrMoveCoinHolderAt(CoinHolder aCoinHolder, int index)
  {
    boolean wasAdded = false;
    if(coinHolders.contains(aCoinHolder))
    {
      if(index < 0 ) { index = 0; }
      if(index > numberOfCoinHolders()) { index = numberOfCoinHolders() - 1; }
      coinHolders.remove(aCoinHolder);
      coinHolders.add(index, aCoinHolder);
      wasAdded = true;
    }
    else
    {
      wasAdded = addCoinHolderAt(aCoinHolder, index);
    }
    return wasAdded;
  }

  /**
   * Deletes this coin type, cascading deletion to every associated holder
   * (a holder cannot exist without a canHold). Iterates backwards because
   * aCoinHolder.delete() presumably removes the holder from this list —
   * TODO confirm against CoinHolder.delete().
   */
  public void delete()
  {
    for(int i=coinHolders.size(); i > 0; i--)
    {
      CoinHolder aCoinHolder = coinHolders.get(i - 1);
      aCoinHolder.delete();
    }
  }

  /** Debug representation including the three attributes. */
  public String toString()
  {
	  String outputString = "";
    return super.toString() + "["+
            "value" + ":" + getValue()+ "," +
            "weight" + ":" + getWeight()+ "," +
            "diameter" + ":" + getDiameter()+ "]"
     + outputString;
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.api.records.impl.pb; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope; import org.apache.hadoop.yarn.api.protocolrecords.ResourceTypes; import org.apache.hadoop.yarn.api.records.AMCommand; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; import org.apache.hadoop.yarn.api.records.ApplicationTimeoutType; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerRetryPolicy; import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.ContainerSubState; import org.apache.hadoop.yarn.api.records.ContainerUpdateType; import org.apache.hadoop.yarn.api.records.ExecutionTypeRequest; import 
org.apache.hadoop.yarn.api.records.ExecutionType; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.LocalizationState; import org.apache.hadoop.yarn.api.records.LogAggregationStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeState; import org.apache.hadoop.yarn.api.records.NodeUpdateType; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.QueueState; import org.apache.hadoop.yarn.api.records.RejectionReason; import org.apache.hadoop.yarn.api.records.ReservationRequestInterpreter; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceInformation; import org.apache.hadoop.yarn.api.records.UpdateContainerError; import org.apache.hadoop.yarn.api.records.UpdateContainerRequest; import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.resource.PlacementConstraint.TargetExpression; import org.apache.hadoop.yarn.api.resource.PlacementConstraint.TimedPlacementConstraint; import org.apache.hadoop.yarn.proto.YarnProtos; import org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto; import 
org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto; import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto; import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto; import org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto; import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto; import org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto; import org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto; import org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto; import org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto; import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; import org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto; import org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto; import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto; import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerTypeProto; import org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto; import org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto; import org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto; import org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos; import org.apache.hadoop.yarn.proto.YarnServiceProtos.ContainerUpdateTypeProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.LocalizationStateProto; import org.apache.hadoop.yarn.server.api.ContainerType; import org.apache.hadoop.thirdparty.com.google.common.collect.Interner; import org.apache.hadoop.thirdparty.com.google.common.collect.Interners; import org.apache.hadoop.thirdparty.protobuf.ByteString; /** * Utils to convert enum protos to 
corresponding java enums and vice versa. */ @Private @Unstable public class ProtoUtils { public static final Interner<ByteString> BYTE_STRING_INTERNER = Interners.newWeakInterner(); /* * ContainerState */ public static ContainerStateProto convertToProtoFormat(ContainerState state) { switch (state) { case NEW: return ContainerStateProto.C_NEW; case RUNNING: return ContainerStateProto.C_RUNNING; case COMPLETE: return ContainerStateProto.C_COMPLETE; default: throw new IllegalArgumentException( "ContainerState conversion unsupported"); } } public static ContainerState convertFromProtoFormat( ContainerStateProto proto) { switch (proto) { case C_NEW: return ContainerState.NEW; case C_RUNNING: return ContainerState.RUNNING; case C_COMPLETE: return ContainerState.COMPLETE; default: throw new IllegalArgumentException( "ContainerStateProto conversion unsupported"); } } /* * Container SubState */ public static ContainerSubStateProto convertToProtoFormat( ContainerSubState state) { switch (state) { case SCHEDULED: return ContainerSubStateProto.CSS_SCHEDULED; case RUNNING: return ContainerSubStateProto.CSS_RUNNING; case PAUSED: return ContainerSubStateProto.CSS_PAUSED; case COMPLETING: return ContainerSubStateProto.CSS_COMPLETING; case DONE: return ContainerSubStateProto.CSS_DONE; default: throw new IllegalArgumentException( "ContainerSubState conversion unsupported"); } } public static ContainerSubState convertFromProtoFormat( ContainerSubStateProto proto) { switch (proto) { case CSS_SCHEDULED: return ContainerSubState.SCHEDULED; case CSS_RUNNING: return ContainerSubState.RUNNING; case CSS_PAUSED: return ContainerSubState.PAUSED; case CSS_COMPLETING: return ContainerSubState.COMPLETING; case CSS_DONE: return ContainerSubState.DONE; default: throw new IllegalArgumentException( "ContainerSubStateProto conversion unsupported"); } } /* * NodeState */ private final static String NODE_STATE_PREFIX = "NS_"; public static NodeStateProto convertToProtoFormat(NodeState e) { return 
NodeStateProto.valueOf(NODE_STATE_PREFIX + e.name()); } public static NodeState convertFromProtoFormat(NodeStateProto e) { return NodeState.valueOf(e.name().replace(NODE_STATE_PREFIX, "")); } /* * NodeId */ public static NodeIdProto convertToProtoFormat(NodeId e) { return ((NodeIdPBImpl)e).getProto(); } public static NodeId convertFromProtoFormat(NodeIdProto e) { return new NodeIdPBImpl(e); } /* * YarnApplicationState */ public static YarnApplicationStateProto convertToProtoFormat(YarnApplicationState e) { return YarnApplicationStateProto.valueOf(e.name()); } public static YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto e) { return YarnApplicationState.valueOf(e.name()); } /* * YarnApplicationAttemptState */ private static String YARN_APPLICATION_ATTEMPT_STATE_PREFIX = "APP_ATTEMPT_"; public static YarnApplicationAttemptStateProto convertToProtoFormat( YarnApplicationAttemptState e) { return YarnApplicationAttemptStateProto .valueOf(YARN_APPLICATION_ATTEMPT_STATE_PREFIX + e.name()); } public static YarnApplicationAttemptState convertFromProtoFormat( YarnApplicationAttemptStateProto e) { return YarnApplicationAttemptState.valueOf(e.name().replace( YARN_APPLICATION_ATTEMPT_STATE_PREFIX, "")); } /* * ApplicationsRequestScope */ public static YarnServiceProtos.ApplicationsRequestScopeProto convertToProtoFormat(ApplicationsRequestScope e) { return YarnServiceProtos.ApplicationsRequestScopeProto.valueOf(e.name()); } public static ApplicationsRequestScope convertFromProtoFormat (YarnServiceProtos.ApplicationsRequestScopeProto e) { return ApplicationsRequestScope.valueOf(e.name()); } /* * ApplicationResourceUsageReport */ public static ApplicationResourceUsageReportProto convertToProtoFormat(ApplicationResourceUsageReport e) { return ((ApplicationResourceUsageReportPBImpl)e).getProto(); } public static ApplicationResourceUsageReport convertFromProtoFormat(ApplicationResourceUsageReportProto e) { return new ApplicationResourceUsageReportPBImpl(e); } /* 
* FinalApplicationStatus */
// Proto values are declared as APP_<NAME>: the prefix is prepended on the
// way in and stripped on the way out.
private static final String FINAL_APPLICATION_STATUS_PREFIX = "APP_";

public static FinalApplicationStatusProto convertToProtoFormat(
    FinalApplicationStatus e) {
  return FinalApplicationStatusProto.valueOf(
      FINAL_APPLICATION_STATUS_PREFIX + e.name());
}

public static FinalApplicationStatus convertFromProtoFormat(
    FinalApplicationStatusProto e) {
  return FinalApplicationStatus.valueOf(
      e.name().replace(FINAL_APPLICATION_STATUS_PREFIX, ""));
}

/*
 * LocalResourceType
 */
public static LocalResourceTypeProto convertToProtoFormat(
    LocalResourceType e) {
  return LocalResourceTypeProto.valueOf(e.name());
}

public static LocalResourceType convertFromProtoFormat(
    LocalResourceTypeProto e) {
  return LocalResourceType.valueOf(e.name());
}

/*
 * LocalResourceVisibility
 */
public static LocalResourceVisibilityProto convertToProtoFormat(
    LocalResourceVisibility e) {
  return LocalResourceVisibilityProto.valueOf(e.name());
}

public static LocalResourceVisibility convertFromProtoFormat(
    LocalResourceVisibilityProto e) {
  return LocalResourceVisibility.valueOf(e.name());
}

/*
 * AMCommand
 */
public static AMCommandProto convertToProtoFormat(AMCommand e) {
  return AMCommandProto.valueOf(e.name());
}

public static AMCommand convertFromProtoFormat(AMCommandProto e) {
  return AMCommand.valueOf(e.name());
}

/*
 * RejectionReason
 */
private static final String REJECTION_REASON_PREFIX = "RRP_";

public static YarnProtos.RejectionReasonProto convertToProtoFormat(
    RejectionReason e) {
  return YarnProtos.RejectionReasonProto
      .valueOf(REJECTION_REASON_PREFIX + e.name());
}

public static RejectionReason convertFromProtoFormat(
    YarnProtos.RejectionReasonProto e) {
  return RejectionReason.valueOf(e.name()
      .replace(REJECTION_REASON_PREFIX, ""));
}

/*
 * ByteBuffer
 */
/**
 * Copies the contents of a {@link ByteString} into a standalone
 * {@link ByteBuffer}; the returned buffer owns its backing array, so later
 * mutation of either side cannot affect the other.
 */
public static ByteBuffer convertFromProtoFormat(ByteString byteString) {
  // rewind() so remaining() reports the full length of the view.
  int capacity = byteString.asReadOnlyByteBuffer().rewind().remaining();
  byte[] b = new byte[capacity];
  byteString.asReadOnlyByteBuffer().get(b, 0, capacity);
  return ByteBuffer.wrap(b);
}

/**
 * Copies a {@link ByteBuffer} into a {@link ByteString}, reading from
 * position 0 and restoring the buffer's original position afterwards so
 * the caller's view of the buffer is unchanged.
 */
public static ByteString convertToProtoFormat(ByteBuffer byteBuffer) {
  int oldPos = byteBuffer.position();
  byteBuffer.rewind();
  ByteString bs = ByteString.copyFrom(byteBuffer);
  byteBuffer.position(oldPos);
  return bs;
}

/*
 * QueueState
 */
private static final String QUEUE_STATE_PREFIX = "Q_";

public static QueueStateProto convertToProtoFormat(QueueState e) {
  return QueueStateProto.valueOf(QUEUE_STATE_PREFIX + e.name());
}

public static QueueState convertFromProtoFormat(QueueStateProto e) {
  return QueueState.valueOf(e.name().replace(QUEUE_STATE_PREFIX, ""));
}

/*
 * QueueACL
 */
private static final String QUEUE_ACL_PREFIX = "QACL_";

public static QueueACLProto convertToProtoFormat(QueueACL e) {
  return QueueACLProto.valueOf(QUEUE_ACL_PREFIX + e.name());
}

public static QueueACL convertFromProtoFormat(QueueACLProto e) {
  return QueueACL.valueOf(e.name().replace(QUEUE_ACL_PREFIX, ""));
}

/*
 * ApplicationAccessType
 */
private static final String APP_ACCESS_TYPE_PREFIX = "APPACCESS_";

public static ApplicationAccessTypeProto convertToProtoFormat(
    ApplicationAccessType e) {
  return ApplicationAccessTypeProto.valueOf(APP_ACCESS_TYPE_PREFIX
      + e.name());
}

public static ApplicationAccessType convertFromProtoFormat(
    ApplicationAccessTypeProto e) {
  return ApplicationAccessType.valueOf(e.name().replace(
      APP_ACCESS_TYPE_PREFIX, ""));
}

/*
 * ApplicationTimeoutType
 */
private static final String APP_TIMEOUT_TYPE_PREFIX = "APP_TIMEOUT_";

public static ApplicationTimeoutTypeProto convertToProtoFormat(
    ApplicationTimeoutType e) {
  return ApplicationTimeoutTypeProto
      .valueOf(APP_TIMEOUT_TYPE_PREFIX + e.name());
}

public static ApplicationTimeoutType convertFromProtoFormat(
    ApplicationTimeoutTypeProto e) {
  return ApplicationTimeoutType
      .valueOf(e.name().replace(APP_TIMEOUT_TYPE_PREFIX, ""));
}

/*
 * Reservation Request interpreter type
 */
public static ReservationRequestInterpreterProto convertToProtoFormat(
    ReservationRequestInterpreter e) {
  return ReservationRequestInterpreterProto.valueOf(e.name());
}

public static ReservationRequestInterpreter convertFromProtoFormat(
    ReservationRequestInterpreterProto e) {
  return ReservationRequestInterpreter.valueOf(e.name());
}

/*
 * Log Aggregation Status
 */
private static final String LOG_AGGREGATION_STATUS_PREFIX = "LOG_";
private static final int LOG_AGGREGATION_STATUS_PREFIX_LEN =
    LOG_AGGREGATION_STATUS_PREFIX.length();

public static LogAggregationStatusProto convertToProtoFormat(
    LogAggregationStatus e) {
  return LogAggregationStatusProto.valueOf(LOG_AGGREGATION_STATUS_PREFIX
      + e.name());
}

public static LogAggregationStatus convertFromProtoFormat(
    LogAggregationStatusProto e) {
  // substring (not replace) so a "LOG_" occurring later in a name can
  // never be stripped by accident.
  return LogAggregationStatus.valueOf(e.name().substring(
      LOG_AGGREGATION_STATUS_PREFIX_LEN));
}

/*
 * ContainerType
 */
public static ContainerTypeProto convertToProtoFormat(ContainerType e) {
  return ContainerTypeProto.valueOf(e.name());
}

public static ContainerType convertFromProtoFormat(ContainerTypeProto e) {
  return ContainerType.valueOf(e.name());
}

/*
 * NodeUpdateType
 */
public static NodeUpdateTypeProto convertToProtoFormat(NodeUpdateType e) {
  return NodeUpdateTypeProto.valueOf(e.name());
}

public static NodeUpdateType convertFromProtoFormat(NodeUpdateTypeProto e) {
  return NodeUpdateType.valueOf(e.name());
}

/*
 * ExecutionType
 */
public static ExecutionTypeProto convertToProtoFormat(ExecutionType e) {
  return ExecutionTypeProto.valueOf(e.name());
}

public static ExecutionType convertFromProtoFormat(ExecutionTypeProto e) {
  return ExecutionType.valueOf(e.name());
}

/*
 * ContainerUpdateType
 */
public static ContainerUpdateTypeProto convertToProtoFormat(
    ContainerUpdateType e) {
  return ContainerUpdateTypeProto.valueOf(e.name());
}

public static ContainerUpdateType convertFromProtoFormat(
    ContainerUpdateTypeProto e) {
  return ContainerUpdateType.valueOf(e.name());
}

/*
 * Resource
 */
public static ResourceProto convertToProtoFormat(Resource r) {
  return ResourcePBImpl.getProto(r);
}

public static Resource convertFromProtoFormat(ResourceProto resource) {
  return new ResourcePBImpl(resource);
}

/*
 * ContainerRetryPolicy
 */
public static ContainerRetryPolicyProto convertToProtoFormat(
    ContainerRetryPolicy e) {
  return ContainerRetryPolicyProto.valueOf(e.name());
}

public static ContainerRetryPolicy convertFromProtoFormat(
    ContainerRetryPolicyProto e) {
  return ContainerRetryPolicy.valueOf(e.name());
}

/*
 * ExecutionTypeRequest
 */
public static ExecutionTypeRequestProto convertToProtoFormat(
    ExecutionTypeRequest e) {
  return ((ExecutionTypeRequestPBImpl) e).getProto();
}

public static ExecutionTypeRequest convertFromProtoFormat(
    ExecutionTypeRequestProto e) {
  return new ExecutionTypeRequestPBImpl(e);
}

/*
 * Container
 */
public static YarnProtos.ContainerProto convertToProtoFormat(Container t) {
  return ((ContainerPBImpl) t).getProto();
}

public static ContainerPBImpl convertFromProtoFormat(
    YarnProtos.ContainerProto t) {
  return new ContainerPBImpl(t);
}

public static ContainerStatusPBImpl convertFromProtoFormat(
    YarnProtos.ContainerStatusProto p) {
  return new ContainerStatusPBImpl(p);
}

/*
 * ContainerId
 */
public static ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
  return new ContainerIdPBImpl(p);
}

public static ContainerIdProto convertToProtoFormat(ContainerId t) {
  return ((ContainerIdPBImpl) t).getProto();
}

/*
 * UpdateContainerRequest
 */
public static UpdateContainerRequestPBImpl convertFromProtoFormat(
    YarnServiceProtos.UpdateContainerRequestProto p) {
  return new UpdateContainerRequestPBImpl(p);
}

public static YarnServiceProtos.UpdateContainerRequestProto
    convertToProtoFormat(UpdateContainerRequest t) {
  return ((UpdateContainerRequestPBImpl) t).getProto();
}

/*
 * UpdateContainerError
 */
public static UpdateContainerErrorPBImpl convertFromProtoFormat(
    YarnServiceProtos.UpdateContainerErrorProto p) {
  return new UpdateContainerErrorPBImpl(p);
}

public static YarnServiceProtos.UpdateContainerErrorProto
    convertToProtoFormat(UpdateContainerError t) {
  return ((UpdateContainerErrorPBImpl) t).getProto();
}

/*
 * ResourceTypes
 */
// Historical misspelling ("conver") kept for source compatibility with
// existing callers; prefer the correctly named overload below.
public static ResourceTypesProto converToProtoFormat(ResourceTypes e) {
  return ResourceTypesProto.valueOf(e.name());
}

// Correctly spelled companion of the legacy converToProtoFormat above.
public static ResourceTypesProto convertToProtoFormat(ResourceTypes e) {
  return converToProtoFormat(e);
}

public static ResourceTypes convertFromProtoFormat(ResourceTypesProto e) {
  return ResourceTypes.valueOf(e.name());
}

/**
 * Converts a list of {@code StringLongMapProto} entries to a map, first
 * seeding every resource type known to {@link Resource} with 0 so callers
 * always see a complete key set even when the proto list omits some types.
 */
public static Map<String, Long> convertStringLongMapProtoListToMap(
    List<YarnProtos.StringLongMapProto> pList) {
  Resource tmp = Resource.newInstance(0, 0);
  Map<String, Long> ret = new HashMap<>();
  for (ResourceInformation entry : tmp.getResources()) {
    ret.put(entry.getName(), 0L);
  }
  if (pList != null) {
    for (YarnProtos.StringLongMapProto p : pList) {
      ret.put(p.getKey(), p.getValue());
    }
  }
  return ret;
}

public static List<YarnProtos.StringLongMapProto>
    convertMapToStringLongMapProtoList(Map<String, Long> map) {
  List<YarnProtos.StringLongMapProto> ret = new ArrayList<>();
  // Null guard mirrors convertMapToStringFloatMapProtoList: a null map
  // yields an empty list instead of an NPE.
  if (map != null) {
    for (Map.Entry<String, Long> entry : map.entrySet()) {
      YarnProtos.StringLongMapProto.Builder tmp =
          YarnProtos.StringLongMapProto.newBuilder();
      tmp.setKey(entry.getKey());
      tmp.setValue(entry.getValue());
      ret.add(tmp.build());
    }
  }
  return ret;
}

public static List<YarnProtos.StringFloatMapProto>
    convertMapToStringFloatMapProtoList(Map<String, Float> map) {
  List<YarnProtos.StringFloatMapProto> ret = new ArrayList<>();
  if (map != null) {
    for (Map.Entry<String, Float> entry : map.entrySet()) {
      YarnProtos.StringFloatMapProto.Builder tmp =
          YarnProtos.StringFloatMapProto.newBuilder();
      tmp.setKey(entry.getKey());
      tmp.setValue(entry.getValue());
      ret.add(tmp.build());
    }
  }
  return ret;
}

public static Map<String, String> convertStringStringMapProtoListToMap(
    List<StringStringMapProto> pList) {
  Map<String, String> ret = new HashMap<>();
  if (pList != null) {
    for (StringStringMapProto p : pList) {
      // Entries without a key are silently dropped.
      if (p.hasKey()) {
        ret.put(p.getKey(), p.getValue());
      }
    }
  }
  return ret;
}

public static Map<String, Float> convertStringFloatMapProtoListToMap(
    List<YarnProtos.StringFloatMapProto> pList) {
  Map<String, Float> ret = new HashMap<>();
  if (pList != null) {
    for (YarnProtos.StringFloatMapProto p : pList) {
      if (p.hasKey()) {
        ret.put(p.getKey(), p.getValue());
      }
    }
  }
  return ret;
}

public static List<YarnProtos.StringStringMapProto> convertToProtoFormat(
    Map<String, String> stringMap) {
  List<YarnProtos.StringStringMapProto> pList = new ArrayList<>();
  if (stringMap != null && !stringMap.isEmpty()) {
    StringStringMapProto.Builder pBuilder = StringStringMapProto.newBuilder();
    for (Map.Entry<String, String> entry : stringMap.entrySet()) {
      pBuilder.setKey(entry.getKey());
      pBuilder.setValue(entry.getValue());
      pList.add(pBuilder.build());
    }
  }
  return pList;
}

/*
 * TargetExpression.TargetType
 */
public static PlacementConstraintTargetProto.TargetType convertToProtoFormat(
    TargetExpression.TargetType t) {
  return PlacementConstraintTargetProto.TargetType.valueOf(t.name());
}

public static TargetExpression.TargetType convertFromProtoFormat(
    PlacementConstraintTargetProto.TargetType t) {
  return TargetExpression.TargetType.valueOf(t.name());
}

/*
 * TimedPlacementConstraint.DelayUnit
 */
public static TimedPlacementConstraintProto.DelayUnit convertToProtoFormat(
    TimedPlacementConstraint.DelayUnit u) {
  return TimedPlacementConstraintProto.DelayUnit.valueOf(u.name());
}

public static TimedPlacementConstraint.DelayUnit convertFromProtoFormat(
    TimedPlacementConstraintProto.DelayUnit u) {
  return TimedPlacementConstraint.DelayUnit.valueOf(u.name());
}

/*
 * ApplicationId
 */
public static ApplicationIdPBImpl convertFromProtoFormat(
    ApplicationIdProto p) {
  return new ApplicationIdPBImpl(p);
}

public static ApplicationIdProto convertToProtoFormat(ApplicationId t) {
  return ((ApplicationIdPBImpl) t).getProto();
}

/*
 * LocalizationState
 */
private final static String LOCALIZATION_STATE_PREFIX = "L_";

public static LocalizationStateProto convertToProtoFormat(
    LocalizationState e) {
  return LocalizationStateProto.valueOf(LOCALIZATION_STATE_PREFIX + e.name());
}

public static LocalizationState convertFromProtoFormat(
    LocalizationStateProto e) {
  return LocalizationState.valueOf(e.name()
      .replace(LOCALIZATION_STATE_PREFIX, ""));
}
}
/* * Copyright 2003-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package test; import static org.eclipse.jdt.core.search.IJavaSearchConstants.CLASS; import static org.eclipse.jdt.core.search.IJavaSearchConstants.WAIT_UNTIL_READY_TO_SEARCH; import static org.eclipse.jdt.core.search.SearchEngine.createJavaSearchScope; import static org.eclipse.jdt.core.search.SearchPattern.R_CASE_SENSITIVE; import static org.eclipse.jdt.core.search.SearchPattern.R_EXACT_MATCH; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.UnsupportedEncodingException; import junit.framework.Assert; import org.codehaus.groovy.eclipse.core.builder.GroovyClasspathContainer; import org.codehaus.groovy.eclipse.core.model.GroovyProjectFacade; import org.eclipse.core.internal.events.BuildCommand; import org.eclipse.core.resources.ICommand; import org.eclipse.core.resources.IContainer; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IFolder; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IProjectDescription; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.IWorkspaceDescription; import org.eclipse.core.resources.IWorkspaceRoot; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IPath; import 
org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaModelMarker;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.core.search.SearchEngine;
import org.eclipse.jdt.core.search.TypeNameRequestor;
import org.eclipse.jdt.internal.compiler.impl.CompilerOptions;
import org.eclipse.jdt.internal.core.DefaultWorkingCopyOwner;
import org.eclipse.jdt.internal.core.JavaModelManager;
import org.eclipse.jdt.internal.core.JavaProject;
import org.eclipse.jdt.launching.JavaRuntime;

/**
 * Scratch Eclipse/JDT project for tests: creates a workspace project with
 * src/bin folders, the Java nature and the default JRE, and offers helpers
 * to add types, packages, natures, builders and classpath entries.
 * (Ripped off the Groovy Eclipse plugin tests.)
 */
public class TestProject {

    /** Project name used by the no-arg constructor. */
    public static final String TEST_PROJECT_NAME = "TestProject";

    private final IProject project;
    private final IJavaProject javaProject;
    // Lazily re-created by createPackage() when null.
    private IPackageFragmentRoot sourceFolder;

    /**
     * Creates (or reuses) the workspace project {@code name}, resets its
     * classpath, creates src/bin folders, adds the default JRE container
     * and pins compiler compliance/source/target to 1.5.
     */
    public TestProject(String name) throws CoreException {
        IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
        project = root.getProject(name);
        if (!project.exists()) {
            project.create(null);
            project.open(null);
        }
        javaProject = JavaCore.create(project);
        IFolder binFolder = createBinFolder();
        setJavaNature();
        // Start from an empty classpath so the entries added below are the
        // only ones present.
        javaProject.setRawClasspath(new IClasspathEntry[0], null);
        createOutputFolder(binFolder);
        sourceFolder = createSourceFolder();
        addSystemLibraries();
        javaProject.setOption(CompilerOptions.OPTION_Compliance, "1.5");
        javaProject.setOption(CompilerOptions.OPTION_Source, "1.5");
        javaProject.setOption(CompilerOptions.OPTION_TargetPlatform, "1.5");
    }

    public TestProject() throws CoreException {
        this(TEST_PROJECT_NAME);
    }

    public IProject getProject() {
        return project;
    }

    public IJavaProject getJavaProject() {
        return javaProject;
    }

    public GroovyProjectFacade getGroovyProjectFacade() {
        return new GroovyProjectFacade(javaProject);
    }

    /**
     * Returns true when the raw classpath contains the Groovy classpath
     * container.
     */
    public boolean hasGroovyContainer() throws JavaModelException {
        IClasspathEntry[] entries = javaProject.getRawClasspath();
        for (int i = 0; i < entries.length; i++) {
            if (entries[i].getEntryKind() == IClasspathEntry.CPE_CONTAINER
                    && entries[i].getPath().equals(
                            GroovyClasspathContainer.CONTAINER_ID)) {
                return true;
            }
        }
        return false;
    }

    /** Creates (non-forced) a package fragment under the src folder. */
    public IPackageFragment createPackage(String name) throws CoreException {
        if (sourceFolder == null)
            sourceFolder = createSourceFolder();
        return sourceFolder.createPackageFragment(name, false, null);
    }

    public void deletePackage(String name) throws CoreException {
        sourceFolder.getPackageFragment(name).delete(true, null);
    }

    /**
     * Creates a Java compilation unit {@code cuName} in {@code pack},
     * prepending a package declaration unless the package is the default
     * one, and returns its first top-level type.
     */
    public IType createJavaType(IPackageFragment pack, String cuName,
            String source) throws JavaModelException {
        StringBuffer buf = new StringBuffer();
        if (!pack.isDefaultPackage()) {
            buf.append("package " + pack.getElementName() + ";"
                    + System.getProperty("line.separator"));
        }
        buf.append(System.getProperty("line.separator"));
        buf.append(source);
        ICompilationUnit cu = pack.createCompilationUnit(cuName,
                buf.toString(), false, null);
        return cu.getTypes()[0];
    }

    public IType createJavaTypeAndPackage(String packageName, String fileName,
            String source) throws CoreException {
        return createJavaType(createPackage(packageName), fileName, source);
    }

    // public IFile createGroovyTypeAndPackage(String packageName,
    // String fileName, InputStream source) throws CoreException,
    // IOException {
    // return createGroovyType(createPackage(packageName), fileName, IOUtils
    // .toString(source));
    // }

    public IFile createGroovyTypeAndPackage(String packageName,
            String fileName, String source) throws CoreException {
        return createGroovyType(createPackage(packageName), fileName, source);
    }

    /**
     * Creates a Groovy source file {@code cuName} in {@code pack}. Unlike
     * createJavaType, the package line has no trailing semicolon and the
     * file is written through the resource (IFile) API using the project's
     * core encoding.
     */
    public IFile createGroovyType(IPackageFragment pack, String cuName,
            String source) throws CoreException {
        StringBuffer buf = new StringBuffer();
        if (! pack.getElementName().equals("")) {
            //buf.append("package " + pack.getElementName() + ";" + System.getProperty("line.separator"));
            buf.append("package " + pack.getElementName()
                    + System.getProperty("line.separator"));
            buf.append(System.getProperty("line.separator"));
        }
        buf.append(source);
        IContainer folder = (IContainer) pack.getResource();
        String encoding = javaProject.getOption(JavaCore.CORE_ENCODING, true);
        InputStream stream;
        try {
            stream = new ByteArrayInputStream(encoding == null
                    ? buf.toString().getBytes()
                    : buf.toString().getBytes(encoding));
        } catch (UnsupportedEncodingException e) {
            throw new CoreException(new Status(IStatus.ERROR,
                    "org.codehaus.groovy.eclipse.tests", IStatus.ERROR,
                    "failed to create a groovy type", e));
        }
        return createFile(folder, cuName, stream);
    }

    /** Removes {@code natureId} from the project description, if present. */
    public void removeNature(String natureId) throws CoreException {
        final IProjectDescription description = project.getDescription();
        final String[] ids = description.getNatureIds();
        for (int i = 0; i < ids.length; ++i) {
            if (ids[i].equals(natureId)) {
                final String[] newIds = remove(ids, i);
                description.setNatureIds(newIds);
                project.setDescription(description, null);
                return;
            }
        }
    }

    // Returns a copy of ids without the element at index.
    private String[] remove(String[] ids, int index) {
        String[] newIds = new String[ids.length-1];
        for (int i = 0, j = 0; i < ids.length; i++) {
            if (i != index) {
                newIds[j] = ids[i];
                j++;
            }
        }
        return newIds;
    }

    /** Prepends {@code newBuilder} to the project's build spec. */
    public void addBuilder(String newBuilder) throws CoreException {
        final IProjectDescription description = project.getDescription();
        ICommand[] commands = description.getBuildSpec();
        ICommand newCommand = new BuildCommand();
        newCommand.setBuilderName(newBuilder);
        ICommand[] newCommands = new ICommand[commands.length+1];
        newCommands[0] = newCommand;
        System.arraycopy(commands, 0, newCommands, 1, commands.length);
        description.setBuildSpec(newCommands);
        project.setDescription(description, null);
    }

    /** Prepends {@code natureId} to the project's nature ids. */
    public void addNature(String natureId) throws CoreException {
        final IProjectDescription description = project.getDescription();
        final String[] ids = description.getNatureIds();
        final String[] newIds = new String[ids.length+1];
        newIds[0] = natureId;
        System.arraycopy(ids, 0, newIds, 1, ids.length);
        description.setNatureIds(newIds);
        project.setDescription(description, null);
    }

    // Writes the stream to folder/name, replacing any existing file;
    // CoreException is rewrapped as JavaModelException.
    private IFile createFile(IContainer folder, String name,
            InputStream contents) throws JavaModelException {
        IFile file = folder.getFile(new Path(name));
        try {
            if (file.exists()) {
                file.delete(true, null);
            }
            file.create(contents, IResource.FORCE, null);
        } catch (CoreException e) {
            throw new JavaModelException(e);
        }
        return file;
    }

    /** Discards working copies and deletes the whole project (with contents). */
    public void dispose() throws CoreException {
        deleteWorkingCopies();
        project.delete(true, true, null);
    }

    /** Deletes every writable package fragment but keeps the project itself. */
    public void deleteContents() throws CoreException {
        deleteWorkingCopies();
        IPackageFragment[] frags = javaProject.getPackageFragments();
        for (IPackageFragment frag : frags) {
            if (!frag.isReadOnly()) {
                frag.delete(true, null);
            }
        }
    }

    // Waits for the indexer, then discards every primary-owner working copy.
    private void deleteWorkingCopies() throws JavaModelException {
        waitForIndexer();
        // delete all working copies
        ICompilationUnit[] workingCopies = JavaModelManager
                .getJavaModelManager().getWorkingCopies(
                        DefaultWorkingCopyOwner.PRIMARY, true);
        if (workingCopies != null) {
            for (ICompilationUnit workingCopy : workingCopies) {
                if (workingCopy.isWorkingCopy()) {
                    workingCopy.discardWorkingCopy();
                }
            }
        }
        System.gc();
    }

    private IFolder createBinFolder() throws CoreException {
        final IFolder binFolder = project.getFolder("bin");
        if (!binFolder.exists())
            ensureExists(binFolder);
        return binFolder;
    }

    // Replaces the project's nature list with just the Java nature.
    private void setJavaNature() throws CoreException {
        IProjectDescription description = project.getDescription();
        description.setNatureIds(new String[] { JavaCore.NATURE_ID });
        project.setDescription(description, null);
    }

    private void createOutputFolder(IFolder binFolder)
            throws JavaModelException {
        IPath outputLocation = binFolder.getFullPath();
        javaProject.setOutputLocation(outputLocation, null);
    }

    // Ensures a "src" source folder exists and is on the classpath; returns
    // its package fragment root (existing entry reused when already present).
    private IPackageFragmentRoot createSourceFolder() throws CoreException {
        IFolder folder = project.getFolder("src");
        if (!folder.exists())
            ensureExists(folder);
        final IClasspathEntry[] entries = javaProject
                .getResolvedClasspath(false);
        final IPackageFragmentRoot root = javaProject
                .getPackageFragmentRoot(folder);
        for (int i = 0; i < entries.length; i++) {
            final IClasspathEntry entry = entries[i];
            if (entry.getPath().equals(folder.getFullPath()))
                return root;
        }
        IClasspathEntry[] oldEntries = javaProject.getRawClasspath();
        IClasspathEntry[] newEntries = new IClasspathEntry[oldEntries.length + 1];
        System.arraycopy(oldEntries, 0, newEntries, 0, oldEntries.length);
        newEntries[oldEntries.length] = JavaCore.newSourceEntry(root.getPath());
        javaProject.setRawClasspath(newEntries, null);
        return root;
    }

    public IPackageFragmentRoot createOtherSourceFolder() throws CoreException {
        return createOtherSourceFolder(null);
    }

    public IPackageFragmentRoot createOtherSourceFolder(String outPath)
            throws CoreException {
        return createSourceFolder("other", outPath);
    }

    public IPackageFragmentRoot createSourceFolder(String path, String outPath)
            throws CoreException {
        return createSourceFolder(path, outPath, null);
    }

    /**
     * Ensures a source folder {@code path} exists and is on the classpath,
     * optionally with exclusion patterns and a folder-specific output path
     * (resolved against the project root when non-null).
     */
    public IPackageFragmentRoot createSourceFolder(String path, String outPath,
            IPath[] exclusionPattern) throws CoreException {
        IFolder folder = project.getFolder(path);
        if (!folder.exists()) {
            ensureExists(folder);
        }
        final IClasspathEntry[] entries = javaProject
                .getResolvedClasspath(false);
        final IPackageFragmentRoot root = javaProject
                .getPackageFragmentRoot(folder);
        for (int i = 0; i < entries.length; i++) {
            final IClasspathEntry entry = entries[i];
            if (entry.getPath().equals(folder.getFullPath())) {
                return root;
            }
        }
        IClasspathEntry[] oldEntries = javaProject.getRawClasspath();
        IClasspathEntry[] newEntries = new IClasspathEntry[oldEntries.length + 1];
        System.arraycopy(oldEntries, 0, newEntries, 0, oldEntries.length);
        IPath outPathPath = outPath == null ? null
                : getProject().getFullPath().append(outPath).makeAbsolute();
        newEntries[oldEntries.length] = JavaCore.newSourceEntry(root.getPath(),
                exclusionPattern, outPathPath);
        javaProject.setRawClasspath(newEntries, null);
        return root;
    }

    /**
     * Creates {@code folder}, first recursively creating any missing parent
     * folders.
     * @param folder the folder to create
     * @throws CoreException if folder creation fails
     */
    private void ensureExists(IFolder folder) throws CoreException {
        if (folder.getParent().getType() == IResource.FOLDER
                && !folder.getParent().exists()) {
            ensureExists((IFolder) folder.getParent());
        }
        folder.create(false, true, null);
    }

    public void addProjectReference(IJavaProject referent)
            throws JavaModelException {
        IClasspathEntry[] oldEntries = javaProject.getRawClasspath();
        IClasspathEntry[] newEntries = new IClasspathEntry[oldEntries.length + 1];
        System.arraycopy(oldEntries, 0, newEntries, 0, oldEntries.length);
        newEntries[oldEntries.length] = JavaCore.newProjectEntry(
                referent.getPath());
        javaProject.setRawClasspath(newEntries, null);
    }

    public void addJarFileToClasspath(IPath path) throws JavaModelException {
        IClasspathEntry[] oldEntries = javaProject.getRawClasspath();
        IClasspathEntry[] newEntries = new IClasspathEntry[oldEntries.length + 1];
        System.arraycopy(oldEntries, 0, newEntries, 0, oldEntries.length);
        newEntries[oldEntries.length] = JavaCore.newLibraryEntry(path, null,
                null);
        javaProject.setRawClasspath(newEntries, null);
    }

    // Appends the default JRE container to the classpath.
    private void addSystemLibraries() throws JavaModelException {
        IClasspathEntry[] oldEntries = javaProject.getRawClasspath();
        IClasspathEntry[] newEntries = new IClasspathEntry[oldEntries.length + 1];
        System.arraycopy(oldEntries, 0, newEntries, 0, oldEntries.length);
        newEntries[oldEntries.length] = JavaRuntime
                .getDefaultJREContainerEntry();
        javaProject.setRawClasspath(newEntries, null);
    }

    // Blocks until the JDT indexer is ready by issuing a search with
    // WAIT_UNTIL_READY_TO_SEARCH and a no-op requestor.
    @SuppressWarnings("deprecation")
    public void waitForIndexer() throws JavaModelException {
        final TypeNameRequestor requestor = new TypeNameRequestor() {};
        new SearchEngine().searchAllTypeNames(null, null,
                R_EXACT_MATCH | R_CASE_SENSITIVE, CLASS,
                createJavaSearchScope(new IJavaElement[0]), requestor,
                WAIT_UNTIL_READY_TO_SEARCH, null);
    }

    protected void fullBuild() throws CoreException {
        this.getProject().build(
                org.eclipse.core.resources.IncrementalProjectBuilder.FULL_BUILD,
                null);
    }

    /**
     * Returns a description of all ERROR-severity Java model problem
     * markers on the project, or null when there are none.
     */
    public String getProblems() throws CoreException {
        IMarker[] markers = getProject().findMarkers(
                IJavaModelMarker.JAVA_MODEL_PROBLEM_MARKER, true,
                IResource.DEPTH_INFINITE);
        StringBuilder sb = new StringBuilder();
        if (markers == null || markers.length == 0) {
            return null;
        }
        boolean errorFound = false;
        sb.append("Problems:\n");
        for (int i = 0; i < markers.length; i++) {
            // NOTE(review): assumes every marker carries a SEVERITY
            // attribute — a marker without one would NPE here; confirm.
            if (((Integer) markers[i].getAttribute(IMarker.SEVERITY))
                    .intValue() == IMarker.SEVERITY_ERROR) {
                sb.append(" ");
                sb.append(markers[i].getResource().getName()).append(" : ");
                sb.append(markers[i].getAttribute(IMarker.LOCATION))
                        .append(" : ");
                sb.append(markers[i].getAttribute(IMarker.MESSAGE))
                        .append("\n");
                errorFound = true;
            }
        }
        return errorFound ? sb.toString() : null;
    }

    /**
     * Create file at project root (parents are created as needed), encoded
     * with the project's default charset when available.
     * @param name file path relative to the project root
     * @param contents file contents
     * @return the created file
     * @throws Exception if creation fails
     */
    public IFile createFile(String name, String contents) throws Exception {
        String encoding = null;
        try {
            encoding = project.getDefaultCharset();
            // get project encoding as file is not accessible
        } catch (CoreException ce) {
            // use no encoding
        }
        InputStream stream = new ByteArrayInputStream(encoding == null
                ? contents.getBytes() : contents.getBytes(encoding));
        IFile file = project.getFile(new Path(name));
        if (!file.getParent().exists()) {
            createFolder(file.getParent());
        }
        file.create(stream, true, null);
        return file;
    }

    // Recursively creates parent folders; fails the test when the chain
    // reaches a non-folder parent (i.e. the project itself doesn't exist).
    private void createFolder(IContainer parent) throws CoreException {
        if (!parent.getParent().exists()) {
            if (parent.getParent().getType() != IResource.FOLDER) {
                Assert.fail("Project doesn't exist " + parent.getParent());
            }
            createFolder(parent.getParent());
        }
        ((IFolder) parent).create(true, true, null);
    }

    public IPackageFragmentRoot getSourceFolder() {
        return sourceFolder;
    }

    /** Creates a compilation unit {@code cuName} in package {@code pkg}. */
    public ICompilationUnit createUnit(String pkg, String cuName,
            String cuContents) throws CoreException {
        IPackageFragment frag = createPackage(pkg);
        ICompilationUnit cu = frag.createCompilationUnit(cuName, cuContents,
                false, null);
        return cu;
    }

    // NOTE(review): the result array is sized by packages.length but the
    // loop is bounded by cuContents.length — the three arrays are presumably
    // expected to have identical lengths; confirm at call sites.
    public ICompilationUnit[] createUnits(String[] packages, String[] cuNames,
            String[] cuContents) throws CoreException {
        ICompilationUnit[] units = new ICompilationUnit[packages.length];
        for (int i = 0; i < cuContents.length; i++) {
            units[i] = createUnit(packages[i], cuNames[i], cuContents[i]);
        }
        return units;
    }

    /** Appends {@code entryPath} to {@code project}'s raw classpath. */
    public static void addEntry(IProject project, IClasspathEntry entryPath)
            throws JavaModelException {
        IClasspathEntry[] classpath = getClasspath(project);
        IClasspathEntry[] newClaspath = new IClasspathEntry[classpath.length + 1];
        System.arraycopy(classpath, 0, newClaspath, 0, classpath.length);
        newClaspath[classpath.length] = entryPath;
        setClasspath(project, newClaspath);
    }

    public static IClasspathEntry[] getClasspath(IProject project) {
        try {
            JavaProject javaProject = (JavaProject) JavaCore.create(project);
            return javaProject.getExpandedClasspath();
        } catch (JavaModelException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }

    public static void addExternalLibrary(IProject project, String jar)
            throws JavaModelException {
        addExternalLibrary(project, jar, false);
    }

    public static void addExternalLibrary(IProject project, String jar,
            boolean isExported) throws JavaModelException {
        addEntry(project, JavaCore.newLibraryEntry(new Path(jar), null, null,
                isExported));
    }

    public static void setClasspath(IProject project, IClasspathEntry[] entries)
            throws JavaModelException {
        IJavaProject javaProject = JavaCore.create(project);
        javaProject.setRawClasspath(entries, null);
    }

    /** Toggles workspace auto-building; CoreException is rewrapped. */
    public static void setAutoBuilding(boolean value) {
        try {
            IWorkspace w = ResourcesPlugin.getWorkspace();
            IWorkspaceDescription d = w.getDescription();
            d.setAutoBuilding(value);
            w.setDescription(d);
        } catch (CoreException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }

    public static boolean isAutoBuilding() {
        IWorkspace w = ResourcesPlugin.getWorkspace();
        IWorkspaceDescription d = w.getDescription();
        return d.isAutoBuilding();
    }
}