code
stringlengths 3
1.05M
| repo_name
stringlengths 4
116
| path
stringlengths 4
991
| language
stringclasses 9
values | license
stringclasses 15
values | size
int32 3
1.05M
|
---|---|---|---|---|---|
(function() {
    'use strict';

    MenuDeleteController.$inject = ['$uibModalInstance', 'entity', 'Menu'];

    /**
     * Modal dialog controller confirming deletion of a menu entity.
     * The resolved `entity` is exposed as `menu` for the template.
     */
    function MenuDeleteController($uibModalInstance, entity, Menu) {
        var ctrl = this;

        ctrl.menu = entity;
        ctrl.clear = dismissDialog;
        ctrl.confirmDelete = deleteMenu;

        // Close the dialog without deleting anything.
        function dismissDialog() {
            $uibModalInstance.dismiss('cancel');
        }

        // Delete the menu with the given id, then close the dialog with `true`
        // so the caller knows the deletion succeeded.
        function deleteMenu(id) {
            Menu.delete({id: id},
                function () {
                    $uibModalInstance.close(true);
                });
        }
    }

    angular
        .module('gastronomeeApp')
        .controller('MenuDeleteController', MenuDeleteController);
})();
|
goxhaj/gastronomee
|
src/main/webapp/app/dashboard/menu/menu-delete-dialog.controller.js
|
JavaScript
|
apache-2.0
| 694 |
/*
* Copyright (C) 2019 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gapid.perfetto.models;
import static com.google.gapid.util.MoreFutures.logFailure;
import static com.google.gapid.util.MoreFutures.transform;
import static com.google.gapid.util.MoreFutures.transformAsync;
import static com.google.gapid.util.Scheduler.EXECUTOR;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import com.google.common.cache.Cache;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.gapid.perfetto.TimeSpan;
import com.google.gapid.util.Caches;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.logging.Logger;
// Note on multi-threading issues here:
// Because of how the window tables work, the below computeData(..) calls have to be serialized
// by track. That is, data for different requests can not be fetched in parallel. Thus, the calls
// to computeData(..) are funneled through the getDataLock semaphore. Care needs to be taken not to
// block the executor threads indefinitely, as otherwise a deadlock could occur, due to the results
// of the query futures no longer being able to be executed. Thus, the semaphore is try-acquired
// with a short timeout, followed by a slightly longer wait, before retrying.
/**
* A {@link Track} is responsible for loading the data to be shown in the UI.
*/
public abstract class Track<D extends Track.Data> {
  private static final Logger LOG = Logger.getLogger(Track.class.getName());

  public static final long QUANTIZE_CUT_OFF = 2000;

  // Delay before a newly scheduled request is executed, so rapid successive UI
  // requests (e.g. while panning/zooming) can supersede each other cheaply.
  private static final long REQUEST_DELAY_MS = 50;
  // See the threading note above the class: try-acquire briefly, then reschedule
  // a retry rather than blocking an executor thread on the semaphore.
  private static final long ACQUIRE_TIMEOUT_MS = 5;
  private static final long ACQUIRE_RETRY_MS = 10;
  // Requests are aligned to pages of PAGE_SIZE * resolution (see DataRequest.pageAlign).
  private static final long PAGE_SIZE = 3600;

  // Shared soft-reference cache of computed data, keyed by (track, request).
  private static final DataCache cache = new DataCache();

  private final String trackId;
  private D data;
  private ListenableFuture<?> scheduledFuture;
  // Set to null on any thread, set to non-null only on the UI thread.
  private final AtomicReference<ScheduledRequest<D>> scheduledRequest =
      new AtomicReference<ScheduledRequest<D>>(null);
  private final Semaphore getDataLock = new Semaphore(1);
  private boolean initialized; // guarded by getDataLock

  public Track(String trackId) {
    // Dashes would be invalid in the SQL table names derived via tableName(..).
    this.trackId = trackId.replace("-", "_");
  }

  public String getId() {
    return trackId;
  }

  // on UI Thread
  public D getData(DataRequest req, OnUiThread<D> onUiThread) {
    if (checkScheduledRequest(req, onUiThread) && (data == null || !data.request.satisfies(req))) {
      schedule(req.pageAlign(), onUiThread);
    }
    // Returns the currently available (possibly stale) data immediately; the
    // callback repaints once fresh data arrives.
    return data;
  }

  // on UI Thread. returns true, if a new request may be scheduled.
  private boolean checkScheduledRequest(DataRequest req, OnUiThread<D> callback) {
    ScheduledRequest<D> scheduled = scheduledRequest.get();
    if (scheduled == null) {
      return true;
    } else if (scheduled.satisfies(req)) {
      // The in-flight request already covers this one; just get notified when done.
      scheduled.addCallback(callback);
      return false;
    }
    // The in-flight request no longer matches what the UI needs: cancel it.
    scheduledFuture.cancel(true);
    scheduledFuture = null;
    scheduledRequest.set(null);
    return true;
  }

  // on UI Thread
  private void schedule(DataRequest request, OnUiThread<D> onUiThread) {
    D newData = cache.getIfPresent(this, request);
    if (newData != null) {
      // Cache hit: no query needed.
      data = newData;
      return;
    }
    ScheduledRequest<D> scheduled = new ScheduledRequest<D>(request, onUiThread);
    scheduledRequest.set(scheduled);
    scheduledFuture = EXECUTOR.schedule(
        () -> query(scheduled), REQUEST_DELAY_MS, MILLISECONDS);
  }

  // *not* on UI Thread
  private void query(ScheduledRequest<D> scheduled) {
    try {
      if (!getDataLock.tryAcquire(ACQUIRE_TIMEOUT_MS, MILLISECONDS)) {
        // Lock busy: retry a little later instead of blocking this executor thread.
        logFailure(LOG, EXECUTOR.schedule(
            () -> query(scheduled), ACQUIRE_RETRY_MS, MILLISECONDS));
        return;
      }
    } catch (InterruptedException e) {
      // We were cancelled while waiting on the lock.
      scheduledRequest.compareAndSet(scheduled, null);
      return;
    }
    // The lock is held from here on: every path below must release it.
    if (scheduledRequest.get() != scheduled) {
      // Request was cancelled or superseded while we waited on the lock.
      getDataLock.release();
      return;
    }
    try {
      ListenableFuture<D> future = transformAsync(setup(), $ -> computeData(scheduled.request));
      scheduled.scheduleCallbacks(future, newData -> update(scheduled, newData));
      // Always unlock when the future completes/fails/is cancelled.
      future.addListener(getDataLock::release, EXECUTOR);
    } catch (RuntimeException e) {
      getDataLock.release();
      throw e;
    }
  }

  // on UI Thread
  private void update(ScheduledRequest<D> scheduled, D newData) {
    cache.put(this, scheduled.request, newData);
    // Only publish if this is still the pending request (not cancelled/replaced).
    if (scheduledRequest.compareAndSet(scheduled, null)) {
      data = newData;
      scheduledFuture = null;
    }
  }

  // Runs initialize() at most once; afterwards short-circuits. Callers hold getDataLock.
  private ListenableFuture<?> setup() {
    if (initialized) {
      return Futures.immediateFuture(null);
    }
    return transform(initialize(), $ -> initialized = true);
  }

  /** One-time setup executed before the first {@link #computeData} call. */
  protected abstract ListenableFuture<?> initialize();

  /** Computes the data for the given request. Calls are serialized per track. */
  protected abstract ListenableFuture<D> computeData(DataRequest req);

  protected String tableName(String prefix) {
    return prefix + "_" + trackId;
  }

  public static interface OnUiThread<T> {
    /**
     * Runs the consumer with the result of the given future on the UI thread.
     */
    public void onUiThread(ListenableFuture<T> future, Consumer<T> callback);

    /** Requests a repaint of the UI element showing this track. */
    public void repaint();
  }

  /** Base class of the data computed by a track for a given request. */
  public static class Data {
    public final DataRequest request;

    public Data(DataRequest request) {
      this.request = request;
    }
  }

  /** The time range and resolution of the data needed by the UI. */
  public static class DataRequest {
    public final TimeSpan range;
    public final long resolution;

    public DataRequest(TimeSpan range, long resolution) {
      this.range = range;
      this.resolution = resolution;
    }

    /** Returns this request with its range aligned to page boundaries, to improve cache reuse. */
    public DataRequest pageAlign() {
      return new DataRequest(range.align(PAGE_SIZE * resolution), resolution);
    }

    /** Whether data computed for this request also answers the other request. */
    public boolean satisfies(DataRequest other) {
      return resolution == other.resolution && range.contains(other.range);
    }

    @Override
    public String toString() {
      return "Request{start: " + range.start + ", end: " + range.end + ", res: " + resolution + "}";
    }
  }

  /** Parameters written into a Perfetto window table to restrict queries to a range. */
  public static class Window {
    private static final long RESOLUTION_QUANTIZE_CUTOFF = MICROSECONDS.toNanos(80);
    private static final String UPDATE_SQL = "update %s set " +
        "window_start = %d, window_dur = %d, quantum = %d where rowid = 0";

    public final long start;
    public final long end;
    public final boolean quantized;
    public final long bucketSize;

    private Window(long start, long end, boolean quantized, long bucketSize) {
      this.start = start;
      this.end = end;
      this.quantized = quantized;
      this.bucketSize = bucketSize;
    }

    /** Returns a non-quantized window exactly covering the request's range. */
    public static Window compute(DataRequest request) {
      return new Window(request.range.start, request.range.end, false, 0);
    }

    /** Returns a quantized window at coarse resolutions, an exact one otherwise. */
    public static Window compute(DataRequest request, int bucketSizePx) {
      if (request.resolution >= RESOLUTION_QUANTIZE_CUTOFF) {
        return quantized(request, bucketSizePx);
      } else {
        return compute(request);
      }
    }

    public static Window quantized(DataRequest request, int bucketSizePx) {
      long quantum = request.resolution * bucketSizePx;
      // Align the window start down to a quantum boundary.
      long start = (request.range.start / quantum) * quantum;
      return new Window(start, request.range.end, true, quantum);
    }

    public int getNumberOfBuckets() {
      // Ceiling division: (end - start) / bucketSize rounded up.
      return (int)((end - start + bucketSize - 1) / bucketSize);
    }

    public ListenableFuture<?> update(QueryEngine qe, String name) {
      // window_dur must be at least 1, otherwise the window would be empty.
      return qe.query(String.format(
          UPDATE_SQL, name, start, Math.max(1, end - start), bucketSize));
    }

    @Override
    public String toString() {
      return "window{start: " + start + ", end: " + end +
          (quantized ? ", " + getNumberOfBuckets() : "") + "}";
    }
  }

  /** Convenience base class for tracks that run their queries through a {@link QueryEngine}. */
  public abstract static class WithQueryEngine<D extends Track.Data> extends Track<D> {
    protected final QueryEngine qe;

    public WithQueryEngine(QueryEngine qe, String trackId) {
      super(trackId);
      this.qe = qe;
    }
  }

  /** A pending data request together with the UI callbacks waiting on its result. */
  private static class ScheduledRequest<D extends Track.Data> {
    public final DataRequest request;
    private final List<OnUiThread<D>> callbacks;

    public ScheduledRequest(DataRequest request, OnUiThread<D> callback) {
      this.request = request;
      this.callbacks = Lists.newArrayList(callback);
    }

    public boolean satisfies(DataRequest req) {
      return request.satisfies(req);
    }

    // Only on UI thread.
    public void addCallback(OnUiThread<D> callback) {
      callbacks.add(callback);
    }

    // Not on UI thread.
    public void scheduleCallbacks(ListenableFuture<D> future, Consumer<D> update) {
      // callbacks.get(0) is safe since we only ever append to the list.
      callbacks.get(0).onUiThread(future, data -> {
        update.accept(data);
        for (OnUiThread<D> callback : callbacks) {
          callback.repaint();
        }
      });
    }
  }

  /** Process-wide soft cache mapping (track, request) to previously computed data. */
  private static class DataCache {
    private final Cache<Key, Object> dataCache = Caches.softCache();

    public DataCache() {
    }

    @SuppressWarnings("unchecked")
    public <D extends Track.Data> D getIfPresent(Track<D> track, DataRequest req) {
      return (D)dataCache.getIfPresent(new Key(track, req));
    }

    public <D extends Track.Data> void put(Track<D> track, DataRequest req, D data) {
      dataCache.put(new Key(track, req), data);
    }

    /** Cache key: track identity plus the request's resolution and time range. */
    private static class Key {
      private final Track<?> track;
      private final long resolution;
      private final long start;
      private final long end;
      private final int h;

      public Key(Track<?> track, DataRequest req) {
        this.track = track;
        this.resolution = req.resolution;
        this.start = req.range.start;
        this.end = req.range.end;
        // Standard 31-based hash chain. The final term was previously added
        // without the * 31 factor, weakening the mix (more collisions for keys
        // differing only in start/end); equals(..) is unaffected by this fix.
        this.h = ((track.hashCode() * 31 + Long.hashCode(resolution)) * 31 +
            Long.hashCode(start)) * 31 + Long.hashCode(end);
      }

      @Override
      public int hashCode() {
        return h;
      }

      @Override
      public boolean equals(Object obj) {
        if (obj == this) {
          return true;
        } else if (!(obj instanceof Key)) {
          return false;
        }
        Key o = (Key)obj;
        return track == o.track && resolution == o.resolution && start == o.start && end == o.end;
      }
    }
  }
}
|
google/gapid
|
gapic/src/main/com/google/gapid/perfetto/models/Track.java
|
Java
|
apache-2.0
| 11,222 |
package de.tu_berlin.indoornavigation;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import de.tu_berlin.indoornavigation.entities.Group;
/**
 * Activity listing the user's groups; tapping a group opens its member list.
 */
public class GroupsActivity extends AppCompatActivity implements GroupFragment.OnListFragmentInteractionListener {

    private static final String LOG_TAG = GroupsActivity.class.toString();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_groups);
    }

    /**
     * Callback from the group list fragment: opens the members view for the
     * selected group.
     *
     * @param group the group the user tapped
     */
    @Override
    public void onListFragmentInteraction(Group group) {
        Log.d(LOG_TAG, group.getName());

        Intent usersIntent = new Intent(this, UsersActivity.class);
        usersIntent.putExtra("members", group.getMembers());
        usersIntent.putExtra("groupId", group.getId());
        startActivity(usersIntent);
    }
}
|
IoSL-INav/android
|
app/src/main/java/de/tu_berlin/indoornavigation/GroupsActivity.java
|
Java
|
apache-2.0
| 979 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.management;
import java.util.List;
import org.neo4j.jmx.Description;
import org.neo4j.jmx.ManagementInterface;
import org.neo4j.kernel.info.LockInfo;
@ManagementInterface( name = LockManager.NAME )
@Description( "Information about the Neo4j lock status" )
public interface LockManager
{
    // JMX management bean name under which this interface is registered.
    // (Interface fields are implicitly public static final.)
    final String NAME = "Locking";

    /**
     * Returns the number of averted deadlocks, as described by the
     * {@code @Description} annotation below.
     */
    @Description( "The number of lock sequences that would have lead to a deadlock situation that "
            + "Neo4j has detected and averted (by throwing DeadlockDetectedException)." )
    long getNumberOfAvertedDeadlocks();

    /** Returns information about every lock currently held. */
    @Description( "Information about all locks held by Neo4j" )
    List<LockInfo> getLocks();

    /**
     * Returns contended locks only; {@code minWaitTime} filters by how long (in
     * milliseconds) at least one thread has been waiting — 0 returns all contended locks.
     */
    @Description( "Information about contended locks (locks where at least one thread is waiting) held by Neo4j. "
            + "The parameter is used to get locks where threads have waited for at least the specified number "
            + "of milliseconds, a value of 0 retrieves all contended locks." )
    List<LockInfo> getContendedLocks( long minWaitTime );
}
|
HuangLS/neo4j
|
advanced/management/src/main/java/org/neo4j/management/LockManager.java
|
Java
|
apache-2.0
| 1,871 |
package org.smarti18n.messages.users;
import org.smarti18n.api.v2.UsersApi;
import org.smarti18n.exceptions.UserExistException;
import org.smarti18n.exceptions.UserUnknownException;
import org.smarti18n.models.User;
import org.smarti18n.models.UserCreateDTO;
import org.smarti18n.models.UserSimplified;
import org.smarti18n.models.UserUpdateDTO;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
@RestController
public class Users2Endpoint implements UsersApi {

    private final UserService userService;

    public Users2Endpoint(UserService userService) {
        this.userService = userService;
    }

    /** Returns all registered users. */
    @Override
    @GetMapping(PATH_USERS_FIND_ALL)
    public List<User> findAll() {
        return userService.findAll();
    }

    /**
     * Returns the user with the given mail address.
     *
     * @throws UserUnknownException if no user exists for the mail address
     */
    @Override
    @GetMapping(PATH_USERS_FIND_ONE)
    public User findOne(
            @PathVariable("mail") String mail) throws UserUnknownException {
        return userService.findOne(mail);
    }

    /** Returns a simplified (reduced-field) view of the user with the given mail. */
    @Override
    @GetMapping(PATH_USERS_FIND_ONE_SIMPLIFIED)
    public UserSimplified findOneSimplified(
            @PathVariable("mail") String mail) {
        return userService.findOneSimplified(mail);
    }

    /**
     * Registers a new user from the given DTO.
     *
     * @throws UserExistException if a user with the same mail already exists
     */
    @Override
    @PostMapping(PATH_USERS_CREATE)
    public User create(@RequestBody UserCreateDTO dto) throws UserExistException {
        return userService.register(dto);
    }

    /**
     * Updates the user identified by mail with the values in the DTO.
     *
     * @throws UserUnknownException if no user exists for the mail address
     */
    @Override
    @PutMapping(PATH_USERS_UPDATE)
    public User update(
            @PathVariable("mail") String mail,
            @RequestBody UserUpdateDTO dto) throws UserUnknownException {
        return userService.update(mail, dto);
    }
}
|
SmartI18N/SmartI18N
|
smarti18n/smarti18n-messages/src/main/java/org/smarti18n/messages/users/Users2Endpoint.java
|
Java
|
apache-2.0
| 1,926 |
/**
* Copyright (C) 2014 Karlsruhe Institute of Technology
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package edu.kit.dama.mdm.dataorganization.test;
import edu.kit.dama.commons.types.DigitalObjectId;
import edu.kit.dama.mdm.dataorganization.entity.core.IAttribute;
import edu.kit.dama.mdm.dataorganization.entity.core.ICollectionNode;
import edu.kit.dama.mdm.dataorganization.entity.core.IFileTree;
import edu.kit.dama.mdm.dataorganization.impl.jpa.Attribute;
import edu.kit.dama.mdm.dataorganization.impl.jpa.CollectionNode;
import edu.kit.dama.mdm.dataorganization.impl.jpa.DataOrganizationNode;
import edu.kit.dama.mdm.dataorganization.impl.jpa.FileTree;
import edu.kit.dama.mdm.dataorganization.impl.jpa.persistence.PersistenceFacade;
import java.util.List;
import java.util.UUID;
import javax.persistence.EntityManager;
/**
*
* @author pasic
*/
public class TestUtil {

    /** Removes every DataOrganizationNode entity from the test database. */
    static void clearDB() {
        EntityManager em = PersistenceFacade.getInstance().
                getEntityManagerFactory().createEntityManager();
        em.getTransaction().begin();
        List<DataOrganizationNode> nodes = em.createQuery(
                "SELECT m FROM DataOrganizationNode m",
                DataOrganizationNode.class).getResultList();
        for (DataOrganizationNode node : nodes) {
            em.remove(node);
        }
        em.flush();
        em.getTransaction().commit();
        em.close();
    }

    /**
     * Creates a small fixture tree using the client-side entity implementation.
     *
     * Structure (insertion order preserved):
     * root -> child 1
     *      -> child 2 (attribute dummy=attribute)
     *              -> cnc 2.1
     *              -> cnc 2.2 -> cnc 2.2.1, cnc 2.2.2, cnc 2.2.3
     *              -> cnc 2.3 -> cnc 2.3.1, cnc 2.3.2, cnc 2.3.3
     *
     * The digital object id is randomized so repeated runs do not collide.
     */
    public static IFileTree createBasicTestTree() {
        IFileTree tree
                = new edu.kit.dama.mdm.dataorganization.entity.impl.client.FileTree();
        tree.setDigitalObjectId(new DigitalObjectId("Dummy " + UUID.randomUUID().toString()));

        ICollectionNode root = tree.getRootNode();
        root.setName("root");

        addClientChild(root, "child 1");
        ICollectionNode child2 = addClientChild(root, "child 2");
        IAttribute attr
                = new edu.kit.dama.mdm.dataorganization.entity.impl.client.Attribute();
        attr.setKey("dummy");
        attr.setValue("attribute");
        child2.addAttribute(attr);

        addClientChild(child2, "cnc 2.1");
        ICollectionNode cnc22 = addClientChild(child2, "cnc 2.2");
        ICollectionNode cnc23 = addClientChild(child2, "cnc 2.3");

        addClientChild(cnc23, "cnc 2.3.1");
        addClientChild(cnc23, "cnc 2.3.2");
        addClientChild(cnc23, "cnc 2.3.3");

        addClientChild(cnc22, "cnc 2.2.1");
        addClientChild(cnc22, "cnc 2.2.2");
        addClientChild(cnc22, "cnc 2.2.3");

        tree.setViewName("default");
        return tree;
    }

    // Creates a named client-impl collection node, attaches it to parent, returns it.
    private static ICollectionNode addClientChild(ICollectionNode parent, String name) {
        ICollectionNode child
                = new edu.kit.dama.mdm.dataorganization.entity.impl.client.CollectionNode();
        child.setName(name);
        parent.addChild(child);
        return child;
    }

    /**
     * Builds the same node structure as {@link #createBasicTestTree()} but with
     * the JPA entity implementation, a fixed "Dummy" object id and no view name.
     */
    public static FileTree createBasicJPATestTree() {
        FileTree tree = new FileTree();
        tree.setDigitalObjectId(new DigitalObjectId("Dummy"));
        tree.setName("root");
        // In the JPA implementation, the FileTree itself acts as the root collection node.
        CollectionNode root = (CollectionNode) tree;

        addJpaChild(root, "child 1");
        CollectionNode child2 = addJpaChild(root, "child 2");
        Attribute attr = new Attribute();
        attr.setKey("dummy");
        attr.setValue("attribute");
        child2.addAttribute(attr);

        addJpaChild(child2, "cnc 2.1");
        CollectionNode cnc22 = addJpaChild(child2, "cnc 2.2");
        CollectionNode cnc23 = addJpaChild(child2, "cnc 2.3");

        addJpaChild(cnc23, "cnc 2.3.1");
        addJpaChild(cnc23, "cnc 2.3.2");
        addJpaChild(cnc23, "cnc 2.3.3");

        addJpaChild(cnc22, "cnc 2.2.1");
        addJpaChild(cnc22, "cnc 2.2.2");
        addJpaChild(cnc22, "cnc 2.2.3");

        return tree;
    }

    // Creates a named JPA collection node, attaches it to parent, returns it.
    private static CollectionNode addJpaChild(CollectionNode parent, String name) {
        CollectionNode child = new CollectionNode();
        child.setName(name);
        parent.addChild(child);
        return child;
    }
}
|
kit-data-manager/base
|
DataOrganization/src/test/java/edu/kit/dama/mdm/dataorganization/test/TestUtil.java
|
Java
|
apache-2.0
| 6,221 |
/**
 * Phaser state shown while waiting for a second player to join a game.
 * The constructor only stores the game reference, so all factory calls
 * must go through `this.game.add` — the original mixed in `this.add.sprite`,
 * which only works if the StateManager injected its shortcut properties
 * (not guaranteed for hand-constructed state objects like this one).
 */
var WaitState = function(game) { this.game = game; };

WaitState.prototype.preload = function() {
};

WaitState.prototype.create = function() {
    this.background = this.game.add.sprite(0, 0, 'fence');
    // Fixed: was `this.add.sprite(...)` — inconsistent with every other call here.
    this.title = this.game.add.sprite(400, 200, 'waitforplay');
    this.title.anchor.setTo(0.5, 0.5);
    this.waitBar = this.game.add.tileSprite(0, 500, 800, 29, 'tankbar');
    this.waitBar.autoScroll(-200, 0);
    this.menubutton = this.game.add.button(400, 350, 'menubutton', this.menuClick, this);
    this.menubutton.anchor.setTo(0.5, 0.5);
    this.searching = this.game.add.audio('searching');
    this.searching.play();
    try {
        // `socket` is a global set up elsewhere; fixed the bare global `game`
        // references in the coordinates to use this state's game instance.
        socket.emit("new player", {gameRequest: this.game.gameRequest, character: this.game.character, x: this.game.width/2, y: this.game.height-100});
        console.log("Player sent");
    } catch (err) {
        console.log("Player could not be sent");
        console.log(err.message);
    }
};

WaitState.prototype.update = function() {
    // `otherPlayerReady` and `noGames` are globals toggled by socket handlers.
    if (otherPlayerReady) {
        this.searching.stop();
        this.game.state.start("GameState");
    } else if (noGames) {
        this.title.destroy();
        // Fixed: was `this.add.sprite(...)` here as well.
        this.title = this.game.add.sprite(400, 200, 'nogames');
        this.title.anchor.setTo(0.5, 0.5);
    }
};

WaitState.prototype.menuClick = function() {
    //this.game.state.start("MenuState",true,false);
    window.location.replace(window.location.pathname);
};
|
siracoj/ProPain
|
ProPain/src/Wait.js
|
JavaScript
|
apache-2.0
| 1,384 |
package com.planet_ink.coffee_mud.Locales;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2001-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * An outdoor woods locale. Occasionally gives non-monster players poison ivy
 * and offers forest-themed gatherable resources.
 */
public class Woods extends StdRoom
{
	@Override
	public String ID()
	{
		return "Woods";
	}

	public Woods()
	{
		super();
		name="the woods";
		// Base weight of 3 — presumably affects movement cost; TODO confirm
		// against StdRoom/phyStats usage.
		basePhyStats.setWeight(3);
		recoverPhyStats();
	}

	@Override
	public int domainType()
	{
		return Room.DOMAIN_OUTDOORS_WOODS;
	}

	/**
	 * On movement-related messages targeting this room, gives the (non-monster)
	 * source a chance of catching poison ivy. Two consecutive 1-in-100 rolls
	 * must both come up 1, i.e. a 1-in-10,000 chance per qualifying message,
	 * unless auto-disease is disabled or the player already has the effect.
	 */
	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		if((msg.amITarget(this)||(msg.targetMinor()==CMMsg.TYP_ADVANCE)||(msg.targetMinor()==CMMsg.TYP_RETREAT))
		&&(!msg.source().isMonster())
		&&(CMLib.dice().rollPercentage()==1)
		&&(CMLib.dice().rollPercentage()==1)
		&&(isInhabitant(msg.source()))
		&&(!CMSecurity.isDisabled(CMSecurity.DisFlag.AUTODISEASE)))
		{
			final Ability A=CMClass.getAbility("Disease_PoisonIvy");
			if((A!=null)
			&&(msg.source().fetchEffect(A.ID())==null)
			&&(!CMSecurity.isAbilityDisabled(A.ID())))
				A.invoke(msg.source(),msg.source(),true,0);
		}
		super.executeMsg(myHost,msg);
	}

	// Raw materials that can be found/gathered in a woods room: timber types,
	// forest foods, and animal products.
	public static final Integer[] resourceList={
		Integer.valueOf(RawMaterial.RESOURCE_WOOD),
		Integer.valueOf(RawMaterial.RESOURCE_PINE),
		Integer.valueOf(RawMaterial.RESOURCE_OAK),
		Integer.valueOf(RawMaterial.RESOURCE_MAPLE),
		Integer.valueOf(RawMaterial.RESOURCE_REDWOOD),
		Integer.valueOf(RawMaterial.RESOURCE_SAP),
		Integer.valueOf(RawMaterial.RESOURCE_YEW),
		Integer.valueOf(RawMaterial.RESOURCE_HICKORY),
		Integer.valueOf(RawMaterial.RESOURCE_TEAK),
		Integer.valueOf(RawMaterial.RESOURCE_CEDAR),
		Integer.valueOf(RawMaterial.RESOURCE_ELM),
		Integer.valueOf(RawMaterial.RESOURCE_CHERRYWOOD),
		Integer.valueOf(RawMaterial.RESOURCE_BEECHWOOD),
		Integer.valueOf(RawMaterial.RESOURCE_WILLOW),
		Integer.valueOf(RawMaterial.RESOURCE_SYCAMORE),
		Integer.valueOf(RawMaterial.RESOURCE_SPRUCE),
		Integer.valueOf(RawMaterial.RESOURCE_FLOWERS),
		Integer.valueOf(RawMaterial.RESOURCE_FRUIT),
		Integer.valueOf(RawMaterial.RESOURCE_APPLES),
		Integer.valueOf(RawMaterial.RESOURCE_BERRIES),
		Integer.valueOf(RawMaterial.RESOURCE_PEACHES),
		Integer.valueOf(RawMaterial.RESOURCE_CHERRIES),
		Integer.valueOf(RawMaterial.RESOURCE_ORANGES),
		Integer.valueOf(RawMaterial.RESOURCE_LEMONS),
		Integer.valueOf(RawMaterial.RESOURCE_FUR),
		Integer.valueOf(RawMaterial.RESOURCE_NUTS),
		Integer.valueOf(RawMaterial.RESOURCE_HERBS),
		Integer.valueOf(RawMaterial.RESOURCE_DIRT),
		Integer.valueOf(RawMaterial.RESOURCE_HONEY),
		Integer.valueOf(RawMaterial.RESOURCE_VINE),
		Integer.valueOf(RawMaterial.RESOURCE_HIDE),
		Integer.valueOf(RawMaterial.RESOURCE_FEATHERS),
		Integer.valueOf(RawMaterial.RESOURCE_LEATHER)};

	// Shared, effectively read-only list view over resourceList.
	public static final List<Integer> roomResources=new Vector<Integer>(Arrays.asList(resourceList));

	@Override
	public List<Integer> resourceChoices()
	{
		return Woods.roomResources;
	}
}
|
bozimmerman/CoffeeMud
|
com/planet_ink/coffee_mud/Locales/Woods.java
|
Java
|
apache-2.0
| 4,316 |
namespace Wundercal.Services.Dto
{
    /// <summary>
    /// DTO representing a Wunderlist task list (id and display title).
    /// </summary>
    public class WunderlistList
    {
        /// <summary>Wunderlist-assigned identifier of the list.</summary>
        public int Id { get; set; }

        /// <summary>Display title of the list.</summary>
        public string Title { get; set; }
    }
}
|
marska/wundercal
|
src/Wundercal/Services/Dto/WunderlistList.cs
|
C#
|
apache-2.0
| 145 |
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.actions;
import com.intellij.codeInsight.CodeInsightActionHandler;
import javax.annotation.Nonnull;
/**
* @author Dmitry Avdeev
*/
public abstract class SimpleCodeInsightAction extends CodeInsightAction implements CodeInsightActionHandler {
  /**
   * The action is its own handler: subclasses implement the handler's
   * invoke(..) directly instead of providing a separate handler object.
   */
  @Nonnull
  @Override
  protected CodeInsightActionHandler getHandler() {
    return this;
  }

  /** Runs inside a write action by default so the handler may modify documents. */
  @Override
  public boolean startInWriteAction() {
    return true;
  }
}
|
consulo/consulo
|
modules/base/lang-api/src/main/java/com/intellij/codeInsight/actions/SimpleCodeInsightAction.java
|
Java
|
apache-2.0
| 1,052 |
/*
* Copyright 2016 ANI Technologies Pvt. Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.olacabs.fabric.processors.kafkawriter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.olacabs.fabric.compute.ProcessingContext;
import com.olacabs.fabric.compute.processor.InitializationException;
import com.olacabs.fabric.compute.processor.ProcessingException;
import com.olacabs.fabric.compute.processor.StreamingProcessor;
import com.olacabs.fabric.compute.util.ComponentPropertyReader;
import com.olacabs.fabric.model.common.ComponentMetadata;
import com.olacabs.fabric.model.event.Event;
import com.olacabs.fabric.model.event.EventSet;
import com.olacabs.fabric.model.processor.Processor;
import com.olacabs.fabric.model.processor.ProcessorType;
import kafka.javaapi.producer.Producer;
import kafka.producer.DefaultPartitioner;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import lombok.Getter;
import lombok.Setter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
/**
* TODO Javadoc.
*/
@Processor(
namespace = "global",
name = "kafka-writer",
version = "2.0",
description = "A processor that write data into kafka",
cpu = 0.1,
memory = 1,
processorType = ProcessorType.EVENT_DRIVEN,
requiredProperties = {
"brokerList",
"ingestionPoolSize",
"kafkaKeyJsonPath"
},
optionalProperties = {
"isTopicOnJsonPath",
"topic",
"topicJsonPath",
"ignoreError",
"kafkaSerializerClass",
"ackCount"
})
public class KafkaWriter extends StreamingProcessor {
private static final Logger LOGGER = LoggerFactory.getLogger(KafkaWriter.class.getSimpleName());
private static final boolean DEFAULT_IGNORE_SERIALIZATION_ERROR = false;
private static final boolean DEFAULT_TOPIC_ON_JSON_PATH = false;
private static final String DEFAULT_SERIALIZER_CLASS = "kafka.serializer.StringEncoder";
private static final String DEFAULT_KAFKA_KEY_JSON_PATH = "/metadata/partitionKey/value";
private static final int DEFAULT_ACK_COUNT = 1;
private static final int DEFAULT_BATCH_SIZE = 10;
private static final String ACK_COUNT = "-1";
private String kafkaKeyJsonPath;
private boolean ignoreError;
private ObjectMapper mapper;
@Getter
@Setter
private String kafkaTopic;
@Getter
@Setter
private String kafkaTopicJsonPath;
@Getter
@Setter
private int ingestionPoolSize;
@Getter
@Setter
private Producer<String, String> producer;
@Getter
@Setter
private boolean isTopicOnJsonPath = false;
    @Override
    protected EventSet consume(ProcessingContext processingContext, EventSet eventSet) throws ProcessingException {
        // Convert every event in the set to a Kafka keyed message, then publish
        // them in batches. The incoming event set is passed through unchanged.
        final List<KeyedMessage<String, String>> messages = Lists.newArrayList();
        try {
            eventSet.getEvents().forEach(event -> {
                KeyedMessage<String, String> convertedMessage = null;
                try {
                    convertedMessage = convertEvent(event);
                } catch (ProcessingException e) {
                    // Lambdas cannot throw checked exceptions: wrap and rethrow;
                    // the outer catch re-wraps into a ProcessingException.
                    LOGGER.error("Error converting byte stream to event: ", e);
                    throw new RuntimeException(e);
                }
                // convertEvent(..) returns null when the event was skipped
                // because ignoreError is set.
                if (null != convertedMessage) {
                    messages.add(convertedMessage);
                }
            });
        } catch (final Exception e) {
            LOGGER.error("Error converting byte stream to event: ", e);
            throw new ProcessingException(e);
        }
        // Publish in sub-lists of at most ingestionPoolSize messages each.
        Lists.partition(messages, ingestionPoolSize).forEach(messageList -> getProducer().send(messageList));
        return eventSet;
    }
    /**
     * Converts a fabric {@link Event} into a Kafka keyed message.
     *
     * Uses the event's pre-parsed JSON if available, otherwise parses the raw
     * byte payload. The Kafka partition key and (optionally) the topic are
     * extracted from the JSON via configured JSON-pointer paths.
     *
     * @param event the event to convert
     * @return the keyed message, or {@code null} if conversion failed and
     *         {@code ignoreError} is set
     * @throws ProcessingException if conversion fails and errors are not ignored
     */
    protected KeyedMessage<String, String> convertEvent(Event event) throws ProcessingException {
        JsonNode eventData = event.getJsonNode();
        if (null == eventData) {
            // No pre-parsed JSON on the event; try to parse the raw byte payload.
            if (event.getData() instanceof byte[]) {
                try {
                    eventData = mapper.readTree((byte[]) event.getData());
                } catch (IOException e) {
                    LOGGER.error("Error converting byte stream to event: ", e);
                    if (!ignoreError) {
                        LOGGER.error("Error converting byte stream to event");
                        throw new ProcessingException("Error converting byte stream to event", e);
                    }
                    // ignoreError: skip this event silently (caller drops nulls).
                    return null;
                }
            } else {
                if (!ignoreError) {
                    LOGGER.error("Error converting byte stream to event: Event is not byte stream");
                    throw new ProcessingException("Error converting byte stream to event: Event is not byte stream");
                }
                return null;
            }
        }
        // Partition key comes from the configured JSON pointer (or the default
        // path); surrounding quotes from JSON text rendering are stripped.
        final String kafkaKey = kafkaKeyJsonPath != null
                ? eventData.at(kafkaKeyJsonPath).asText().replace("\"", "")
                : eventData.at(DEFAULT_KAFKA_KEY_JSON_PATH).asText().replace("\"", "");
        // Topic is either read from the event's JSON or statically configured.
        final String topic = isTopicOnJsonPath()
                ? eventData.at(getKafkaTopicJsonPath()).toString().replace("\"", "")
                : getKafkaTopic().replace("\"", "");
        return new KeyedMessage<>(topic, kafkaKey, eventData.toString());
    }
@Override
public void initialize(String instanceId, Properties globalProperties, Properties properties,
    ComponentMetadata componentMetadata) throws InitializationException {
    // Mandatory: comma-separated Kafka broker list.
    final String kafkaBrokerList = ComponentPropertyReader
        .readString(properties, globalProperties, "brokerList", instanceId, componentMetadata);
    // The topic is either fixed ("topic") or resolved per-event from a JSON pointer ("topicJsonPath").
    isTopicOnJsonPath = ComponentPropertyReader
        .readBoolean(properties, globalProperties, "isTopicOnJsonPath", instanceId, componentMetadata,
            DEFAULT_TOPIC_ON_JSON_PATH);
    if (!isTopicOnJsonPath) {
        kafkaTopic = ComponentPropertyReader
            .readString(properties, globalProperties, "topic", instanceId, componentMetadata);
        if (kafkaTopic == null) {
            // NOTE(review): throws RuntimeException although the method declares
            // InitializationException -- confirm whether this is intentional.
            LOGGER.error("Kafka topic in properties not found");
            throw new RuntimeException("Kafka topic in properties not found");
        }
        setKafkaTopic(kafkaTopic);
    } else {
        kafkaTopicJsonPath = ComponentPropertyReader
            .readString(properties, globalProperties, "topicJsonPath", instanceId, componentMetadata);
        if (kafkaTopicJsonPath == null) {
            LOGGER.error("Kafka topic json path not found");
            throw new RuntimeException("Kafka topic json path not found");
        }
        setKafkaTopicJsonPath(kafkaTopicJsonPath);
    }
    // JSON pointer used to pull the Kafka partition key out of each event.
    kafkaKeyJsonPath = ComponentPropertyReader
        .readString(properties, globalProperties, "kafkaKeyJsonPath", instanceId, componentMetadata,
            DEFAULT_KAFKA_KEY_JSON_PATH);
    final String kafkaSerializerClass = ComponentPropertyReader
        .readString(properties, globalProperties, "kafkaSerializerClass", instanceId, componentMetadata,
            DEFAULT_SERIALIZER_CLASS);
    // Max number of messages handed to the producer per send() call (see consume()).
    ingestionPoolSize = ComponentPropertyReader
        .readInteger(properties, globalProperties, "ingestionPoolSize", instanceId, componentMetadata,
            DEFAULT_BATCH_SIZE);
    final Integer ackCount = ComponentPropertyReader
        .readInteger(properties, globalProperties, "ackCount", instanceId, componentMetadata,
            DEFAULT_ACK_COUNT);
    ignoreError = ComponentPropertyReader
        .readBoolean(properties, globalProperties, "ignoreError", instanceId, componentMetadata,
            DEFAULT_IGNORE_SERIALIZATION_ERROR);
    final Properties props = new Properties();
    props.put("metadata.broker.list", kafkaBrokerList);
    props.put("serializer.class", kafkaSerializerClass);
    props.put("partitioner.class", DefaultPartitioner.class.getName());
    // NOTE(review): "request.required.acks" is hard-coded to ACK_COUNT ("-1") while
    // the configured "ackCount" only feeds "min.isr" -- confirm this split is intentional.
    props.put("request.required.acks", ACK_COUNT);
    props.put("min.isr", Integer.toString(ackCount));
    producer = new Producer<>(new ProducerConfig(props));
    mapper = new ObjectMapper();
    LOGGER.info("Initialized kafka writer...");
}
@Override
public void destroy() {
    // Close the Kafka producer, releasing its connections on shutdown.
    producer.close();
    LOGGER.info("Closed kafka writer...");
}
}
|
olacabs/fabric
|
fabric-components/kafka-writer/src/main/java/com/olacabs/fabric/processors/kafkawriter/KafkaWriter.java
|
Java
|
apache-2.0
| 9,285 |
package navyblue.top.colortalk.ui.activities;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import com.github.paolorotolo.appintro.AppIntro2;
import navyblue.top.colortalk.R;
import navyblue.top.colortalk.ui.fragments.SampleSlide;
/**
* Created by CIR on 16/6/4.
*/
/**
 * Onboarding activity that shows the app-intro slide deck and closes itself
 * when the user finishes or skips the intro.
 */
public class AppIntroActivity extends AppIntro2 {
    /** Intro slide layouts, in presentation order. */
    private static final int[] SLIDE_LAYOUTS = {
        R.layout.intro,
        R.layout.intro_2,
        R.layout.intro3,
        R.layout.intro4,
        R.layout.intro5,
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setFlowAnimation();
        for (int layout : SLIDE_LAYOUTS) {
            addSlide(SampleSlide.newInstance(layout));
        }
    }

    @Override
    public void onDonePressed(Fragment currentFragment) {
        // Leaving the intro, either way, simply closes this activity.
        finish();
    }

    @Override
    public void onSkipPressed(Fragment currentFragment) {
        finish();
    }
}
|
YogiAi/ColorTalk_Android
|
app/src/main/java/navyblue/top/colortalk/ui/activities/AppIntroActivity.java
|
Java
|
apache-2.0
| 990 |
/*
* Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.codeInspection.i18n;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.template.macro.MacroUtil;
import com.intellij.lang.properties.*;
import com.intellij.lang.properties.ResourceBundle;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.lang.properties.psi.PropertyCreationHandler;
import com.intellij.lang.properties.references.I18nUtil;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.scope.util.PsiScopesUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.ArrayUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.uast.*;
import java.text.MessageFormat;
import java.util.*;
/**
* @author max
*/
public class JavaI18nUtil extends I18nUtil {
public static final PropertyCreationHandler DEFAULT_PROPERTY_CREATION_HANDLER =
(project, propertiesFiles, key, value, parameters) -> createProperty(project, propertiesFiles, key, value, true);
private JavaI18nUtil() {
}
/**
 * Returns the text range to operate on: the editor selection when one exists,
 * otherwise the range of the PSI element under the caret.
 *
 * @return the range, or {@code null} when there is no editor or the caret
 *         sits on whitespace/no element
 */
@Nullable
public static TextRange getSelectedRange(Editor editor, final PsiFile psiFile) {
    if (editor == null) return null;
    // An explicit selection wins: return exactly the selected span.
    if (editor.getSelectionModel().getSelectedText() != null) {
        return new TextRange(editor.getSelectionModel().getSelectionStart(),
                             editor.getSelectionModel().getSelectionEnd());
    }
    // Otherwise fall back to the PSI element at the caret, ignoring whitespace.
    final PsiElement elementAtCaret = psiFile.findElementAt(editor.getCaretModel().getOffset());
    return elementAtCaret == null || elementAtCaret instanceof PsiWhiteSpace
           ? null
           : elementAtCaret.getTextRange();
}
/**
 * Returns whether this expression must be an i18n property key: either it
 * initializes a variable annotated with {@code @PropertyKey}, or it is passed
 * to a parameter annotated with {@code @PropertyKey}.
 *
 * @param resourceBundleRef if non-null, receives the annotation's resourceBundle attribute value
 */
public static boolean mustBePropertyKey(@NotNull PsiExpression expression, @Nullable Ref<? super PsiAnnotationMemberValue> resourceBundleRef) {
    PsiElement parent = expression.getParent();
    if (parent instanceof PsiVariable) {
        final PsiAnnotation annotation = AnnotationUtil.findAnnotation((PsiVariable)parent, AnnotationUtil.PROPERTY_KEY);
        if (annotation != null) {
            processAnnotationAttributes(resourceBundleRef, annotation);
            return true;
        }
    }
    return isPassedToAnnotatedParam(expression, AnnotationUtil.PROPERTY_KEY, resourceBundleRef, null);
}
/**
 * UAST variant of {@code mustBePropertyKey(PsiExpression, ...)}: the literal must
 * be a property key when it initializes a {@code @PropertyKey}-annotated variable
 * or is an argument bound to a {@code @PropertyKey}-annotated parameter.
 */
public static boolean mustBePropertyKey(@NotNull ULiteralExpression expression, @Nullable Ref<? super UExpression> resourceBundleRef) {
    final UElement parent = expression.getUastParent();
    if (parent instanceof UVariable) {
        UAnnotation annotation = ((UVariable)parent).findAnnotation(AnnotationUtil.PROPERTY_KEY);
        if (annotation != null) {
            processAnnotationAttributes(resourceBundleRef, annotation);
            return true;
        }
    }
    // Otherwise: locate the call this literal is an argument of and check the
    // annotation on the matching parameter (including super methods).
    UCallExpression callExpression = UastUtils.getUCallExpression(expression);
    if (callExpression == null) return false;
    PsiMethod psiMethod = callExpression.resolve();
    if (psiMethod == null) return false;
    PsiParameter parameter = UastUtils.getParameterForArgument(callExpression, expression);
    if (parameter == null) return false;
    int paramIndex = ArrayUtil.indexOf(psiMethod.getParameterList().getParameters(), parameter);
    if (paramIndex == -1) return false;
    return isMethodParameterAnnotatedWith(psiMethod, paramIndex, null, AnnotationUtil.PROPERTY_KEY, null, null);
}
/**
 * Returns whether the expression is (possibly via enclosing expressions) an
 * argument of a call whose corresponding parameter carries the annotation
 * {@code annFqn}.
 *
 * @param resourceBundleRef if non-null, receives the annotation's resourceBundle attribute
 * @param nonNlsTargets     if non-null, collects parameters that lacked the annotation
 */
static boolean isPassedToAnnotatedParam(@NotNull PsiExpression expression,
                                        final String annFqn,
                                        @Nullable Ref<? super PsiAnnotationMemberValue> resourceBundleRef,
                                        @Nullable final Set<? super PsiModifierListOwner> nonNlsTargets) {
    expression = getTopLevelExpression(expression);
    final PsiElement parent = expression.getParent();
    if (!(parent instanceof PsiExpressionList)) return false;
    // Find which argument slot contains (an ancestor of) the expression.
    int idx = -1;
    final PsiExpression[] args = ((PsiExpressionList)parent).getExpressions();
    for (int i = 0; i < args.length; i++) {
        PsiExpression arg = args[i];
        if (PsiTreeUtil.isAncestor(arg, expression, false)) {
            idx = i;
            break;
        }
    }
    if (idx == -1) return false;
    PsiElement grParent = parent.getParent();
    // Arguments of an anonymous-class instantiation belong to the enclosing 'new' call.
    if (grParent instanceof PsiAnonymousClass) {
        grParent = grParent.getParent();
    }
    if (grParent instanceof PsiCall) {
        PsiMethod method = ((PsiCall)grParent).resolveMethod();
        return method != null && isMethodParameterAnnotatedWith(method, idx, null, annFqn, resourceBundleRef, nonNlsTargets);
    }
    return false;
}
/**
 * Climbs from the given expression to the outermost enclosing expression,
 * stopping below a conditional when the expression is its condition, and
 * stopping at (and including) an enclosing assignment.
 */
@NotNull
static PsiExpression getTopLevelExpression(@NotNull PsiExpression expression) {
    PsiExpression current = expression;
    while (current.getParent() instanceof PsiExpression) {
        final PsiExpression enclosing = (PsiExpression)current.getParent();
        // Being the condition of ?: says nothing about the branches -- stop here.
        if (enclosing instanceof PsiConditionalExpression
            && ((PsiConditionalExpression)enclosing).getCondition() == current) {
            break;
        }
        current = enclosing;
        // An assignment is as far up as we need to look.
        if (current instanceof PsiAssignmentExpression) break;
    }
    return current;
}
/**
 * Returns whether parameter {@code idx} of {@code method} (or of any of its
 * super methods) carries the annotation {@code annFqn}. Arguments beyond the
 * declared parameter list are mapped onto a trailing varargs parameter.
 *
 * @param processed         methods already visited; guards against hierarchy cycles
 * @param resourceBundleRef if non-null, receives the annotation's resourceBundle attribute
 * @param nonNlsTargets     if non-null, collects parameters that lacked the annotation
 */
static boolean isMethodParameterAnnotatedWith(final PsiMethod method,
                                              final int idx,
                                              @Nullable Collection<? super PsiMethod> processed,
                                              final String annFqn,
                                              @Nullable Ref<? super PsiAnnotationMemberValue> resourceBundleRef,
                                              @Nullable final Set<? super PsiModifierListOwner> nonNlsTargets) {
    if (processed != null) {
        if (processed.contains(method)) return false;
    }
    else {
        processed = new THashSet<>();
    }
    processed.add(method);
    final PsiParameter[] params = method.getParameterList().getParameters();
    PsiParameter param;
    if (idx >= params.length) {
        // Argument index past the declared parameters: only valid for varargs.
        if (params.length == 0) {
            return false;
        }
        PsiParameter lastParam = params[params.length - 1];
        if (lastParam.isVarArgs()) {
            param = lastParam;
        }
        else {
            return false;
        }
    }
    else {
        param = params[idx];
    }
    final PsiAnnotation annotation = AnnotationUtil.findAnnotation(param, annFqn);
    if (annotation != null) {
        processAnnotationAttributes(resourceBundleRef, annotation);
        return true;
    }
    if (nonNlsTargets != null) {
        nonNlsTargets.add(param);
    }
    // Not annotated here -- the annotation may live on an overridden method.
    final PsiMethod[] superMethods = method.findSuperMethods();
    for (PsiMethod superMethod : superMethods) {
        if (isMethodParameterAnnotatedWith(superMethod, idx, processed, annFqn, resourceBundleRef, null)) return true;
    }
    return false;
}
/**
 * Copies the {@code resourceBundle} attribute value of a {@code @PropertyKey}
 * annotation into {@code resourceBundleRef}, when the caller asked for it.
 */
private static void processAnnotationAttributes(@Nullable Ref<? super PsiAnnotationMemberValue> resourceBundleRef,
                                                @NotNull PsiAnnotation annotation) {
    // Nothing to record when the caller did not request the bundle expression.
    if (resourceBundleRef == null) {
        return;
    }
    for (PsiNameValuePair attribute : annotation.getParameterList().getAttributes()) {
        if (AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER.equals(attribute.getName())) {
            resourceBundleRef.set(attribute.getValue());
        }
    }
}
/**
 * UAST variant: copies the {@code resourceBundle} attribute expression of a
 * {@code @PropertyKey} annotation into {@code resourceBundleRef}, when requested.
 */
private static void processAnnotationAttributes(@Nullable Ref<? super UExpression> resourceBundleRef,
                                                @NotNull UAnnotation annotation) {
    if (resourceBundleRef == null) {
        return;
    }
    for (UNamedExpression attribute : annotation.getAttributeValues()) {
        if (AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER.equals(attribute.getName())) {
            resourceBundleRef.set(attribute.getExpression());
        }
    }
}
/**
 * Checks whether {@code expression}, when it must be a property key, actually
 * refers to an existing property {@code key} in its resource bundle. Returns
 * {@code true} when the expression is not required to be a property key at all.
 *
 * @param outResourceBundle receives the resolved bundle name on the property-key path
 */
static boolean isValidPropertyReference(@NotNull Project project,
                                        @NotNull PsiExpression expression,
                                        @NotNull String key,
                                        @NotNull Ref<? super String> outResourceBundle) {
    Ref<PsiAnnotationMemberValue> resourceBundleRef = Ref.create();
    if (mustBePropertyKey(expression, resourceBundleRef)) {
        final Object resourceBundleName = resourceBundleRef.get();
        if (!(resourceBundleName instanceof PsiExpression)) {
            return false;
        }
        PsiExpression expr = (PsiExpression)resourceBundleName;
        final PsiConstantEvaluationHelper constantEvaluationHelper = JavaPsiFacade.getInstance(project).getConstantEvaluationHelper();
        // Try to evaluate the bundle name as a compile-time constant first.
        Object value = constantEvaluationHelper.computeConstantExpression(expr);
        if (value == null) {
            // Not a constant: try the initializer of a referenced final field.
            if (expr instanceof PsiReferenceExpression) {
                final PsiElement resolve = ((PsiReferenceExpression)expr).resolve();
                if (resolve instanceof PsiField && ((PsiField)resolve).hasModifierProperty(PsiModifier.FINAL)) {
                    value = constantEvaluationHelper.computeConstantExpression(((PsiField)resolve).getInitializer());
                    if (value == null) {
                        return false;
                    }
                }
            }
            if (value == null) {
                // Last resort: infer the bundle from the key itself -- only valid
                // when exactly one bundle in the project contains the key.
                final ResourceBundle resourceBundle = resolveResourceBundleByKey(key, project);
                if (resourceBundle == null) {
                    return false;
                }
                final PropertiesFile defaultPropertiesFile = resourceBundle.getDefaultPropertiesFile();
                final String bundleName = BundleNameEvaluator.DEFAULT.evaluateBundleName(defaultPropertiesFile.getContainingFile());
                if (bundleName == null) {
                    return false;
                }
                value = bundleName;
            }
        }
        String bundleName = value.toString();
        outResourceBundle.set(bundleName);
        return isPropertyRef(expression, key, bundleName);
    }
    return true;
}
/**
 * Finds the unique resource bundle containing {@code key}.
 *
 * @return the bundle, or {@code null} when no bundle contains the key or the
 *         key occurs in more than one distinct bundle (processing is aborted)
 */
@Nullable
private static ResourceBundle resolveResourceBundleByKey(@NotNull final String key, @NotNull final Project project) {
    final Ref<ResourceBundle> bundleRef = Ref.create();
    final boolean r = PropertiesReferenceManager.getInstance(project).processAllPropertiesFiles((baseName, propertiesFile) -> {
        if (propertiesFile.findPropertyByKey(key) != null) {
            if (bundleRef.get() == null) {
                bundleRef.set(propertiesFile.getResourceBundle());
            }
            else {
                // Returning false stops processing: the key is ambiguous across bundles.
                return bundleRef.get().equals(propertiesFile.getResourceBundle());
            }
        }
        return true;
    });
    return r ? bundleRef.get() : null;
}
/**
 * Returns whether the property {@code key} exists -- anywhere in the project
 * when {@code resourceBundleName} is null, otherwise in at least one file of
 * that bundle.
 */
static boolean isPropertyRef(final PsiExpression expression, final String key, final String resourceBundleName) {
    // Without a bundle name, any property with this key anywhere qualifies.
    if (resourceBundleName == null) {
        return !PropertiesImplUtil.findPropertiesByKey(expression.getProject(), key).isEmpty();
    }
    // Otherwise the key must occur in at least one file of the named bundle.
    for (PropertiesFile propertiesFile : propertiesFilesByBundleName(resourceBundleName, expression)) {
        if (propertiesFile.findPropertyByKey(key) != null) {
            return true;
        }
    }
    return false;
}
/**
 * Suggests textual expressions of the given type visible at {@code context}:
 * in-scope variables, standard expressions, and no-arg methods with an
 * assignable return type. A null {@code type} matches all variables.
 */
public static Set<String> suggestExpressionOfType(final PsiClassType type, final PsiLiteralExpression context) {
    PsiVariable[] variables = MacroUtil.getVariablesVisibleAt(context, "");
    Set<String> result = new LinkedHashSet<>();
    for (PsiVariable var : variables) {
        PsiType varType = var.getType();
        PsiIdentifier identifier = var.getNameIdentifier();
        if ((type == null || type.isAssignableFrom(varType)) && identifier != null) {
            result.add(identifier.getText());
        }
    }
    PsiExpression[] expressions = MacroUtil.getStandardExpressionsOfType(context, type);
    for (PsiExpression expression : expressions) {
        result.add(expression.getText());
    }
    if (type != null) {
        addAvailableMethodsOfType(type, context, result);
    }
    return result;
}
/**
 * Adds to {@code result} calls ("name()") of all parameterless methods,
 * reachable by walking scopes up from {@code context}, whose return type is
 * assignable to {@code type}.
 */
private static void addAvailableMethodsOfType(final PsiClassType type,
                                              final PsiLiteralExpression context,
                                              final Collection<? super String> result) {
    PsiScopesUtil.treeWalkUp((element, state) -> {
        if (element instanceof PsiMethod) {
            PsiMethod method = (PsiMethod)element;
            PsiType returnType = method.getReturnType();
            if (returnType != null && TypeConversionUtil.isAssignable(type, returnType)
                && method.getParameterList().isEmpty()) {
                result.add(method.getName() + "()");
            }
        }
        // Always continue the walk; we only collect, never abort.
        return true;
    }, context, null);
}
/**
 * Counts the distinct {@link MessageFormat} placeholders in a property value.
 *
 * For <i>Class {0} info: Class {0} extends class {1} and implements interface {2}</i>
 * the result is 3 (repeated indices count once).
 *
 * @return the number of distinct argument indices, or 0 for a malformed pattern
 */
public static int getPropertyValuePlaceholdersCount(@NotNull final String propertyValue) {
    try {
        final MessageFormat format = new MessageFormat(propertyValue);
        return format.getFormatsByArgumentIndex().length;
    }
    catch (final IllegalArgumentException ignored) {
        // Unparseable pattern: treat as having no placeholders.
        return 0;
    }
}
/**
 * Returns the largest number of MessageFormat parameters among all property
 * values the given i18n literal resolves to. For example, for
 *
 * <i>Class {0} info: Class {0} extends class {1} and implements interface {2}</i> in one translation of property
 * <i>Class {0} info: Class {0} extends class {1} </i> in other translation of property
 * <p>
 * the result is 3.
 *
 * @param expression i18n literal
 * @return the maximum parameter count, or -1 when the literal resolves to no properties
 */
public static int getPropertyValueParamsMaxCount(@NotNull final UExpression expression) {
    final SortedSet<Integer> paramsCount = getPropertyValueParamsCount(expression, null);
    if (paramsCount.isEmpty()) {
        return -1;
    }
    return paramsCount.last();
}
/**
 * PSI entry point: bridges to UAST and delegates; expressions that cannot be
 * converted yield an empty set.
 */
@NotNull
static SortedSet<Integer> getPropertyValueParamsCount(@NotNull final PsiExpression expression,
                                                      @Nullable final String resourceBundleName) {
    final UExpression uExpression = UastContextKt.toUElement(expression, UExpression.class);
    return uExpression == null ? new TreeSet<>() : getPropertyValueParamsCount(uExpression, resourceBundleName);
}
/**
 * Collects, for every property the given expression resolves to (optionally
 * restricted to {@code resourceBundleName}), the number of MessageFormat
 * placeholders in that property's value.
 *
 * @return sorted placeholder counts; empty when the expression is neither a
 *         string literal nor a final field initialized with one, or resolves
 *         to no properties
 */
@NotNull
private static SortedSet<Integer> getPropertyValueParamsCount(@NotNull final UExpression expression,
                                                              @Nullable final String resourceBundleName) {
    final ULiteralExpression literalExpression;
    if (expression instanceof ULiteralExpression) {
        literalExpression = (ULiteralExpression)expression;
    }
    else if (expression instanceof UReferenceExpression) {
        final PsiElement resolved = ((UReferenceExpression)expression).resolve();
        // BUGFIX: the reference may resolve to any element (local variable,
        // parameter, method, ...); the previous unconditional (PsiField) cast
        // threw ClassCastException for non-field targets.
        final PsiField field = resolved instanceof PsiField ? (PsiField)resolved : null;
        literalExpression =
            field != null && field.hasModifierProperty(PsiModifier.FINAL) && field.getInitializer() instanceof PsiLiteralExpression
            ? UastContextKt.toUElement(field.getInitializer(), ULiteralExpression.class)
            : null;
    }
    else {
        literalExpression = null;
    }
    final TreeSet<Integer> paramsCount = new TreeSet<>();
    if (literalExpression == null) {
        return paramsCount;
    }
    for (PsiReference reference : UastLiteralUtils.getInjectedReferences(literalExpression)) {
        if (reference instanceof PsiPolyVariantReference) {
            for (ResolveResult result : ((PsiPolyVariantReference)reference).multiResolve(false)) {
                if (result.isValidResult() && result.getElement() instanceof IProperty) {
                    try {
                        final IProperty property = (IProperty)result.getElement();
                        if (resourceBundleName != null) {
                            // Skip properties from other bundles when a bundle filter is given.
                            final PsiFile file = property.getPropertiesFile().getContainingFile();
                            if (!resourceBundleName.equals(BundleNameEvaluator.DEFAULT.evaluateBundleName(file))) {
                                continue;
                            }
                        }
                        final String propertyValue = property.getValue();
                        if (propertyValue == null) {
                            continue;
                        }
                        paramsCount.add(getPropertyValuePlaceholdersCount(propertyValue));
                    }
                    catch (IllegalArgumentException ignored) {
                    }
                }
            }
        }
    }
    return paramsCount;
}
}
|
mdanielwork/intellij-community
|
plugins/java-i18n/src/com/intellij/codeInspection/i18n/JavaI18nUtil.java
|
Java
|
apache-2.0
| 16,944 |
package entity.chess;
import java.util.ArrayList;
import entity.Board;
import entity.Coordinate;
//Author 在线疯狂
//Homepage http://bookshadow.com
/**
 * The general (king) piece. Its legal moves are one step orthogonally inside
 * the palace, plus the direct capture of the opposing general when the two
 * face each other on an open column ("flying general").
 */
public class General extends Chess {
    // Evaluation value of the general; dominates all other piece values.
    public final static int VALUE = 100000;
    // Orthogonal step offsets: right, down, left, up (paired by index).
    private final static int deltaX[] = { 1, 0, -1, 0 };
    private final static int deltaY[] = { 0, 1, 0, -1 };
    public General() {
        super.setCode(Chess.GENERAL);
    }
    /**
     * All squares this general may move to: orthogonal palace squares that are
     * empty or hold an enemy piece, plus the enemy general's square when the
     * flying-general condition holds.
     */
    @Override
    public ArrayList<Coordinate> getPossibleLocations(Board chessBoard) {
        Chess[][] board = chessBoard.getBoard();
        ArrayList<Coordinate> al = new ArrayList<Coordinate>();
        Coordinate sourceCoo = getCoordinate();
        for (int i = 0; i < deltaX.length; i++) {
            int x = sourceCoo.getX() + deltaX[i];
            int y = sourceCoo.getY() + deltaY[i];
            // Must stay inside the palace.
            if (!isValid(x, y)) {
                continue;
            }
            // Cannot capture own piece.
            if (board[x][y] != null && (board[x][y].getColor() == getColor())) {
                continue;
            }
            al.add(new Coordinate(x, y));
        }
        Coordinate enemyGeneralCoo = generalMeet(board);
        if (enemyGeneralCoo != null)
            al.add(enemyGeneralCoo);
        return al;
    }
    /**
     * Palace bounds: rows 0-2 (top palace) or 7-9 (bottom palace), columns 3-5.
     */
    @Override
    public boolean isValid(int x, int y) {
        return ((x <= 2 && x >= 0) || (x >= 7 && x <= 9)) && (y >= 3 && y <= 5);
    }
    /**
     * Flying-general check: returns the opposing general's coordinate when both
     * generals share a column and no piece stands between them (exactly the two
     * generals are counted on the segment), otherwise null.
     */
    public Coordinate generalMeet(Chess board[][]) {
        char color = getColor();
        Chess opponentGeneral = Board.findGeneral(board, Chess.oppositeColor(color));
        if (opponentGeneral == null)
            return null;
        int sy = getCoordinate().getY();
        int oy = opponentGeneral.getCoordinate().getY();
        if (sy == oy) {
            // countChess == 2 means only the two generals lie on the segment.
            if (Board.countChess(board, getCoordinate(), opponentGeneral.getCoordinate()) == 2) {
                return opponentGeneral.getCoordinate();
            }
        }
        return null;
    }
    @Override
    public int getValue(Board chessBoard) {
        return VALUE;
    }
}
|
qinjiannet/screen-chess-qq
|
src/entity/chess/General.java
|
Java
|
apache-2.0
| 1,727 |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* External dependencies
*/
import { memo } from '@googleforcreators/react';
/**
* Internal dependencies
*/
import {
SwapMedia,
LayerOpacity,
FlipHorizontal,
FlipVertical,
BorderWidthAndColor,
More,
Separator,
Dismiss,
} from '../elements';
/**
 * Floating quick-action menu for a selected image element.
 *
 * Memoized: the component takes no props and renders a fixed sequence of
 * shared menu items (media swap, layer opacity, flips, border, overflow menu,
 * dismiss) separated by visual dividers.
 */
const FloatingImageMenu = memo(function FloatingImageMenu() {
  return (
    <>
      <SwapMedia />
      <Separator />
      <LayerOpacity />
      <Separator />
      <FlipHorizontal />
      <FlipVertical />
      <Separator />
      <BorderWidthAndColor />
      <Separator />
      <More />
      <Separator />
      <Dismiss />
    </>
  );
});

export default FloatingImageMenu;
|
GoogleForCreators/web-stories-wp
|
packages/story-editor/src/components/floatingMenu/menus/image.js
|
JavaScript
|
apache-2.0
| 1,257 |
/*
* Copyright (c) 2009 Piotr Piastucki
*
* This file is part of Patchca CAPTCHA library.
*
* Patchca is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Patchca is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Patchca. If not, see <http://www.gnu.org/licenses/>.
*/
package com.xiongyingqi.captcha.filter;
import java.awt.image.BufferedImageOp;
import java.util.List;
/**
 * Filter factory whose filter chain is supplied externally (e.g. via a setter
 * from configuration) rather than built by the factory itself.
 */
public class ConfigurableFilterFactory extends AbstractFilterFactory {
    // Image operations handed back to callers; presumably applied in list
    // order by AbstractFilterFactory -- confirm against the base class.
    private List<BufferedImageOp> filters;
    @Override
    public List<BufferedImageOp> getFilters() {
        return filters;
    }
    public void setFilters(List<BufferedImageOp> filters) {
        this.filters = filters;
    }
}
|
blademainer/common_utils
|
common_helper/src/main/java/com/xiongyingqi/captcha/filter/ConfigurableFilterFactory.java
|
Java
|
apache-2.0
| 1,181 |
package cn.felord.wepay.ali.sdk.api.request;
import java.util.Map;
import cn.felord.wepay.ali.sdk.api.AlipayRequest;
import cn.felord.wepay.ali.sdk.api.internal.util.AlipayHashMap;
import cn.felord.wepay.ali.sdk.api.response.KoubeiRetailShopitemUploadResponse;
import cn.felord.wepay.ali.sdk.api.AlipayObject;
/**
* ALIPAY API: koubei.retail.shopitem.upload request
*
* @author auto create
* @version $Id: $Id
*/
public class KoubeiRetailShopitemUploadRequest implements AlipayRequest<KoubeiRetailShopitemUploadResponse> {
private AlipayHashMap udfParams; // add user-defined text parameters
private String apiVersion="1.0";
/**
* isv 回传的门店商品信息上传接口
*/
private String bizContent;
/**
* <p>Setter for the field <code>bizContent</code>.</p>
*
* @param bizContent a {@link java.lang.String} object.
*/
public void setBizContent(String bizContent) {
this.bizContent = bizContent;
}
/**
* <p>Getter for the field <code>bizContent</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getBizContent() {
return this.bizContent;
}
private String terminalType;
private String terminalInfo;
private String prodCode;
private String notifyUrl;
private String returnUrl;
private boolean needEncrypt=false;
private AlipayObject bizModel=null;
/**
* <p>Getter for the field <code>notifyUrl</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getNotifyUrl() {
return this.notifyUrl;
}
/** {@inheritDoc} */
public void setNotifyUrl(String notifyUrl) {
this.notifyUrl = notifyUrl;
}
/**
* <p>Getter for the field <code>returnUrl</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getReturnUrl() {
return this.returnUrl;
}
/** {@inheritDoc} */
public void setReturnUrl(String returnUrl) {
this.returnUrl = returnUrl;
}
/**
* <p>Getter for the field <code>apiVersion</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getApiVersion() {
return this.apiVersion;
}
/** {@inheritDoc} */
public void setApiVersion(String apiVersion) {
this.apiVersion = apiVersion;
}
/** {@inheritDoc} */
public void setTerminalType(String terminalType){
this.terminalType=terminalType;
}
/**
* <p>Getter for the field <code>terminalType</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getTerminalType(){
return this.terminalType;
}
/** {@inheritDoc} */
public void setTerminalInfo(String terminalInfo){
this.terminalInfo=terminalInfo;
}
/**
* <p>Getter for the field <code>terminalInfo</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getTerminalInfo(){
return this.terminalInfo;
}
/** {@inheritDoc} */
public void setProdCode(String prodCode) {
this.prodCode=prodCode;
}
/**
* <p>Getter for the field <code>prodCode</code>.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getProdCode() {
return this.prodCode;
}
/**
* <p>getApiMethodName.</p>
*
* @return a {@link java.lang.String} object.
*/
public String getApiMethodName() {
return "koubei.retail.shopitem.upload";
}
/**
 * Assembles the request's text parameters: the mandatory {@code biz_content}
 * plus any user-defined extras registered via {@code putOtherTextParam}.
 *
 * @return a {@link java.util.Map} object.
 */
public Map<String, String> getTextParams() {
    final AlipayHashMap textParams = new AlipayHashMap();
    textParams.put("biz_content", this.bizContent);
    if (this.udfParams != null) {
        textParams.putAll(this.udfParams);
    }
    return textParams;
}
/**
 * Registers an additional user-defined text parameter to send with the
 * request; the backing map is created lazily on first use.
 *
 * @param key a {@link java.lang.String} object.
 * @param value a {@link java.lang.String} object.
 */
public void putOtherTextParam(String key, String value) {
    if(this.udfParams == null) {
        this.udfParams = new AlipayHashMap();
    }
    this.udfParams.put(key, value);
}
/**
* <p>getResponseClass.</p>
*
* @return a {@link java.lang.Class} object.
*/
public Class<KoubeiRetailShopitemUploadResponse> getResponseClass() {
return KoubeiRetailShopitemUploadResponse.class;
}
/**
* <p>isNeedEncrypt.</p>
*
* @return a boolean.
*/
public boolean isNeedEncrypt() {
return this.needEncrypt;
}
/** {@inheritDoc} */
public void setNeedEncrypt(boolean needEncrypt) {
this.needEncrypt=needEncrypt;
}
/**
* <p>Getter for the field <code>bizModel</code>.</p>
*
* @return a {@link cn.felord.wepay.ali.sdk.api.AlipayObject} object.
*/
public AlipayObject getBizModel() {
return this.bizModel;
}
/** {@inheritDoc} */
public void setBizModel(AlipayObject bizModel) {
this.bizModel=bizModel;
}
}
|
NotFound403/WePay
|
src/main/java/cn/felord/wepay/ali/sdk/api/request/KoubeiRetailShopitemUploadRequest.java
|
Java
|
apache-2.0
| 4,790 |
#include<iostream>
#include<vector>
#include<list>
#include<queue>
using namespace std;
// Prints the vertices of the graph reachable from `src` in breadth-first
// order, space-separated (with a trailing space, matching the original
// output format).
//
// Fixes: the adjacency structure is now taken by const reference instead of
// being copied per call, and vertices are marked visited inside the guard at
// enqueue time (the old code re-assigned visited[] for already-seen
// neighbours on every encounter).
void breadth_first_search(const vector<list<int>> &graph, int src) {
    vector<bool> visited(graph.size(), false);
    queue<int> pending;
    pending.push(src);
    visited[src] = true;  // mark on enqueue so each vertex is queued at most once
    while (!pending.empty()) {
        int vertex = pending.front();
        pending.pop();
        cout << vertex << " ";
        for (int neighbour : graph[vertex]) {
            if (!visited[neighbour]) {
                visited[neighbour] = true;
                pending.push(neighbour);
            }
        }
    }
}
// Reads an undirected graph from stdin -- vertex count v, edge count e, then
// e "src des" pairs -- followed by a start vertex, and prints its BFS order.
int main(){
    vector<list<int>> graph;
    int v,e,src,des;
    cin >> v >> e;
    graph.resize(v);
    while(e--){
        cin >> src >> des;
        // Undirected: store the edge in both adjacency lists.
        graph[src].push_back(des);
        graph[des].push_back(src);
    }
    cin >> src;
    breadth_first_search(graph,src);
    return 0;
}
|
mission-peace/interview
|
C++/Graph Algorithms/Breadth First Search.cpp
|
C++
|
apache-2.0
| 843 |
// Bibliography widget state: the current search options, collected source
// entries, their formatted citation texts, and the localStorage key used for
// persistence.
function Bibliography() {
    // source: 0 = book, 1 = journal, 2 = web site (see getCitation's switch).
    this.searchOptions = {
        source: 0,
        value: '',
        style: ''
    };
    this.bibliography = [];
    this.bibliographyText = [];
    this.localStorageKey = "Bibliography";
};
// Renders book search results: title from the display data, description built
// from contributors, publisher and year.
Bibliography.prototype.showBookSearchResult = function (results) {
    // BUGFIX: `key` was used without declaration, leaking a global variable
    // (and throwing a ReferenceError in strict mode).
    for (var key in results) {
        var title = results[key].display.title;
        var description = "";
        var contributors = results[key].data.contributors;
        contributors.forEach(function (contributors_item) {
            if (description != "") {
                description += ", " + contributors_item.first + " " + contributors_item.last;
            } else {
                description += contributors_item.first + " " + contributors_item.last;
            }
        });
        if (results[key].display.publisher) {
            description += " - " + results[key].display.publisher;
        }
        if (results[key].display.year) {
            description += " - " + results[key].display.year;
        }
        createSearchItem(title, description, key);
    }
};
// Renders journal search results: title from the journal entry, description
// from the publication it appeared in.
Bibliography.prototype.showJournalSearchResult = function (results) {
    results.forEach(function (item, index) {
        createSearchItem(item.data.journal.title, item.data.pubjournal.title, index);
    });
};
// Renders web-site search results.
// NOTE(review): urlSearchResult (hostname extraction with a graceful fallback
// for unparseable URLs) is computed but never used below -- confirm whether
// the title was meant to show the hostname instead of the full display URL.
Bibliography.prototype.showWebSiteSearchResult = function (results) {
    var urlSearchResult;
    results.forEach(function(results_item, i) {
        try {
            urlSearchResult = new URL(results_item.display.displayurl).hostname;
        } catch(error) {
            urlSearchResult = results_item.display.displayurl;
        }
        createSearchItem(results_item.display.title + "(" + results_item.display.displayurl + ")", results_item.display.summary, i);
    });
};
// Requests a formatted citation for `data` from the server-side EasyBib proxy
// and, on success, appends it to bibliographyText and renders it.
// NOTE(review): nearly identical to updateCitations below, which updates an
// existing entry in place -- consider extracting the shared AJAX call.
Bibliography.prototype.createCitations = function (id, data) {
    var biblist = this;
    $.ajax({
        url: '/api/2.0/files/easybib-citation',
        type: "POST",
        data: {
            citationData: JSON.stringify(data)
        },
        success: function(answer) {
            if (answer.response.success) {
                try {
                    var citation = JSON.parse(answer.response.citation);
                    if (citation.status === 'ok') {
                        biblist.bibliographyText.push({
                            id: id,
                            data: citation.data
                        });
                        createBibItem(id, citation.data);
                    }
                } catch(e) {
                    // Malformed citation payload: log and leave the list unchanged.
                    console.log(e.message);
                }
            }
        },
    });
};
// Re-requests the citation for an existing entry (e.g. after a style change)
// and replaces the cached citation text for `id` before re-rendering it.
Bibliography.prototype.updateCitations = function (id, data) {
    var biblist = this;
    $.ajax({
        url: '/api/2.0/files/easybib-citation',
        type: "POST",
        data: {
            citationData: JSON.stringify(data)
        },
        success: function (answer) {
            if (answer.response.success) {
                try {
                    var citation = JSON.parse(answer.response.citation);
                    if (citation.status === 'ok') {
                        // Update the cached citation for this id in place.
                        if (biblist.bibliographyText.length > 0) {
                            biblist.bibliographyText.forEach(function (item) {
                                if (item.id == id) {
                                    item.data = citation.data;
                                }
                            });
                        }
                        createBibItem(id, citation.data);
                    }
                } catch (e) {
                    console.log(e.message);
                }
            }
        },
    });
};
// Builds the EasyBib citation request payload for the search result `id`,
// shaping it per source kind (0 = book, 1 = journal, 2 = web site), then
// fetches the citation and persists it via saveCitation.
Bibliography.prototype.getCitation = function (id, foundData, bibliographyStorage, fileId) {
    var biblist = this;
    var source = foundData.results[id];
    var data;
    switch (this.searchOptions.source) {
        case 0:
            // Book: non-periodical publication fields.
            data = {
                style: $('#styles option:selected').val() ? $('#styles option:selected').val() : "3d-research",
                pubtype: source.data.pubtype,
                pubnonperiodical: source.data.pubnonperiodical,
                contributors: source.data.contributors,
                other: source.data.other,
                source: source.data.source
            };
            break;
        case 1:
            // Journal article.
            data = {
                style: $('#styles option:selected').val() ? $('#styles option:selected').val() : "3d-research",
                pubtype: source.data.pubtype,
                pubjournal: source.data.pubjournal,
                publication_type: source.data.publication_type,
                contributors: source.data.contributors,
                other: source.data.other,
                source: source.data.source
            };
            break;
        case 2:
            // Web site: carry the display URL into the online-publication data.
            source.data.pubonline.url = source.display.displayurl;
            data = {
                style: $('#styles option:selected').val() ? $('#styles option:selected').val() : "3d-research",
                autocite: source.display.displayurl,
                pubtype: source.data.pubtype,
                pubonline: source.data.pubonline,
                other: source.data.other,
                website: source.data.website,
                source: source.data.source
            };
            break;
        default:
            break;
    };
    $.ajax({
        url: '/api/2.0/files/easybib-citation',
        type: "POST",
        data: {
            citationData: JSON.stringify(data)
        },
        success: function (answer) {
            // NOTE(review): this guard checks "citations" (plural) while the
            // payload parsed below is "citation" -- confirm the intended field name.
            if (answer.response.success && answer.response.citations != "error") {
                var citation = JSON.parse(answer.response.citation);
                if (citation.status === 'ok') {
                    saveCitation.call(biblist, citation, data, bibliographyStorage, fileId);
                } else if (citation.status === 'error') {
                    alert("ERROR. " + citation.msg);
                }
            }
            return true;
        },
        error: function(err) {
            console.log(err);
        }
    });
};
// Populates the #styles <select> from the {key: label} map `styles`, removes
// the placeholder first option, enables the control, and pre-selects
// `bibliographyStyle` when one is provided.
Bibliography.prototype.fillListStyles = function (styles, bibliographyStyle) {
    var select = $('#styles');
    var nativeSelect = select[0];
    for (var key in styles) {
        nativeSelect.options[nativeSelect.options.length] = new Option(styles[key], key);
    }
    $("#styles :first").remove();
    select.attr('disabled', false);
    if (bibliographyStyle) {
        $("#styles option[value=" + bibliographyStyle + "]").attr('selected', 'true');
    }
};
// HTML-escapes `str` by letting jQuery encode it as text content; returns
// null for any falsy input (null, undefined, "").
function escapeHtml(str) {
    return str ? $('<div />').text(str).html() : null;
}
// Switches the panel into search-result mode (hiding the bibliography list)
// and appends one result row with an "add" button; `id` becomes the row
// element's id attribute. All user-supplied text is HTML-escaped.
function createSearchItem(title, description, id) {
    var container = $("#search_result");
    $("#search_result").show();
    $(".result-container .search-title").show();
    $(".result-container .bibliography-title").hide();
    $("#bib").hide();
    container.show();
    var row = [
        "<div class=\"search-item\" id=" + escapeHtml(id) + ">",
        "<div class = \"citation\">",
        "<h4 style=\"overflow-x: hidden;margin:0\">" + escapeHtml(title) + "</h4>",
        "<p style=\";margin:0\">" + escapeHtml(description) + "</p>",
        "</div>",
        "<div class=\"add-button-container\">",
        "<div class=\"add-button\" onclick=\"addItem(this)\"></div>",
        "</div>",
        "</div>"
    ].join("");
    $('#titleContent').text('Your Search Results');
    container.append(row);
};
// Appends a rendered citation row to the bibliography list (#bib) with a
// delete button whose element id encodes the entry id.
// FIX: the original markup never closed the outer "bibliography-part" <div>
// and ended with the invalid tag </br>; browsers auto-corrected it, but the
// malformed nesting made sibling rows unpredictable to select and style.
function createBibItem(id, data) {
    var item = "<div class=\"bibliography-part\">" +
        "<div class=\"bibliography-part-data\">" + escapeHtml(data) + "</div>" +
        "<div class=\"del-button-container\">" +
        "<div onclick=\"delBibliographyPart(this)\" id=bibliography-path_" + escapeHtml(id) + " class=\"del-bibliography-part\"></div>" +
        "</div>" +
        "</div>" +
        "<br>";
    $('#bib').append(item);
};
// Stores a newly created citation (called with `this` bound to the
// Bibliography instance): assigns the next sequential id, records both the
// raw request data (this.bibliography) and the formatted text
// (this.bibliographyText), switches the panel from search-result mode back
// to bibliography mode, renders the new row, and — when localStorage is
// available and a fileId is known — persists the bibliography keyed by file.
function saveCitation(citation, data, bibliographyStorage, fileId) {
    var id, bibliographyItem;
    // Next id = last stored id + 1; ids start at 1 for an empty list.
    if (this.bibliography.length > 0) {
        id = this.bibliography[this.bibliography.length - 1].id + 1;
    } else {
        id = 1;
    }
    bibliographyItem = {
        id: id,
        data: data
    };
    this.bibliography.push(bibliographyItem);
    this.bibliographyText.push({ id: id, data: citation.data });
    // Swap panels: hide search results, show the bibliography list.
    $("#search_result").empty();
    $("#search_result").hide();
    $(".result-container .search-title").hide();
    $(".result-container .bibliography-title").show();
    $("#bib").show();
    createBibItem(id, citation.data);
    if (!localStorageManager.isAvailable || fileId == null) {
        // No persistence possible — in-memory state only.
        return null;
    } else {
        if (bibliographyStorage) {
            bibliographyStorage[fileId] = this.bibliography;
            localStorageManager.setItem(this.localStorageKey, bibliographyStorage);
        } else {
            // First save for this browser: create the per-file storage map.
            bibliographyStorage = {};
            bibliographyStorage[fileId] = this.bibliography;
            localStorageManager.setItem(this.localStorageKey, bibliographyStorage);
        }
    }
};
|
ONLYOFFICE/CommunityServer
|
web/studio/ASC.Web.Studio/ThirdParty/plugin/easybib/bibliography.js
|
JavaScript
|
apache-2.0
| 9,168 |
package com.landian.crud.core.dao;
import com.landian.commons.page.PageListSupport;
import com.landian.commons.page.PageRequest;
import com.landian.crud.core.builder.SelectBuilder;
import com.landian.crud.core.builder.SqlBuilder;
import com.landian.crud.core.builder.impl.*;
import com.landian.crud.core.context.ResultMapContext;
import com.landian.crud.core.context.SystemContextFactory;
import com.landian.crud.core.context.impl.HashMapResultContext;
import com.landian.crud.core.converter.ResultContextConverter;
import com.landian.crud.core.converter.ResultContextConverterFactory;
import com.landian.crud.core.converter.impl.JavaBeanConverter;
import com.landian.crud.core.provider.ProviderHelper;
import com.landian.crud.core.result.SingleValue;
import com.landian.crud.core.result.StatisticMap;
import com.landian.crud.core.result.StatisticMapBuilder;
import com.landian.crud.core.sql.DeleteSQLBuilder;
import com.landian.crud.core.sql.InsertSQLBuilder;
import com.landian.crud.core.sql.PageSqlAdapter;
import com.landian.crud.core.sql.UpdateSQLBuilder;
import com.landian.sql.jpa.annotation.IdTypePolicy;
import com.landian.sql.jpa.context.BeanContext;
import com.landian.sql.jpa.context.ResultMapConfig;
import com.landian.sql.jpa.context.ResultMappingVirtual;
import com.landian.sql.jpa.criterion.Criterion;
import com.landian.sql.jpa.criterion.CriterionAppender;
import com.landian.sql.jpa.criterion.FieldAppender;
import com.landian.sql.jpa.criterion.Restrictions;
import com.landian.sql.jpa.log.JieLoggerProxy;
import com.landian.sql.jpa.order.Order;
import com.landian.sql.jpa.order.OrderAppender;
import com.landian.sql.jpa.sql.SelectUnitAppender;
import com.landian.sql.jpa.sql.SelectUnitRestrictions;
import com.landian.sql.jpa.sql.UpdateUnitAppender;
import com.landian.sql.jpa.utils.ConvertUtils;
import org.apache.commons.collections.CollectionUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * ProxyDaoSupport — generic CRUD support built on top of {@link ProxyDao}.
 * Builds SQL strings from bean metadata ({@link BeanContext}) and converts
 * result rows back into beans via {@link ResultContextConverter}s.
 * @author cao.jl
 * to be continued
 * (Without question, Spring Data is the expert-grade evolution of this kind of wrapper.)
 */
@Repository
public class ProxyDaoSupport<T> {

	@SuppressWarnings("unused")
	private static final Logger logger = Logger.getLogger(ProxyDaoSupport.class);

	// Dedicated logger channel for emitted SQL statements.
	private static final Logger infoSQLLogger = Logger.getLogger("infoSQL");

	@Autowired
	private ProxyDao proxyDao;

	// Logs the given object (usually a SQL string) to the SQL logger when present.
	private void proxyInfo(Object object){
		if(null != infoSQLLogger){
			infoSQLLogger.info(object);
		}
	}

	/**
	 * Checks whether a business bean with the given id exists.
	 * @param beanId id of the bean to look for
	 * @param beanContext metadata describing the bean's table and id column
	 * @return true when at least one row with that id exists
	 */
	public boolean isExist(long beanId,BeanContext beanContext) {
		String beanIdColumn = this.getBeanIdColumn(beanContext);
		CommonSelectSQLBuilder builder = CommonSelectSQLBuilder.getInstance(beanContext.getTableName(),
				SelectUnitRestrictions.column(beanIdColumn),
				Restrictions.eq(beanIdColumn, beanId),
				Order.asc(beanIdColumn));
		HashMapResultContext hashMapResultContext = this.doFind(builder.SQL());
		if(hashMapResultContext.getResultCount() > 0){
			return true;
		}
		return false;
	}

	/** Executes a raw INSERT statement and returns the affected row count. */
	public int doInsert(String sql){
		return proxyDao.doInsert(sql);
	}

	/**
	 * Inserts the bean including its pre-assigned id (no id generation).
	 * @param bean bean to insert
	 * @param beanContext bean metadata
	 */
	public int insertWithId(Object bean,BeanContext beanContext){
		String sql = InsertSQLBuilder.insertWithIdSQL(bean, beanContext);
		return proxyDao.doInsertWidthId(sql);
	}

	/**
	 * Inserts the bean and back-fills the database-generated id onto it.
	 * @param bean bean to insert
	 * @param beanContext bean metadata
	 */
	public void insert(Object bean,BeanContext beanContext){
		String sql = InsertSQLBuilder.insertSQL(bean, beanContext);
		Object idObject = proxyDao.doInsertAndReturnId(sql);
		// back-fill the generated id
		refillId(bean, beanContext, idObject);
	}

	/**
	 * Back-fills the generated id onto the business bean by reflectively
	 * invoking the id setter, converting the raw id to the declared id type
	 * (Integer, Long or BigDecimal). Throws RuntimeException on failure.
	 */
	private void refillId(Object bean, BeanContext beanContext,Object idObject){
		try {
			String idFieldName = beanContext.getIdFieldName();
			String setMethodName = ProviderHelper.toSetMethodName(idFieldName);
			Method idSetMethod = null;
			SingleValue idSingleValue = SingleValue.newInstance(idObject);
			Object fixIdObject = null;
			if(IdTypePolicy.INTEGER == beanContext.getIdType()){
				fixIdObject = idSingleValue.integerValue();
				idSetMethod = bean.getClass().getDeclaredMethod(setMethodName,Integer.class);
			}else if(IdTypePolicy.LONG == beanContext.getIdType()){
				fixIdObject = idSingleValue.longValue();
				idSetMethod = bean.getClass().getDeclaredMethod(setMethodName,Long.class);
			}else if(IdTypePolicy.BIGDECIMAL == beanContext.getIdType()){
				fixIdObject = idSingleValue.bigDecimalValue();
				idSetMethod = bean.getClass().getDeclaredMethod(setMethodName,BigDecimal.class);
			}
			if(null == idSetMethod){
				// Unsupported id type: warn but do not fail the insert.
				String msg = MessageFormat.format("ID回填策略未实现,目标对像[{0}],目标类型[{1}]",bean,idObject);
				logger.warn(msg);
			}else{
				idSetMethod.invoke(bean,new Object[]{fixIdObject});
			}
		}catch (Exception e) {
			String errorMsg = "回填业务Bean ID异常!";
			JieLoggerProxy.error(logger, errorMsg);
			JieLoggerProxy.error(logger, e);
			throw new RuntimeException(errorMsg);
		}
	}

	/**
	 * Updates the bean's non-null property values (null fields are skipped).
	 * @param bean bean carrying the new values and the id
	 * @param beanContext bean metadata
	 */
	public int updateNotNull(Object bean, BeanContext beanContext) {
		String sql = UpdateSQLBuilder.updateNotNull(bean, beanContext);
		return proxyDao.doUpdate(sql);
	}

	/**
	 * Updates selected columns matching the given criteria.
	 * @param updateUnitAppender appender listing the columns/values to set
	 * @param criterionAppender appender listing the WHERE criteria
	 * @param beanContext bean metadata
	 * @return affected row count
	 * @throws Exception
	 */
	public int update(UpdateUnitAppender updateUnitAppender, CriterionAppender criterionAppender,
			BeanContext beanContext){
		String sql = UpdateSqlBuilder.getInstance(beanContext.getTableName(), updateUnitAppender, criterionAppender).SQL();
		return proxyDao.doUpdate(sql);
	}

	/**
	 * Executes a raw UPDATE statement.
	 * @param sql
	 */
	public int doUpdate(String sql) {
		return proxyDao.doUpdate(sql);
	}

	/**
	 * Runs a statistics query. Frequently one needs a map keyed by some
	 * property (e.g. an id) whose value is an aggregate of another property;
	 * this builds such a {@link StatisticMap} from the raw rows.
	 * @param sql
	 * @param resultMapConfig describes which columns form the key/value
	 * @return
	 */
	public StatisticMap queryAsStatisticMap(String sql,ResultMapConfig resultMapConfig) {
		HashMapResultContext hashMapResultContext = doFind(sql);
		return StatisticMapBuilder.buildStatisticMap(resultMapConfig, hashMapResultContext);
	}

	/**
	 * Runs the SQL and returns the raw result set (list of column maps).
	 * @param sql
	 */
	public HashMapResultContext doFind(String sql){
		proxyInfo(sql);
		List<Map<String, Object>> resultContext = proxyDao.doFind(sql);
		HashMapResultContext hashMapResultContext = new HashMapResultContext(resultContext);
		return hashMapResultContext;
	}

	/**
	 * Runs the SQL and converts each row into an instance of {@code clazz}.
	 * @param sql
	 */
	public List doFind(String sql,Class clazz){
		proxyInfo(clazz);
		proxyInfo(sql);
		// converter
		ResultContextConverter converter = JavaBeanConverter.newInstance(clazz);
		// delegate to the converter-based overload
		return this.doFind(sql, converter);
	}

	/**
	 * Runs a paged query: wraps the SQL with the configured page-SQL adapter
	 * and converts each row with the given converter.
	 * @param sql
	 * @param converter row-to-bean converter
	 */
	public List<T> doFindPage(String sql, int start, int pageSize, ResultContextConverter converter){
		PageSqlAdapter pageSqlAdapter = SystemContextFactory.getPageSqlAdapter();
		String pageSQL = pageSqlAdapter.wrapSQL(sql,start,pageSize);
		// result set
		List<Map<String, Object>> resultList = proxyDao.doFind(pageSQL);
		// convert rows to beans
		List<T> beanList = new ArrayList<T>();
		if(CollectionUtils.isNotEmpty(resultList)){
			for(Map<String, Object> dataMap : resultList){
				@SuppressWarnings("unchecked")
				T bean = (T) converter.convert(dataMap);
				beanList.add(bean);
			}
		}
		return beanList;
	}

	/**
	 * Runs the SQL and converts each row with the given converter.
	 * @param sql
	 * @param converter row-to-bean converter
	 */
	public List<T> doFind(String sql, ResultContextConverter converter){
		// result set
		HashMapResultContext hashMapResultContext = this.doFind(sql);
		// convert rows to beans
		List<T> beanList = new ArrayList<T>();
		List<Map<String, Object>> resultList = hashMapResultContext.getResultObject();
		if(CollectionUtils.isNotEmpty(resultList)){
			for(Map<String, Object> dataMap : resultList){
				@SuppressWarnings("unchecked")
				T bean = (T) converter.convert(dataMap);
				beanList.add(bean);
			}
		}
		return beanList;
	}

	/**
	 * Finds a bean by (int) id; delegates to the long overload.
	 * @param beanId
	 * @param beanContext
	 */
	public T queryById(int beanId, BeanContext beanContext){
		Integer id = beanId;
		return this.queryById(id.longValue(), beanContext);
	}

	/**
	 * Finds a bean by id; returns null when no row matches.
	 * @param beanId
	 * @param beanContext
	 */
	public T queryById(long beanId,BeanContext beanContext){
		List<Long> ids = new ArrayList<Long>();
		ids.add(beanId);
		List<T> list = this.queryByIds(ids,beanContext);
		if(!CollectionUtils.isEmpty(list)){
			return list.get(0);
		}
		return null;
	}

	/**
	 * Finds beans by a list of Integer ids (converted to longs).
	 * @param beanContext
	 * @param ids
	 */
	public List<T> queryByIds(BeanContext beanContext,List<Integer> ids){
		if(CollectionUtils.isEmpty(ids)){
			return Collections.EMPTY_LIST;
		}
		return queryByIds(ConvertUtils.Int2long(ids),beanContext);
	}

	/**
	 * Finds beans by a list of ids via an IN criterion.
	 * @param ids
	 * @param beanContext
	 */
	public List<T> queryByIds(List<Long> ids,BeanContext beanContext){
		if(CollectionUtils.isEmpty(ids)){
			return Collections.EMPTY_LIST;
		}
		CriterionAppender criterionAppender = CriterionAppender.newInstance();
		String column = beanContext.getIdFieldName();
		criterionAppender.add(Restrictions.in(column, ids,0l));
		List<T> beanList = queryBean(beanContext,criterionAppender);
		return beanList;
	}

	/**
	 * Loads ALL rows of the bean's table. Use with care on non-metadata
	 * (potentially large) tables.
	 * @param beanContext
	 */
	public List<T> queryBeanAll(BeanContext beanContext) {
		return queryBean(beanContext,null,null);
	}

	/**
	 * Queries beans matching the given criteria, unordered.
	 * @param beanContext
	 * @param criterionAppender
	 */
	public List<T> queryBean(BeanContext beanContext,CriterionAppender criterionAppender) {
		return queryBean(beanContext,criterionAppender,null);
	}

	/**
	 * Queries all beans with the given ordering, no criteria.
	 * @param beanContext
	 * @param proxyOrderAppender
	 */
	public List<T> queryBean(BeanContext beanContext,OrderAppender proxyOrderAppender) {
		return queryBean(beanContext,null,proxyOrderAppender);
	}

	/**
	 * Queries beans with optional criteria and ordering (either may be null).
	 * @param beanContext
	 * @param criterionAppender
	 * @param proxyOrderAppender
	 */
	public List<T> queryBean(BeanContext beanContext,CriterionAppender criterionAppender,
			OrderAppender proxyOrderAppender) {
		String tableName = beanContext.getTableName();
		// column selector
		Class<T> beanClass = beanContext.getBeanClass();
		SelectBuilder selectBuilder = SelectBuilderFactory.builder(beanClass);
		SqlBuilder sqlBuilder = SqlBuilderFactory.builder(tableName, beanClass, selectBuilder, criterionAppender, proxyOrderAppender);
		// row-to-bean converter
		ResultContextConverter resultContextConverter = ResultContextConverterFactory.build(beanContext.getBeanClass());
		// result set
		List<T> beanList = queryBean(sqlBuilder,resultContextConverter);
		return beanList;
	}

	/**
	 * Queries beans selecting only the fields named by {@code fieldAppender}.
	 * @param beanContext
	 * @param criterionAppender
	 * @param proxyOrderAppender
	 */
	public List<T> queryBeanField(BeanContext beanContext,FieldAppender fieldAppender, CriterionAppender criterionAppender,
			OrderAppender proxyOrderAppender) {
		String tableName = beanContext.getTableName();
		// column selector restricted to the requested fields
		SelectBuilder selectBuilder = new FieldAppenderSelectBuilder(fieldAppender,beanContext.getBeanClass());
		SqlBuilder sqlBuilder = SqlBuilderFactory.builder(tableName, beanContext.getBeanClass(), selectBuilder, criterionAppender, proxyOrderAppender);
		// row-to-bean converter
		ResultContextConverter resultContextConverter = ResultContextConverterFactory.build(beanContext.getBeanClass());
		// result set
		List<T> beanList = queryBean(sqlBuilder, resultContextConverter);
		return beanList;
	}

	/**
	 * Queries raw bean info rows (no bean conversion).
	 * @param tableName
	 * @param clazz
	 * @param selectUnitAppender
	 * @param criterionAppender
	 * @param proxyOrderAppender
	 * @return
	 */
	public HashMapResultContext queryBeanInfo(String tableName, Class clazz,SelectUnitAppender selectUnitAppender,
			CriterionAppender criterionAppender, OrderAppender proxyOrderAppender) {
		// column selector
		SelectBuilder selectBuilder = SelectBuilderFactory.builder(selectUnitAppender);
		// SQL builder
		SqlBuilder sqlBuilder = SqlBuilderFactory.builder(tableName, clazz, selectBuilder, criterionAppender, proxyOrderAppender);
		// result set
		String sql = sqlBuilder.SQL();
		HashMapResultContext hashMapResultContext = this.doFind(sql);
		return hashMapResultContext;
	}

	/**
	 * Paged bean query. Deliberately not overloaded, for these reasons:
	 * 1. Paged queries almost always need criteria and ordering appended.
	 * 2. The parameter list does not refactor down any further; overloading
	 *    would multiply the method family.
	 * 3. criterionAppender / proxyOrderAppender may be null — the SqlBuilder
	 *    inside handles that accordingly.
	 * The COUNT query is only issued when the page itself is non-empty.
	 * @param beanContext
	 * @param criterionAppender
	 * @param proxyOrderAppender
	 * @param pageRequest
	 * @return
	 */
	public PageListSupport<T> queryBeanPage(BeanContext beanContext,
			CriterionAppender criterionAppender,OrderAppender proxyOrderAppender,PageRequest pageRequest) {
		String tableName = beanContext.getTableName();
		// column selector
		SelectBuilder selectBuilder = SelectBuilderFactory.builder(beanContext.getBeanClass());
		// ensure a non-null criterion appender
		if(null == criterionAppender){
			criterionAppender = CriterionAppender.newInstance();
		}
		// SQL builder
		SqlBuilder sqlBuilder = SqlBuilderFactory.builder(tableName, beanContext.getBeanClass(), selectBuilder, criterionAppender, proxyOrderAppender);
		// row-to-bean converter
		ResultContextConverter resultContextConverter = ResultContextConverterFactory.build(beanContext.getBeanClass());
		// paged query
		int start = getPageStart(pageRequest);
		int size = pageRequest.getPageSize();
		List<T> beanList = doFindPage(sqlBuilder.SQL(), start, size, resultContextConverter);
		// wrap into a PageListSupport
		PageListSupport<T> pageListSupport = new PageListSupport<T>();
		// total count query
		Long count = 0l;
		if(!CollectionUtils.isEmpty(beanList)){
			HashMapResultContext hashMapResultContext = this.doFind(sqlBuilder.SQLCount());
			Object countObj = hashMapResultContext.singleResult();
			if(null != countObj){
				SingleValue singleValue = SingleValue.newInstance(countObj);
				count = singleValue.longValue();
			}
		}
		pageListSupport.setList(beanList);
		pageListSupport.setCount(count);
		pageListSupport.setPageIndex(pageRequest.getPageIndex());
		pageListSupport.setPageSize(pageRequest.getPageSize());
		return pageListSupport;
	}

	/**
	 * Queries beans from a pre-built SQL builder with the given converter.
	 * @param sqlBuilder
	 * @param resultContextConverter
	 */
	public List<T> queryBean(SqlBuilder sqlBuilder,ResultContextConverter resultContextConverter) {
		return doFind(sqlBuilder.SQL(), resultContextConverter);
	}

	/**
	 * Resolves the bean's id column name from the result-map metadata.
	 * @param beanContext
	 * @return
	 */
	public String getBeanIdColumn(BeanContext beanContext){
		String idFieldName = beanContext.getIdFieldName();
		Map<String, ResultMappingVirtual> resultMappingMap = ResultMapContext.getResultMappingMap(beanContext.getBeanClass());
		String columnName = resultMappingMap.get(idFieldName).getColumn();
		return columnName;
	}

	/**
	 * author jie
	 * date 15/08/21
	 * Deletes the business bean with the given id.
	 * @param beanId
	 * @param beanContext
	 * @return
	 */
	public int deleteById(long beanId, BeanContext beanContext) {
		Criterion criterion = buildIdCriterion(beanId, beanContext);
		String sql = DeleteSQLBuilder.buildDeleteSQL(beanContext.getTableName(), criterion);
		return doDelete(sql);
	}

	/**
	 * Builds an equality criterion on the bean's id column.
	 * @param id
	 * @param beanContext
	 * @return
	 */
	public Criterion buildIdCriterion(long id, BeanContext beanContext){
		String beanIdColumn = getBeanIdColumn(beanContext);
		return Restrictions.eq(beanIdColumn,id);
	}

	/**
	 * Builds an IN criterion on the bean's id column.
	 * @param ids
	 * @param beanContext
	 * @return
	 */
	public Criterion buildIdCriterion(List<Long> ids, BeanContext beanContext){
		String beanIdColumn = getBeanIdColumn(beanContext);
		return Restrictions.in(beanIdColumn,ids,0l);
	}

	/**
	 * Batch-deletes beans by id; returns 0 for an empty id list.
	 * @param ids
	 * @param beanContext
	 * @return
	 */
	public int deleteByIdLong(List<Long> ids,BeanContext beanContext) {
		if(CollectionUtils.isEmpty(ids)){
			return 0;
		}
		Criterion criterion = buildIdCriterion(ids, beanContext);
		String sql = DeleteSQLBuilder.buildDeleteSQL(beanContext.getTableName(), criterion);
		return doDelete(sql);
	}

	/** Executes a raw DELETE statement. */
	public int doDelete(String sql){
		return proxyDao.doDelete(sql);
	}

	/**
	 * Builds and returns the SELECT SQL for the given criteria/ordering.
	 * @param beanContext
	 * @param criterionAppender
	 * @param proxyOrderAppender
	 * @return
	 */
	public String getQuerySQL(BeanContext beanContext, CriterionAppender criterionAppender, OrderAppender proxyOrderAppender) {
		// column selector
		SelectBuilder selectBuilder = SelectBuilderFactory.builder(beanContext.getBeanClass());
		// SQL builder
		String tableName = beanContext.getTableName();
		SqlBuilder sqlBuilder = SqlBuilderFactory.builder(tableName, beanContext.getBeanClass(), selectBuilder, criterionAppender, proxyOrderAppender);
		return sqlBuilder.SQL();
	}

	private int getPageStart(PageRequest pageRequest) {
		return pageRequest.getPageIndex() * pageRequest.getPageSize(); // start offset of the page
	}

	/**
	 * Runs the SQL and returns the first converted object, or null when empty.
	 * @param sql
	 */
	public Object queryAsValueFirst(String sql,Class clazz){
		List list = this.doFind(sql, clazz);
		if(CollectionUtils.isEmpty(list)){
			return null;
		}
		return list.get(0);
	}

	/**
	 * Runs the SQL and returns a single-column value list.
	 * @param sql
	 */
	public List queryAsValueList(String sql){
		List list = Collections.EMPTY_LIST;
		HashMapResultContext hashMapResultContext = doFind(sql);
		if(hashMapResultContext.getResultCount() > 0){
			list = hashMapResultContext.singleList();
		}
		return list;
	}

	/**
	 * Runs the SQL and wraps the single result as a {@link SingleValue}.
	 * @param sql
	 * @return
	 */
	public SingleValue queryAsSingleValue(String sql) {
		HashMapResultContext hashMapResultContext = this.doFind(sql);
		Object object = hashMapResultContext.singleResult();
		SingleValue singleValue = SingleValue.newInstance(object);
		return singleValue;
	}

	/**
	 * Runs the SQL and returns the single result as an int.
	 * @param sql
	 * @return
	 */
	public int queryAsSingleValueInt(String sql) {
		SingleValue singleValue = queryAsSingleValue(sql);
		return singleValue.integerValue();
	}

	/**
	 * Runs the SQL and returns the single result as a long.
	 * @param sql
	 * @return
	 */
	public long queryAsSingleValueLong(String sql) {
		SingleValue singleValue = queryAsSingleValue(sql);
		return singleValue.longValue();
	}

	/**
	 * Runs the SQL and returns a list of Long values.
	 * @param sql
	 * @return
	 */
	public List<Long> queryAsLongValue(String sql) {
		return proxyDao.queryAsLongValue(sql);
	}

	/**
	 * Runs the SQL paged and returns a list of Long values.
	 * @param sql
	 * @param start start offset
	 * @param size page size
	 * @return
	 */
	public List<Long> queryAsLongValue(String sql, int start, int size) {
		PageSqlAdapter pageSqlAdapter = SystemContextFactory.getPageSqlAdapter();
		String sqlQ = pageSqlAdapter.wrapSQL(sql, start, size);
		return proxyDao.queryAsLongValue(sqlQ);
	}

	/**
	 * Runs the SQL and returns a list of Integer values.
	 * @param sql
	 * @return
	 */
	public List<Integer> queryAsIntValue(String sql) {
		return proxyDao.queryAsIntValue(sql);
	}

	/**
	 * Runs the SQL paged and returns a list of Integer values.
	 * @param sql
	 * @param start start offset
	 * @param size page size
	 * @return
	 */
	public List<Integer> queryAsIntValue(String sql, int start, int size) {
		PageSqlAdapter pageSqlAdapter = SystemContextFactory.getPageSqlAdapter();
		String sqlQ = pageSqlAdapter.wrapSQL(sql, start, size);
		return proxyDao.queryAsIntValue(sqlQ);
	}
}
|
caojieliang/crud-core
|
src/main/java/com/landian/crud/core/dao/ProxyDaoSupport.java
|
Java
|
apache-2.0
| 20,038 |
package site.huozhu.home.controller.form;
/**
 * Pagination request form carrying the requested page number and page size.
 *
 * @author chuanxue.mcx
 * @date 2017/09/06
 */
public class PagenationForm {

    /** First page is page 1 (1-based indexing). */
    private static final int DEFAULT_PAGE = 1;
    /** Default number of items returned per page. */
    private static final int DEFAULT_PAGE_SIZE = 20;

    // Requested page number; defaults to the first page.
    private int page = DEFAULT_PAGE;
    // Requested page size; defaults to 20 items.
    private int pageSize = DEFAULT_PAGE_SIZE;

    public int getPage() {
        return page;
    }

    public void setPage(int page) {
        this.page = page;
    }

    public int getPageSize() {
        return pageSize;
    }

    public void setPageSize(int pageSize) {
        this.pageSize = pageSize;
    }
}
|
tomtrije/huozhu
|
server/huozhu/java/src/main/java/site/huozhu/home/controller/form/PagenationForm.java
|
Java
|
apache-2.0
| 483 |
/*
* Copyright 2014-2021 Lukas Krejci
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.revapi.java.checks.annotations;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import org.revapi.Difference;
import org.revapi.java.spi.CheckBase;
import org.revapi.java.spi.Code;
import org.revapi.java.spi.JavaAnnotationElement;
import org.revapi.java.spi.Util;
/**
* @author Lukas Krejci
*
* @since 0.1
*/
public final class Removed extends CheckBase {

    /**
     * Reports a difference when an annotation present on the old element is
     * gone from the new one, provided the annotated element is accessible.
     */
    @Override
    protected List<Difference> doVisitAnnotation(JavaAnnotationElement oldAnnotation,
            JavaAnnotationElement newAnnotation) {
        // "Removed" means: existed in the old API, absent in the new one.
        boolean removed = oldAnnotation != null && newAnnotation == null;
        if (!removed || !isAccessible(oldAnnotation.getParent())) {
            return null;
        }
        String annotationType = Util.toHumanReadableString(oldAnnotation.getAnnotation().getAnnotationType());
        String annotation = Util.toHumanReadableString(oldAnnotation.getAnnotation());
        Difference difference = createDifference(Code.ANNOTATION_REMOVED,
                Code.attachmentsFor(oldAnnotation.getParent(), null, "annotationType", annotationType,
                        "annotation", annotation));
        return Collections.singletonList(difference);
    }

    /** This check only cares about annotation elements. */
    @Override
    public EnumSet<Type> getInterest() {
        return EnumSet.of(Type.ANNOTATION);
    }
}
|
revapi/revapi
|
revapi-java/src/main/java/org/revapi/java/checks/annotations/Removed.java
|
Java
|
apache-2.0
| 1,860 |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace Secure_Password_Repository.ViewModels
{
    /// <summary>
    /// Base edit model for a category: optional id plus required name.
    /// </summary>
    public class CategoryEdit
    {
        /// <summary>Database id; null for a category not yet persisted.</summary>
        public Int32? CategoryId { get; set; }

        /// <summary>Display name of the category.</summary>
        [Required]
        public string CategoryName { get; set; }
    }

    /// <summary>
    /// Model for adding a category under an optional parent category.
    /// </summary>
    public class CategoryAdd : CategoryEdit
    {
        /// <summary>Id of the parent category; null for a top-level category.</summary>
        public Int32? Category_ParentID { get; set; }
    }

    /// <summary>
    /// Model identifying a category to delete.
    /// </summary>
    public class CategoryDelete
    {
        /// <summary>Id of the category to remove.</summary>
        public Int32? CategoryId { get; set; }
    }

    /// <summary>
    /// Full category node: parent link plus navigation collections for
    /// child categories and the passwords stored under it.
    /// </summary>
    public class CategoryItem : CategoryEdit
    {
        /// <summary>Id of the parent category (required for a tree node).</summary>
        [Required]
        public Int32 Category_ParentID { get; set; }

        /// <summary>Child categories nested under this one.</summary>
        public virtual ICollection<CategoryItem> SubCategories { get; set; }

        /// <summary>Passwords stored directly in this category.</summary>
        public virtual ICollection<PasswordItem> Passwords { get; set; }
    }

    /// <summary>
    /// Composite view model bundling the category tree item with the
    /// add-category and add-password forms shown alongside it.
    /// </summary>
    public class CategoryDisplayItem
    {
        public CategoryItem categoryListItem { get; set; }
        public CategoryAdd categoryAddItem { get; set; }
        public PasswordAdd passwordAddItem { get; set; }
    }
}
|
thatcoderguy/Secure-Password-Repository
|
Secure Password Repository/ViewModels/CategoryViewModels.cs
|
C#
|
apache-2.0
| 1,022 |
package com.ahars.domain.util;
import java.sql.Types;
import org.hibernate.dialect.H2Dialect;
/**
 * H2 dialect that registers SQL {@code FLOAT} columns as H2's "real" type
 * instead of the dialect default.
 * NOTE(review): presumably done so H2 schemas generated in dev/test line up
 * with the production database's float columns — confirm against the
 * project's Liquibase/DDL expectations.
 */
public class FixedH2Dialect extends H2Dialect {
    public FixedH2Dialect() {
        super();
        // Override the inherited FLOAT mapping with H2's "real".
        registerColumnType( Types.FLOAT, "real" );
    }
}
|
ahars/dataviz-jhipster
|
src/main/java/com/ahars/domain/util/FixedH2Dialect.java
|
Java
|
apache-2.0
| 251 |
package org.hl7.fhir.instance.model;
import java.util.*;
import org.hl7.fhir.instance.utils.IWorkerContext;
import org.hl7.fhir.utilities.Utilities;
public class ExpressionNode {
public enum Kind {
Name, Function, Constant, Group
}
public static class SourceLocation {
private int line;
private int column;
public SourceLocation(int line, int column) {
super();
this.line = line;
this.column = column;
}
public int getLine() {
return line;
}
public int getColumn() {
return column;
}
public void setLine(int line) {
this.line = line;
}
public void setColumn(int column) {
this.column = column;
}
public String toString() {
return Integer.toString(line)+", "+Integer.toString(column);
}
}
public enum Function {
Custom,
Empty, Not, Exists, SubsetOf, SupersetOf, IsDistinct, Distinct, Count, Where, Select, All, Repeat, Item /*implicit from name[]*/, As, Is, Single,
First, Last, Tail, Skip, Take, Iif, ToInteger, ToDecimal, ToString, Substring, StartsWith, EndsWith, Matches, ReplaceMatches, Contains, Replace, Length,
Children, Descendants, MemberOf, Trace, Today, Now, Resolve, Extension;
public static Function fromCode(String name) {
if (name.equals("empty")) return Function.Empty;
if (name.equals("not")) return Function.Not;
if (name.equals("exists")) return Function.Exists;
if (name.equals("subsetOf")) return Function.SubsetOf;
if (name.equals("supersetOf")) return Function.SupersetOf;
if (name.equals("isDistinct")) return Function.IsDistinct;
if (name.equals("distinct")) return Function.Distinct;
if (name.equals("count")) return Function.Count;
if (name.equals("where")) return Function.Where;
if (name.equals("select")) return Function.Select;
if (name.equals("all")) return Function.All;
if (name.equals("repeat")) return Function.Repeat;
if (name.equals("item")) return Function.Item;
if (name.equals("as")) return Function.As;
if (name.equals("is")) return Function.Is;
if (name.equals("single")) return Function.Single;
if (name.equals("first")) return Function.First;
if (name.equals("last")) return Function.Last;
if (name.equals("tail")) return Function.Tail;
if (name.equals("skip")) return Function.Skip;
if (name.equals("take")) return Function.Take;
if (name.equals("iif")) return Function.Iif;
if (name.equals("toInteger")) return Function.ToInteger;
if (name.equals("toDecimal")) return Function.ToDecimal;
if (name.equals("toString")) return Function.ToString;
if (name.equals("substring")) return Function.Substring;
if (name.equals("startsWith")) return Function.StartsWith;
if (name.equals("endsWith")) return Function.EndsWith;
if (name.equals("matches")) return Function.Matches;
if (name.equals("replaceMatches")) return Function.ReplaceMatches;
if (name.equals("contains")) return Function.Contains;
if (name.equals("replace")) return Function.Replace;
if (name.equals("length")) return Function.Length;
if (name.equals("children")) return Function.Children;
if (name.equals("descendants")) return Function.Descendants;
if (name.equals("memberOf")) return Function.MemberOf;
if (name.equals("trace")) return Function.Trace;
if (name.equals("today")) return Function.Today;
if (name.equals("now")) return Function.Now;
if (name.equals("resolve")) return Function.Resolve;
if (name.equals("extension")) return Function.Extension;
return null;
}
public String toCode() {
switch (this) {
case Empty : return "empty";
case Not : return "not";
case Exists : return "exists";
case SubsetOf : return "subsetOf";
case SupersetOf : return "supersetOf";
case IsDistinct : return "isDistinct";
case Distinct : return "distinct";
case Count : return "count";
case Where : return "where";
case Select : return "select";
case All : return "all";
case Repeat : return "repeat";
case Item : return "item";
case As : return "as";
case Is : return "is";
case Single : return "single";
case First : return "first";
case Last : return "last";
case Tail : return "tail";
case Skip : return "skip";
case Take : return "take";
case Iif : return "iif";
case ToInteger : return "toInteger";
case ToDecimal : return "toDecimal";
case ToString : return "toString";
case Substring : return "substring";
case StartsWith : return "startsWith";
case EndsWith : return "endsWith";
case Matches : return "matches";
case ReplaceMatches : return "replaceMatches";
case Contains : return "contains";
case Replace : return "replace";
case Length : return "length";
case Children : return "children";
case Descendants : return "descendants";
case MemberOf : return "memberOf";
case Trace : return "trace";
case Today : return "today";
case Now : return "now";
case Resolve : return "resolve";
case Extension : return "extension";
default: return "??";
}
}
}
  /**
   * Binary operators of the expression language, with their source tokens.
   * {@code fromCode} returns null for an unknown (or empty) token;
   * {@code toCode} is its inverse.
   */
  public enum Operation {
    Equals, Equivalent, NotEquals, NotEquivalent, LessThen, Greater, LessOrEqual, GreaterOrEqual, Is, As, Union, Or, And, Xor, Implies,
    Times, DivideBy, Plus, Minus, Concatenate, Div, Mod, In, Contains;

    /** Maps an operator token to its constant; null when unrecognised. */
    public static Operation fromCode(String name) {
      if (Utilities.noString(name))
        return null;
      if (name.equals("="))
        return Operation.Equals;
      if (name.equals("~"))
        return Operation.Equivalent;
      if (name.equals("!="))
        return Operation.NotEquals;
      if (name.equals("!~"))
        return Operation.NotEquivalent;
      if (name.equals(">"))
        return Operation.Greater;
      if (name.equals("<"))
        return Operation.LessThen;
      if (name.equals(">="))
        return Operation.GreaterOrEqual;
      if (name.equals("<="))
        return Operation.LessOrEqual;
      if (name.equals("|"))
        return Operation.Union;
      if (name.equals("or"))
        return Operation.Or;
      if (name.equals("and"))
        return Operation.And;
      if (name.equals("xor"))
        return Operation.Xor;
      if (name.equals("is"))
        return Operation.Is;
      if (name.equals("as"))
        return Operation.As;
      if (name.equals("*"))
        return Operation.Times;
      if (name.equals("/"))
        return Operation.DivideBy;
      if (name.equals("+"))
        return Operation.Plus;
      if (name.equals("-"))
        return Operation.Minus;
      if (name.equals("&"))
        return Operation.Concatenate;
      if (name.equals("implies"))
        return Operation.Implies;
      if (name.equals("div"))
        return Operation.Div;
      if (name.equals("mod"))
        return Operation.Mod;
      if (name.equals("in"))
        return Operation.In;
      if (name.equals("contains"))
        return Operation.Contains;
      return null;
    }

    /** Inverse of {@link #fromCode}; "??" should be unreachable. */
    public String toCode() {
      switch (this) {
      case Equals : return "=";
      case Equivalent : return "~";
      case NotEquals : return "!=";
      case NotEquivalent : return "!~";
      case Greater : return ">";
      case LessThen : return "<";
      case GreaterOrEqual : return ">=";
      case LessOrEqual : return "<=";
      case Union : return "|";
      case Or : return "or";
      case And : return "and";
      case Xor : return "xor";
      case Times : return "*";
      case DivideBy : return "/";
      case Plus : return "+";
      case Minus : return "-";
      case Concatenate : return "&";
      case Implies : return "implies";
      case Is : return "is";
      case As : return "as";
      case Div : return "div";
      case Mod : return "mod";
      case In : return "in";
      case Contains : return "contains";
      default: return "??";
      }
    }
  }
public enum CollectionStatus {
SINGLETON, ORDERED, UNORDERED
}
public static class TypeDetails {
@Override
public String toString() {
return (collectionStatus == null ? "" : collectionStatus.toString())+(types == null ? "[]" : types.toString());
}
private Set<String> types = new HashSet<String>();
private CollectionStatus collectionStatus;
public TypeDetails(CollectionStatus collectionStatus, String... names) {
super();
this.collectionStatus = collectionStatus;
for (String n : names)
this.types.add(n);
}
public TypeDetails(CollectionStatus collectionStatus, Set<String> names) {
super();
this.collectionStatus = collectionStatus;
for (String n : names)
this.types.add(n);
}
public void addType(String n) {
this.types.add(n);
}
public void addTypes(Collection<String> n) {
this.types.addAll(n);
}
public boolean hasType(IWorkerContext context, String... tn) {
for (String t: tn)
if (types.contains(t))
return true;
for (String t: tn) {
StructureDefinition sd = context.fetchResource(StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/"+t);
while (sd != null) {
if (types.contains(sd.getId()))
return true;
if (sd.hasBase())
sd = context.fetchResource(StructureDefinition.class, sd.getBase());
else
sd = null;
}
}
return false;
}
public void update(TypeDetails source) {
types.addAll(source.types);
if (collectionStatus == null)
collectionStatus = source.collectionStatus;
else if (source.collectionStatus == CollectionStatus.UNORDERED)
collectionStatus = source.collectionStatus;
else
collectionStatus = CollectionStatus.ORDERED;
}
public TypeDetails union(TypeDetails right) {
TypeDetails result = new TypeDetails(null);
if (right.collectionStatus == CollectionStatus.UNORDERED || collectionStatus == CollectionStatus.UNORDERED)
result.collectionStatus = CollectionStatus.UNORDERED;
else
result.collectionStatus = CollectionStatus.ORDERED;
result.types.addAll(types);
result.types.addAll(right.types);
return result;
}
public boolean hasNoTypes() {
return types.isEmpty();
}
public Set<String> getTypes() {
return types;
}
public TypeDetails toSingleton() {
TypeDetails result = new TypeDetails(CollectionStatus.SINGLETON);
result.types.addAll(types);
return result;
}
public CollectionStatus getCollectionStatus() {
return collectionStatus;
}
public boolean hasType(Set<String> tn) {
for (String t: tn)
if (types.contains(t))
return true;
return false;
}
public String describe() {
return types.toString();
}
public String getType() {
for (String t : types)
return t;
return null;
}
}
  //the expression will have one of either name or constant
  private String uniqueId;      // per-parse unique identifier, assigned by the parser
  private Kind kind;            // discriminator: Name | Function | Constant | Group
  private String name;          // set when kind == Name (also holds the function name)
  private String constant;      // set when kind == Constant
  private Function function;    // set when kind == Function
  private List<ExpressionNode> parameters; // will be created if there is a function
  private ExpressionNode inner; // the ".next" part of a path, e.g. Patient.name -> name
  private ExpressionNode group; // set when kind == Group: the parenthesised sub-expression
  private Operation operation;  // binary operator linking this node to opNext, if any
  private boolean proximal; // a proximal operation is the first in the sequence of operations. This is significant when evaluating the outcomes
  private ExpressionNode opNext; // right-hand operand of 'operation'
  private SourceLocation start;  // source position where this node begins
  private SourceLocation end;    // source position where this node ends
  private SourceLocation opStart; // source position of the operator token
  private SourceLocation opEnd;   // end position of the operator token
  private TypeDetails types;     // static types inferred for this node
  private TypeDetails opTypes;   // static types inferred for the operation result
public ExpressionNode(int uniqueId) {
super();
this.uniqueId = Integer.toString(uniqueId);
}
  /**
   * Renders this node (and its inner path / operation chain) as source text.
   * Item invocations are rendered as "[...]" rather than a named call.
   */
  public String toString() {
    StringBuilder b = new StringBuilder();
    switch (kind) {
    case Name:
      b.append(name);
      break;
    case Function:
      if (function == Function.Item)
        b.append("[");
      else {
        // NOTE(review): appends the raw parsed name rather than
        // function.toCode() - presumably equivalent; confirm for aliases.
        b.append(name);
        b.append("(");
      }
      boolean first = true;
      for (ExpressionNode n : parameters) {
        if (first)
          first = false;
        else
          b.append(", ");
        b.append(n.toString());
      }
      if (function == Function.Item)
        b.append("]");
      else {
        b.append(")");
      }
      break;
    case Constant:
      // Constants are re-escaped so the output is valid source text.
      b.append(Utilities.escapeJava(constant));
      break;
    case Group:
      b.append("(");
      b.append(group.toString());
      b.append(")");
    }
    if (inner != null) {
      b.append(".");
      b.append(inner.toString());
    }
    if (operation != null) {
      b.append(" ");
      b.append(operation.toCode());
      b.append(" ");
      b.append(opNext.toString());
    }
    return b.toString();
  }
  /** Name token (identifier or function name), when kind is Name/Function. */
  public String getName() {
    return name;
  }
  public void setName(String name) {
    this.name = name;
  }
  /** Constant literal text, when kind is Constant. */
  public String getConstant() {
    return constant;
  }
  public void setConstant(String constant) {
    this.constant = constant;
  }
  /** Resolved function, when kind is Function. */
  public Function getFunction() {
    return function;
  }
  // Setting a function lazily creates the parameter list so callers can
  // append parameters immediately afterwards.
  public void setFunction(Function function) {
    this.function = function;
    if (parameters == null)
      parameters = new ArrayList<ExpressionNode>();
  }
  /** True if this is the first operation in an operator sequence. */
  public boolean isProximal() {
    return proximal;
  }
  public void setProximal(boolean proximal) {
    this.proximal = proximal;
  }
  /** Binary operator linking this node to {@link #getOpNext()}, or null. */
  public Operation getOperation() {
    return operation;
  }
  public void setOperation(Operation operation) {
    this.operation = operation;
  }
  /** The ".next" segment of the path, or null. */
  public ExpressionNode getInner() {
    return inner;
  }
  public void setInner(ExpressionNode value) {
    this.inner = value;
  }
  /** Right-hand operand of the operation, or null. */
  public ExpressionNode getOpNext() {
    return opNext;
  }
  public void setOpNext(ExpressionNode value) {
    this.opNext = value;
  }
  /** Function parameters; null unless a function has been set. */
  public List<ExpressionNode> getParameters() {
    return parameters;
  }
public boolean checkName() {
if (!name.startsWith("$"))
return true;
else
return name.equals("$this");
}
  /** Node discriminator: Name | Function | Constant | Group. */
  public Kind getKind() {
    return kind;
  }
  public void setKind(Kind kind) {
    this.kind = kind;
  }
  /** Parenthesised sub-expression, when kind is Group. */
  public ExpressionNode getGroup() {
    return group;
  }
  public void setGroup(ExpressionNode group) {
    this.group = group;
  }
  /** Source position where this node begins. */
  public SourceLocation getStart() {
    return start;
  }
  public void setStart(SourceLocation start) {
    this.start = start;
  }
  /** Source position where this node ends. */
  public SourceLocation getEnd() {
    return end;
  }
  public void setEnd(SourceLocation end) {
    this.end = end;
  }
  /** Source position of the operator token, if any. */
  public SourceLocation getOpStart() {
    return opStart;
  }
  public void setOpStart(SourceLocation opStart) {
    this.opStart = opStart;
  }
  /** End position of the operator token, if any. */
  public SourceLocation getOpEnd() {
    return opEnd;
  }
  public void setOpEnd(SourceLocation opEnd) {
    this.opEnd = opEnd;
  }
  /** Parser-assigned unique id (read-only). */
  public String getUniqueId() {
    return uniqueId;
  }
public int parameterCount() {
if (parameters == null)
return 0;
else
return parameters.size();
}
  /**
   * Renders the parsed tree back to canonical source text via {@link #write}.
   * (Non-standard method-name casing retained for API compatibility.)
   */
  public String Canonical() {
    StringBuilder b = new StringBuilder();
    write(b);
    return b.toString();
  }
public String summary() {
switch (kind) {
case Name: return uniqueId+": "+name;
case Function: return uniqueId+": "+function.toString()+"()";
case Constant: return uniqueId+": "+constant;
case Group: return uniqueId+": (Group)";
}
return "??";
}
private void write(StringBuilder b) {
switch (kind) {
case Name:
b.append(name);
break;
case Constant:
b.append(constant);
break;
case Function:
b.append(function.toCode());
b.append('(');
boolean f = true;
for (ExpressionNode n : parameters) {
if (f)
f = false;
else
b.append(", ");
n.write(b);
}
b.append(')');
break;
case Group:
b.append('(');
group.write(b);
b.append(')');
}
if (inner != null) {
b.append('.');
inner.write(b);
}
if (operation != null) {
b.append(' ');
b.append(operation.toCode());
b.append(' ');
opNext.write(b);
}
}
public String check() {
switch (kind) {
case Name:
if (Utilities.noString(name))
return "No Name provided @ "+location();
break;
case Function:
if (function == null)
return "No Function id provided @ "+location();
for (ExpressionNode n : parameters) {
String msg = n.check();
if (msg != null)
return msg;
}
break;
case Constant:
if (Utilities.noString(constant))
return "No Constant provided @ "+location();
break;
case Group:
if (group == null)
return "No Group provided @ "+location();
else {
String msg = group.check();
if (msg != null)
return msg;
}
}
if (inner != null) {
String msg = inner.check();
if (msg != null)
return msg;
}
if (operation == null) {
if (opNext != null)
return "Next provided when it shouldn't be @ "+location();
}
else {
if (opNext == null)
return "No Next provided @ "+location();
else
opNext.check();
}
return null;
}
private String location() {
return Integer.toString(start.line)+", "+Integer.toString(start.column);
}
  /** Static types inferred for this node, or null before type checking. */
  public TypeDetails getTypes() {
    return types;
  }
  public void setTypes(TypeDetails types) {
    this.types = types;
  }
  /** Static types inferred for the operation result, or null. */
  public TypeDetails getOpTypes() {
    return opTypes;
  }
  public void setOpTypes(TypeDetails opTypes) {
    this.opTypes = opTypes;
  }
}
|
eug48/hapi-fhir
|
hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/ExpressionNode.java
|
Java
|
apache-2.0
| 17,778 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.password_manager;
import android.app.Activity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.support.v7.app.AlertDialog;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.TextUtils;
import android.text.method.LinkMovementMethod;
import android.text.style.ClickableSpan;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.signin.AccountManagementFragment;
import org.chromium.components.url_formatter.UrlFormatter;
import org.chromium.ui.base.WindowAndroid;
import org.chromium.ui.widget.Toast;
/**
* A dialog offers the user the ability to choose credentials for authentication. User is
* presented with username along with avatar and full name in case they are available.
* Native counterpart should be notified about credentials user have chosen and also if user
* haven't chosen anything.
*/
public class AccountChooserDialog
        implements DialogInterface.OnClickListener, DialogInterface.OnDismissListener {
    private final Context mContext;
    private final Credential[] mCredentials;

    /**
     * Title of the dialog, contains Smart Lock branding for the Smart Lock users.
     */
    private final String mTitle;
    private final int mTitleLinkStart;
    private final int mTitleLinkEnd;
    private final String mOrigin;
    private final String mSigninButtonText;
    private ArrayAdapter<Credential> mAdapter;
    private boolean mIsDestroyed;
    private boolean mWasDismissedByNative;

    /**
     * Holds the reference to the credentials which were chosen by the user.
     */
    private Credential mCredential;
    private long mNativeAccountChooserDialog;
    private AlertDialog mDialog;
    /**
     * True, if credentials were selected via "Sign In" button instead of clicking on the credential
     * itself.
     */
    private boolean mSigninButtonClicked;

    private AccountChooserDialog(Context context, long nativeAccountChooserDialog,
            Credential[] credentials, String title, int titleLinkStart, int titleLinkEnd,
            String origin, String signinButtonText) {
        mNativeAccountChooserDialog = nativeAccountChooserDialog;
        mContext = context;
        // Defensive copy: avatars are filled in asynchronously (imageFetchComplete).
        mCredentials = credentials.clone();
        mTitle = title;
        mTitleLinkStart = titleLinkStart;
        mTitleLinkEnd = titleLinkEnd;
        mOrigin = origin;
        mSigninButtonText = signinButtonText;
        mSigninButtonClicked = false;
    }

    /**
     *  Creates and shows the dialog which allows user to choose credentials for login.
     *  @param credentials Credentials to display in the dialog.
     *  @param title Title message for the dialog, which can contain Smart Lock branding.
     *  @param titleLinkStart Start of a link in case title contains Smart Lock branding.
     *  @param titleLinkEnd End of a link in case title contains Smart Lock branding.
     *  @param origin Address of the web page, where dialog was triggered.
     */
    @CalledByNative
    private static AccountChooserDialog createAndShowAccountChooser(WindowAndroid windowAndroid,
            long nativeAccountChooserDialog, Credential[] credentials, String title,
            int titleLinkStart, int titleLinkEnd, String origin, String signinButtonText) {
        Activity activity = windowAndroid.getActivity().get();
        // The activity may already be gone (weak reference); native handles null.
        if (activity == null) return null;
        AccountChooserDialog chooser =
                new AccountChooserDialog(activity, nativeAccountChooserDialog, credentials, title,
                        titleLinkStart, titleLinkEnd, origin, signinButtonText);
        chooser.show();
        return chooser;
    }

    /**
     * Builds the list adapter that renders one row per credential: avatar,
     * main/secondary name, and an optional PSL-origin info button.
     */
    private ArrayAdapter<Credential> generateAccountsArrayAdapter(
            Context context, Credential[] credentials) {
        return new ArrayAdapter<Credential>(context, 0 /* resource */, credentials) {
            @Override
            public View getView(int position, View convertView, ViewGroup parent) {
                if (convertView == null) {
                    LayoutInflater inflater = LayoutInflater.from(getContext());
                    convertView =
                            inflater.inflate(R.layout.account_chooser_dialog_item, parent, false);
                }
                convertView.setTag(position);

                Credential credential = getItem(position);

                ImageView avatarView = (ImageView) convertView.findViewById(R.id.profile_image);
                Bitmap avatar = credential.getAvatar();
                if (avatar != null) {
                    avatarView.setImageBitmap(avatar);
                } else {
                    // Placeholder until the async avatar fetch completes.
                    avatarView.setImageResource(R.drawable.account_management_no_picture);
                }

                TextView mainNameView = (TextView) convertView.findViewById(R.id.main_name);
                TextView secondaryNameView =
                        (TextView) convertView.findViewById(R.id.secondary_name);
                if (credential.getFederation().isEmpty()) {
                    // Not federated credentials case
                    if (credential.getDisplayName().isEmpty()) {
                        mainNameView.setText(credential.getUsername());
                        secondaryNameView.setVisibility(View.GONE);
                    } else {
                        mainNameView.setText(credential.getDisplayName());
                        secondaryNameView.setText(credential.getUsername());
                        secondaryNameView.setVisibility(View.VISIBLE);
                    }
                } else {
                    mainNameView.setText(credential.getUsername());
                    secondaryNameView.setText(credential.getFederation());
                    secondaryNameView.setVisibility(View.VISIBLE);
                }

                ImageButton pslInfoButton =
                        (ImageButton) convertView.findViewById(R.id.psl_info_btn);
                final String originUrl = credential.getOriginUrl();
                // A non-empty origin means this is a PSL-matched credential;
                // show an info button with the origin in a tooltip.
                if (!originUrl.isEmpty()) {
                    pslInfoButton.setVisibility(View.VISIBLE);
                    pslInfoButton.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            showTooltip(
                                    view,
                                    UrlFormatter.formatUrlForSecurityDisplay(
                                            originUrl, true /* showScheme */),
                                    R.layout.material_tooltip);
                        }
                    });
                }

                return convertView;
            }
        };
    }

    /**
     * Inflates the custom title (origin + optionally-linked branding text),
     * builds the AlertDialog with the credential list, and shows it.
     */
    private void show() {
        View titleView =
                LayoutInflater.from(mContext).inflate(R.layout.account_chooser_dialog_title, null);
        TextView origin = (TextView) titleView.findViewById(R.id.origin);
        origin.setText(mOrigin);
        TextView titleMessageText = (TextView) titleView.findViewById(R.id.title);
        // NOTE(review): a link span starting at offset 0 would be skipped here -
        // presumably the native side never produces that; confirm.
        if (mTitleLinkStart != 0 && mTitleLinkEnd != 0) {
            SpannableString spanableTitle = new SpannableString(mTitle);
            spanableTitle.setSpan(new ClickableSpan() {
                @Override
                public void onClick(View view) {
                    nativeOnLinkClicked(mNativeAccountChooserDialog);
                    mDialog.dismiss();
                }
            }, mTitleLinkStart, mTitleLinkEnd, Spanned.SPAN_INCLUSIVE_INCLUSIVE);
            titleMessageText.setText(spanableTitle, TextView.BufferType.SPANNABLE);
            titleMessageText.setMovementMethod(LinkMovementMethod.getInstance());
        } else {
            titleMessageText.setText(mTitle);
        }
        mAdapter = generateAccountsArrayAdapter(mContext, mCredentials);
        final AlertDialog.Builder builder =
                new AlertDialog.Builder(mContext, R.style.AlertDialogTheme)
                        .setCustomTitle(titleView)
                        .setNegativeButton(R.string.cancel, this)
                        .setAdapter(mAdapter, new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int item) {
                                mCredential = mCredentials[item];
                            }
                        });
        if (!TextUtils.isEmpty(mSigninButtonText)) {
            builder.setPositiveButton(mSigninButtonText, this);
        }
        mDialog = builder.create();
        mDialog.setOnDismissListener(this);
        mDialog.show();
    }

    /**
     * Shows a toast-based tooltip anchored above (and end-aligned with) the
     * given view, containing the PSL origin of a credential.
     */
    private void showTooltip(View view, String message, int layoutId) {
        Context context = view.getContext();
        Resources resources = context.getResources();

        LayoutInflater inflater = LayoutInflater.from(context);
        TextView text = (TextView) inflater.inflate(layoutId, null);
        text.setText(message);
        text.announceForAccessibility(message);

        // This is a work-around for a bug on Android versions KitKat and below
        // (http://crbug.com/693076). The tooltip wouldn't be shown otherwise.
        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            text.setSingleLine(false);
        }

        // The tooltip should be shown above and to the left (right for RTL) of the info button.
        // In order to do so the tooltip's location on the screen is determined. This location is
        // specified with regard to the top left corner and ignores RTL layouts. For this reason the
        // location of the tooltip is also specified as offsets to the top left corner of the
        // screen. Since the tooltip should be shown above the info button, the height of the
        // tooltip needs to be measured. Furthermore, the height of the statusbar is ignored when
        // obtaining the icon's screen location, but must be considered when specifying a y offset.
        // In addition, the measured width is needed in LTR layout, so that the right end of the
        // tooltip aligns with the right end of the info icon.
        final int[] screenPos = new int[2];
        view.getLocationOnScreen(screenPos);

        text.measure(MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED),
                MeasureSpec.makeMeasureSpec(0, View.MeasureSpec.UNSPECIFIED));

        final int width = view.getWidth();

        final int xOffset = ApiCompatibilityUtils.isLayoutRtl(view)
                ? screenPos[0]
                : screenPos[0] + width - text.getMeasuredWidth();

        final int statusBarHeightResourceId =
                resources.getIdentifier("status_bar_height", "dimen", "android");

        final int statusBarHeight = statusBarHeightResourceId > 0
                ? resources.getDimensionPixelSize(statusBarHeightResourceId)
                : 0;

        final int tooltipMargin = resources.getDimensionPixelSize(R.dimen.psl_info_tooltip_margin);

        final int yOffset =
                screenPos[1] - tooltipMargin - statusBarHeight - text.getMeasuredHeight();

        // The xOffset is with regard to the left edge of the screen. Gravity.LEFT is deprecated,
        // which is why the following line is necessary.
        final int xGravity = ApiCompatibilityUtils.isLayoutRtl(view) ? Gravity.END : Gravity.START;

        Toast toast = new Toast(context);
        toast.setGravity(Gravity.TOP | xGravity, xOffset, yOffset);
        toast.setDuration(Toast.LENGTH_SHORT);
        toast.setView(text);
        toast.show();
    }

    /**
     * Called by native when an avatar has been fetched for the credential at
     * {@code index}; updates the row in place if it is currently visible.
     */
    @CalledByNative
    private void imageFetchComplete(int index, Bitmap avatarBitmap) {
        if (mIsDestroyed) return;
        assert index >= 0 && index < mCredentials.length;
        assert mCredentials[index] != null;
        avatarBitmap = AccountManagementFragment.makeRoundUserPicture(avatarBitmap);
        mCredentials[index].setBitmap(avatarBitmap);
        ListView view = mDialog.getListView();
        if (index >= view.getFirstVisiblePosition() && index <= view.getLastVisiblePosition()) {
            // Profile image is in the visible range.
            View credentialView = view.getChildAt(index - view.getFirstVisiblePosition());
            if (credentialView == null) return;
            ImageView avatar = (ImageView) credentialView.findViewById(R.id.profile_image);
            avatar.setImageBitmap(avatarBitmap);
        }
    }

    // Releases the native counterpart exactly once; must be the last call.
    private void destroy() {
        assert mNativeAccountChooserDialog != 0;
        assert !mIsDestroyed;
        mIsDestroyed = true;
        nativeDestroy(mNativeAccountChooserDialog);
        mNativeAccountChooserDialog = 0;
        mDialog = null;
    }

    // Native-initiated dismissal; suppresses the callbacks in onDismiss().
    @CalledByNative
    private void dismissDialog() {
        assert !mWasDismissedByNative;
        mWasDismissedByNative = true;
        mDialog.dismiss();
    }

    @Override
    public void onClick(DialogInterface dialog, int whichButton) {
        // The "Sign In" button implicitly selects the first (only) credential.
        if (whichButton == DialogInterface.BUTTON_POSITIVE) {
            mCredential = mCredentials[0];
            mSigninButtonClicked = true;
        }
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        // Only notify native for user-initiated dismissals; native-initiated
        // ones already know the outcome.
        if (!mWasDismissedByNative) {
            if (mCredential != null) {
                nativeOnCredentialClicked(mNativeAccountChooserDialog, mCredential.getIndex(),
                        mSigninButtonClicked);
            } else {
                nativeCancelDialog(mNativeAccountChooserDialog);
            }
        }
        destroy();
    }

    private native void nativeOnCredentialClicked(long nativeAccountChooserDialogAndroid,
            int credentialId, boolean signinButtonClicked);
    private native void nativeCancelDialog(long nativeAccountChooserDialogAndroid);
    private native void nativeDestroy(long nativeAccountChooserDialogAndroid);
    private native void nativeOnLinkClicked(long nativeAccountChooserDialogAndroid);
}
|
mogoweb/365browser
|
app/src/main/java/org/chromium/chrome/browser/password_manager/AccountChooserDialog.java
|
Java
|
apache-2.0
| 14,683 |
// Page bootstrap: initialise the promotion module, wire the add/update
// buttons, configure date pickers and the two CKEditor instances.
$(document).ready(function() {
	PromotionGroup.init();
	PromotionGroup.loadImage();
	$('#add-new-promotion').on('click', function(){
		PromotionGroup.addNew();
	})
	$('#update-promotion').on('click', function(){
		PromotionGroup.update();
	})
	// date picker
	$('#promotion-add-end-date').datepicker({
	    format: "dd/mm/yyyy"
	});
	$('#promotion-add-start-date').datepicker({
	    format: "dd/mm/yyyy"
	});
	$('#promotion-update-start-date').datepicker({
	    format: "dd/mm/yyyy"
	});
	$('#promotion-update-end-date').datepicker({
	    format: "dd/mm/yyyy"
	});
	//editor: rich-text bodies for the add and update forms
	CKEDITOR.replace('promotionContent');
	CKEDITOR.replace('promotionContentUpdate');
});
var PromotionGroup = function () {
// load list promotion
var loadListPromotion = function() {
var html = '';
$.ajax({
url: URL + 'spaCMS/promotion/xhrLoad_promotion_list',
type: 'get',
dataType: 'json',
})
.done(function(data) {
$('span#promotion-title-spa').text('DANH SÁCH TIN KHUYẾN MÃI');
if(data['list'].length >0 ){
$('#promotion-mesage-list').hide('fast');
var totalPublish = 0;
$.each(data['list'], function(k, val){
totalPublish = (val.promotion_state == 1)? totalPublish+1 : totalPublish;
var state = (val.promotion_state == 0)? "button-other": "hide";
var nowDay = new Date();
var endDay = val.promotion_end_date;
var createDay = val.promotion_create_date;
var end_Day = val.promotion_end_date;
var startDay = val.promotion_start_date;
var strMark = "";
strMark = (new Date(end_Day) < new Date())? "color: red; text-decoration: line-through;" : "";
createDay = createDay.substr(8, 2)+'/'+ createDay.substr(5, 2)+'/'+createDay.substr(0, 4);
startDay = startDay.substr(8, 2)+'/'+ startDay.substr(5, 2)+'/'+startDay.substr(0, 4);
end_Day = end_Day.substr(8, 2)+'/'+ end_Day.substr(5, 2)+'/'+end_Day.substr(0, 4);
// var promotion_img = JSON.parse(val.promotion_img);
html += '<li class="row promotion-list-item" data-toggle="modal" data-target="#updatePromotion" data-id-promotion="'+val.promotion_id+'">';
html += '<div class="col-md-5" style="font-weight: 600; color: #888;">'+val.promotion_title+'</div>';
html += '<div class="col-md-2 created-date">'+createDay+'</div>';
html += '<div class="col-md-2">'+startDay+'</div>';
html += '<div class="col-md-2 end-date" style="'+strMark+'">'+end_Day+'</div>';
// html += '<div class="col-md-1">'+totalDay+'</div>';
html += '<div class="col-md-1 text-right" style="padding:0px;">';
html += '<button class="button '+state+' promotion-items-publish" data-promotion-id="'+val.promotion_id+'" title="Kích hoạt"> ';
html += '<i class="fa fa-check"></i>';
html += '</button> ';
html += '<button class="button button-secondary redeem promotion-items-delete" data-promotion-id="'+val.promotion_id+'" title="Xóa"> ';
html += '<i class="fa fa-trash"></i>';
html += '</button>';
html += '</div>';
html += '</li>';
});
html += '<input class="input-hide-state" type="hidden" value="'+totalPublish+'">';
$('#promotion-content').html(html);
}else{
$('#promotion-content').html('');
$('#promotion-mesage-list').fadeIn('fast');
}
})
.always(function(){
$('.promotion-items-delete').on('click',function(e){
var id_promotion = $(this).attr('data-promotion-id');
e.stopPropagation();
var cfr = confirm('Bạn có muốn xóa promotion này không?');
if(cfr == true){
deletePromotion(id_promotion);
}
});
$('.promotion-items-publish').on('click',function(e){
var id_promotion = $(this).attr('data-promotion-id');
var self = $(this);
var state = 1; // kich hoat
e.stopPropagation();
if($('input.input-hide-state').val() >4){
alert('Chỉ hiển thị được 5 promotion.');
}else{
publishPromotion(id_promotion, state);
}
});
$('.btn-close-modal').on('click',function (){
$(".modal").modal('hide');
})
$('#refres-promotion').on('click', function(){
refresh();
});
$('li.promotion-list-item').on('click', function(){
var self = $(this);
var id_promotion = self.attr('data-id-promotion');
$.ajax({
url: URL + 'spaCMS/promotion/xhrLoad_promotion_item',
type: 'post',
dataType: 'json',
data: {promotion_id: id_promotion},
})
.done(function(data) {
$.each(data, function(index, value) {
var title = value.promotion_title;
var id_promotion = value.promotion_id;
// img
var image = JSON.parse(value.promotion_img);
var img = image.img;
var thumbnail = image.thumbnail;
//date
var sDate = value.promotion_start_date;
sDate = sDate.replace(/-/g,' ');
var startDate = new Date(sDate);
var endDate = value.promotion_end_date;
endDate = new Date(endDate.replace(/-/g,' '));
var mainUpdate = $('#updatePromotion');
mainUpdate.find('#promotion-update-title').val(title);
mainUpdate.attr('data-id-promotion', id_promotion);
$('#promotion-update-start-date').datepicker('update',startDate);
$('#promotion-update-end-date').datepicker('update',endDate);
CKEDITOR.instances.promotionContentUpdate.setData(value.promotion_content);
var items = $('#ListIM_editUS');
var caller = $('#iM_editUS');
// ktra so luong hinh anh da co
caller.hide();
var out = null;
var html = '<li class="single-picture">';
html += '<div class="single-picture-wrapper">';
html += '<img id="user_slide_thumbnail" src=":img_thumbnail" data-img=":data-image" style="width:100px; height:70px;">';
html += '<input type="hidden" name="user_service_image[]" value=":image">';
html += '</div>';
html += '<div class="del_image icons-delete2"></div>';
html += '</li>';
out = html.replace(':img_thumbnail', thumbnail);
out = out.replace(':data-image', img);
out = out.replace(':image', img);
items.html(out);
// del image
$('.del_image').on("click", function(){
var self = $(this).parent();
// self.attr("disabled","disabled");
self.remove();
// Truong hop dac biet, ktra so luong hinh anh da co
var childrens = items.children().length;
if(childrens < 5) {
caller.fadeIn();
}
});
});
})
.always(function() {
});
})
});
}
function loadAsidePublish() {
var html = '';
$.ajax({
url: URL + 'spaCMS/promotion/xhrLoad_promotion_publish',
type: 'post',
dataType: 'json',
data: {promotion_state: 1},
})
.done(function(respon) {
if(respon.length > 0){
$.each(respon, function(index, val) {
// img
var title = val.promotion_title;
var image = JSON.parse(val.promotion_img);
var img = image.img;
var thumbnail = image.thumbnail;
html += '<div class="promotion-item-publish">';
html += '<div class="row-fluid">';
html += '<div class="promotion-item-puslish-title col-md-12">';
html += '<i class="fa fa-bullhorn"></i>';
html += '<span>'+title+'</span>';
html += '</div>';
// html += '<div class="col-sm-1">';
html += '<button class="btn btn-black promotion-items-change-publish" data-promotion-id="'+val.promotion_id+'" title="Tạm ngưng"> <i class="fa fa-close"></i></button>';
html += '</div>';
// html += '</div>';
html += '<div class="promotion-item-publish-img row-fluid">';
html += '<img class="pic" alt="" src="'+thumbnail+'"> ';
html += '</div>';
html += '</div>';
}); //end each
}else{
html +='<div class="row-fluid" style="padding:5px;padding: 5px; border: 1px solid #d88c8a;background-color: #F7E4E4;"><span><i class="fa fa-warning" style="color: #ffcc00;"></i> Hiện tại không có quảng cáo nào hiển thị.</span></div>';
}
$('.promotion-item-publish-wrap').html(html);
})
.fail(function() {
console.log("error");
})
.always(function() {
$('.promotion-items-change-publish').on('click', function(){
var promotion_id = $(this).attr('data-promotion-id');
publishPromotion(promotion_id,0);
});
});
}
// add new promotion
var addNewPromotion = function() {
var title = $('#promotion-add-title').val();
var now = new Date();
month = now.getMonth()+1;
var start_day = $('#promotion-add-start-date').val();
if(start_day.length >0){
start_day = start_day.substring(6,10)+'-'+start_day.substring(3,5)+'-'+start_day.substring(0,2)+' 00:00:00';
}else{
start_day = now.getFullYear()+'-'+month+'-'+now.getDate()+' 00:00:00';
}
var end_day = $('#promotion-add-end-date').val();
if(end_day.length >0){
end_day = end_day.substring(6,10)+'-'+end_day.substring(3,5)+'-'+end_day.substring(0,2)+' 00:00:00';
}else{
end_day = now.getFullYear()+'-'+month+'-'+now.getDate()+' 00:00:00';
}
var url_img = "";
var tagImg = $('#user_slide_thumbnail');
if(tagImg[0]){
var img = tagImg.attr('data-img');
var thumbnail = tagImg.attr('src');
url_img={};
url_img.img = img;
url_img.thumbnail = thumbnail;
}else{
url_img={};
url_img.img = URL+'/public/assets/img/noimage.jpg';
url_img.thumbnail = URL+'/public/assets/img/noimage.jpg';
}
var content = CKEDITOR.instances.promotionContent.getData();
var message = $('.promotion-message');
var jdata = {"title": title, "start_date": start_day, "end_date": end_day, "url_img": JSON.stringify(url_img), "content": content};
if(title.length<4){
message.text('Tiêu đề phải hơn 4 kí tự.').fadeIn('fast');
return false;
}else{
$.ajax({
url: URL + 'spaCMS/promotion/xhrInsert_promotion_item',
type: 'post',
dataType: 'json',
data: jdata,
})
.done(function(respon) {
if(respon === 1){
alert('Thêm promotion thành công.')
}else{
alert('Thêm promotion thất bại, bạn vui lòng thử lại.')
}
})
.fail(function() {
})
.always(function() {
loadListPromotion();
$(".modal").modal('hide');
refresh();
});
}
}
//delete Promotion
var deletePromotion = function(id_promotion) {
$.ajax({
url: URL + 'spaCMS/promotion/xhrDelete_promotion_item',
type: 'post',
dataType: 'json',
data: {'id_promotion': id_promotion},
})
.done(function(respon) {
if(respon==0){
alert('Xóa promotion thất bại, Xin vui lòng kiểm tra lại');
}
})
.fail(function(error) {
console.log("error: "+error);
})
.always(function() {
loadListPromotion();
loadAsidePublish();
});
}
var publishPromotion = function(id_promotion,state){
$.ajax({
url: URL + 'spaCMS/promotion/xhrPublish_promotion_item',
type: 'post',
dataType: 'json',
data: {'id_promotion': id_promotion, 'state_promotion': state},
})
.done(function(respon) {
if(respon == 0){
alert('Kích hoạt promotion thất bại, Xin vui lòng kiểm tra lại');
}
})
.fail(function(error) {
console.log("error: "+error);
})
.always(function() {
loadListPromotion();
loadAsidePublish()
});
}
// update Promotion
// Collect the edit-form fields, validate the title, and submit the
// promotion update to the backend; the list, sidebar and modal are
// refreshed/closed once the request settles.
var updatePromotion = function() {
    var now = new Date();
    var month = now.getMonth() + 1;
    // Convert a "dd/mm/yyyy" date-picker value to "yyyy-mm-dd 00:00:00";
    // falls back to today's date when the field was left blank.
    var toSqlDate = function(value) {
        if (value.length > 0) {
            return value.substring(6, 10) + '-' + value.substring(3, 5) + '-' + value.substring(0, 2) + ' 00:00:00';
        }
        return now.getFullYear() + '-' + month + '-' + now.getDate() + ' 00:00:00';
    };
    var title = $('#promotion-update-title').val();
    var id_promotion = $('#updatePromotion').attr('data-id-promotion');
    var start_day = toSqlDate($('#promotion-update-start-date').val());
    var end_day = toSqlDate($('#promotion-update-end-date').val());
    // Resolve the promotion image: either the thumbnail currently shown
    // in the form, or the shared placeholder image.
    var url_img = {};
    var tagImg = $('#user_slide_thumbnail');
    if (tagImg[0]) {
        url_img.img = tagImg.attr('data-img');
        url_img.thumbnail = tagImg.attr('src');
    } else {
        // NOTE(review): URL appears to already end with '/' elsewhere in
        // this file, so this likely yields a double slash — kept as-is.
        url_img.img = URL + '/public/assets/img/noimage.jpg';
        url_img.thumbnail = URL + '/public/assets/img/noimage.jpg';
    }
    var content = CKEDITOR.instances.promotionContentUpdate.getData();
    var message = $('.promotion-message');
    if (title.length < 4) {
        message.text('Tiêu đề phải hơn 4 kí tự.').fadeIn('fast');
        return false;
    }
    var jdata = {"id_promotion": id_promotion, "title": title, "start_date": start_day, "end_date": end_day, "url_img": JSON.stringify(url_img), "content": content};
    $.ajax({
        url: URL + 'spaCMS/promotion/xhrUpdate_promotion_item',
        type: 'post',
        dataType: 'json',
        data: jdata
    })
    .done(function(respon) {
        if (respon === 1) {
            alert('Cập nhật promotion thành công.')
        } else {
            alert('Cập nhật promotion thất bại, bạn vui lòng thử lại.')
        }
    })
    .fail(function() {
        console.log("error");
    })
    .always(function() {
        loadListPromotion();
        loadAsidePublish();
        $(".modal").modal('hide');
    });
}
// refresh form promotion
// Reset the "add promotion" form: clear the title and both date fields,
// hide any validation message, empty the CKEditor instance and drop the
// currently selected image so the picker button reappears.
var refresh = function(){
    var frmPromotion = $('#form-promotion');
    frmPromotion.find('input#promotion-add-title').val('');
    // Bug fix: the start-date field was never cleared because the
    // end-date selector was duplicated on two consecutive lines.
    frmPromotion.find('input#promotion-add-start-date').val('');
    frmPromotion.find('input#promotion-add-end-date').val('');
    frmPromotion.find('span.promotion-message').fadeOut('fast').text('');
    CKEDITOR.instances.promotionContent.setData('');
    // del image
    var self = $('.del_image').parent();
    self.remove();
    $('#iM_addUS').fadeIn('fast');
}
// Image Manager
var imageManager = function() {
// Gán thuộc tính cover_id tương ứng
$('#iM_editUS').click(function(){
$('#imageManager_saveChange').attr('cover_id','editUS');
});
$('#iM_addUS').click(function(){
$('#imageManager_saveChange').attr('cover_id','addUS');
});
// <!-- Save Change -->
$('#imageManager_saveChange').on('click', function(evt) {
evt.preventDefault();
// Define position insert to image
var cover_id = $(this).attr('cover_id');
// Define selected image
var radio_checked = $("input:radio[name='iM-radio']:checked"); // Radio checked
// image and thumbnail_image
var image = radio_checked.val();
var thumbnail = radio_checked.attr('data-image');
// Truong hop dac biet
if(cover_id == 'addUS') {
var caller = $('#iM_addUS');
var items = $('#ListIM_addUS');
}
else if(cover_id == 'editUS') {
var caller = $('#iM_editUS');
var items = $('#ListIM_editUS');
}
// ktra so luong hinh anh da co
var childrens = items.children().length + 1;
if(childrens == 1) {
caller.hide();
}
var out = null;
var html = '<li class="single-picture">';
html += '<div class="single-picture-wrapper">';
html += '<img id="user_slide_thumbnail" src=":img_thumbnail" data-img=":data-image" style="width:100px; height:60px;">';
html += '<input type="hidden" name="user_service_image[]" value=":image">';
html += '</div>';
html += '<div class="del_image icons-delete2"></div>';
html += '</li>';
out = html.replace(':img_thumbnail', thumbnail);
out = out.replace(':data-image', image);
out = out.replace(':image', image);
items.html(out);
// del image
$('.del_image').on("click", function(){
var self = $(this).parent();
// self.attr("disabled","disabled");
self.remove();
// Truong hop dac biet, ktra so luong hinh anh da co
var childrens = items.children().length;
if(childrens < 1) {
caller.fadeIn();
}
});
// Hide Modal
$("#imageManager_modal").modal('hide');
});
}
return {
init: function() { loadListPromotion(); loadAsidePublish();},
addNew: function(){ addNewPromotion(); },
update: function(){ updatePromotion(); },
loadImage: function(){ imageManager(); }
}
var refresh = function() {
console.log('refresh');
}
}();
// function offsetDay(dayStart, dayEnd){
// var offset = dayEnd.getTime() - dayStart.getTime();
// console.log(dayEnd.getTime()+'-'+dayStart.getTime());
// // lấy độ lệch của 2 mốc thời gian, đơn vị tính là millisecond
// var totalDays = Math.round(offset / 1000 / 60 / 60 / 24);
// console.log(dayStart.getTime());
// return totalDays;
// }
|
imtoantran/beleza
|
Views/spaCMS/promotion/js/spaCMS_promotion.js
|
JavaScript
|
apache-2.0
| 21,895 |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package orbin.deskclock;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Looper;
import android.os.Parcelable;
import android.provider.AlarmClock;
import android.text.TextUtils;
import android.text.format.DateFormat;
import orbin.deskclock.alarms.AlarmStateManager;
import orbin.deskclock.data.DataModel;
import orbin.deskclock.data.Timer;
import orbin.deskclock.events.Events;
import orbin.deskclock.provider.Alarm;
import orbin.deskclock.provider.AlarmInstance;
import orbin.deskclock.provider.DaysOfWeek;
import orbin.deskclock.timer.TimerFragment;
import orbin.deskclock.uidata.UiDataModel;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import static android.text.format.DateUtils.SECOND_IN_MILLIS;
import static orbin.deskclock.uidata.UiDataModel.Tab.ALARMS;
import static orbin.deskclock.uidata.UiDataModel.Tab.TIMERS;
/**
* This activity is never visible. It processes all public intents defined by {@link AlarmClock}
* that apply to alarms and timers. Its definition in AndroidManifest.xml requires callers to hold
* the com.android.alarm.permission.SET_ALARM permission to complete the requested action.
*/
public class HandleApiCalls extends Activity {
    // Application context; used by async tasks that outlive this transient,
    // invisible activity.
    private Context mAppContext;
    @Override
    protected void onCreate(Bundle icicle) {
        try {
            super.onCreate(icicle);
            mAppContext = getApplicationContext();
            final Intent intent = getIntent();
            final String action = intent == null ? null : intent.getAction();
            if (action == null) {
                return;
            }
            // Dispatch to the handler for the public AlarmClock action.
            switch (action) {
                case AlarmClock.ACTION_SET_ALARM:
                    handleSetAlarm(intent);
                    break;
                case AlarmClock.ACTION_SHOW_ALARMS:
                    handleShowAlarms();
                    break;
                case AlarmClock.ACTION_SET_TIMER:
                    handleSetTimer(intent);
                    break;
                case AlarmClock.ACTION_DISMISS_ALARM:
                    handleDismissAlarm(intent);
                    break;
                case AlarmClock.ACTION_SNOOZE_ALARM:
                    handleSnoozeAlarm();
            }
        } finally {
            // This activity has no UI; always finish immediately after
            // dispatching the requested action.
            finish();
        }
    }
    /** Handles ACTION_DISMISS_ALARM: shows the alarms tab and resolves the
     *  alarm(s) to dismiss on a background thread. */
    private void handleDismissAlarm(Intent intent) {
        // Change to the alarms tab.
        UiDataModel.getUiDataModel().setSelectedTab(ALARMS);
        // Open DeskClock which is now positioned on the alarms tab.
        startActivity(new Intent(mAppContext, DeskClock.class));
        new DismissAlarmAsync(mAppContext, intent, this).execute();
    }
    /**
     * Dismisses the next upcoming instance of {@code alarm} if it fires
     * within 24 hours, reporting the outcome via {@link Voice}.
     * Must be called from a background thread (does content-provider I/O).
     */
    public static void dismissAlarm(Alarm alarm, Context context, Activity activity) {
        // only allow on background thread
        if (Looper.myLooper() == Looper.getMainLooper()) {
            throw new IllegalStateException("dismissAlarm must be called on a " +
                    "background thread");
        }
        final AlarmInstance alarmInstance = AlarmInstance.getNextUpcomingInstanceByAlarmId(
                context.getContentResolver(), alarm.id);
        if (alarmInstance == null) {
            final String reason = context.getString(R.string.no_alarm_scheduled_for_this_time);
            Voice.notifyFailure(activity, reason);
            LogUtils.i(reason);
            return;
        }
        final String time = DateFormat.getTimeFormat(context).format(
                alarmInstance.getAlarmTime().getTime());
        // Only alarms within the next 24 hours may be pre-dismissed.
        if (Utils.isAlarmWithin24Hours(alarmInstance)) {
            AlarmStateManager.setPreDismissState(context, alarmInstance);
            final String reason = context.getString(R.string.alarm_is_dismissed, time);
            LogUtils.i(reason);
            Voice.notifySuccess(activity, reason);
            Events.sendAlarmEvent(R.string.action_dismiss, R.string.label_intent);
        } else {
            final String reason = context.getString(
                    R.string.alarm_cant_be_dismissed_still_more_than_24_hours_away, time);
            Voice.notifyFailure(activity, reason);
            LogUtils.i(reason);
        }
    }
    /** Background task that finds the alarm(s) matching the dismiss intent
     *  and dismisses them, asking the user to disambiguate when needed. */
    private static class DismissAlarmAsync extends AsyncTask<Void, Void, Void> {
        private final Context mContext;
        private final Intent mIntent;
        private final Activity mActivity;
        public DismissAlarmAsync(Context context, Intent intent, Activity activity) {
            mContext = context;
            mIntent = intent;
            mActivity = activity;
        }
        @Override
        protected Void doInBackground(Void... parameters) {
            final List<Alarm> alarms = getEnabledAlarms(mContext);
            if (alarms.isEmpty()) {
                final String reason = mContext.getString(R.string.no_scheduled_alarms);
                LogUtils.i(reason);
                Voice.notifyFailure(mActivity, reason);
                return null;
            }
            // remove Alarms in MISSED, DISMISSED, and PREDISMISSED states
            for (Iterator<Alarm> i = alarms.iterator(); i.hasNext();) {
                final AlarmInstance alarmInstance = AlarmInstance.getNextUpcomingInstanceByAlarmId(
                        mContext.getContentResolver(), i.next().id);
                if (alarmInstance == null ||
                        alarmInstance.mAlarmState > AlarmInstance.FIRED_STATE) {
                    i.remove();
                }
            }
            final String searchMode = mIntent.getStringExtra(AlarmClock.EXTRA_ALARM_SEARCH_MODE);
            if (searchMode == null && alarms.size() > 1) {
                // shows the UI where user picks which alarm they want to DISMISS
                final Intent pickSelectionIntent = new Intent(mContext,
                        AlarmSelectionActivity.class)
                        .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
                        .putExtra(AlarmSelectionActivity.EXTRA_ALARMS,
                                alarms.toArray(new Parcelable[alarms.size()]));
                mContext.startActivity(pickSelectionIntent);
                Voice.notifySuccess(mActivity, mContext.getString(R.string.pick_alarm_to_dismiss));
                return null;
            }
            // fetch the alarms that are specified by the intent
            final FetchMatchingAlarmsAction fmaa =
                    new FetchMatchingAlarmsAction(mContext, alarms, mIntent, mActivity);
            fmaa.run();
            final List<Alarm> matchingAlarms = fmaa.getMatchingAlarms();
            // If there are multiple matching alarms and it wasn't expected
            // disambiguate what the user meant
            if (!AlarmClock.ALARM_SEARCH_MODE_ALL.equals(searchMode) && matchingAlarms.size() > 1) {
                final Intent pickSelectionIntent = new Intent(mContext, AlarmSelectionActivity.class)
                        .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
                        .putExtra(AlarmSelectionActivity.EXTRA_ALARMS,
                                matchingAlarms.toArray(new Parcelable[matchingAlarms.size()]));
                mContext.startActivity(pickSelectionIntent);
                Voice.notifySuccess(mActivity, mContext.getString(R.string.pick_alarm_to_dismiss));
                return null;
            }
            // Apply the action to the matching alarms
            for (Alarm alarm : matchingAlarms) {
                dismissAlarm(alarm, mContext, mActivity);
                LogUtils.i("Alarm %s is dismissed", alarm);
            }
            return null;
        }
        // Returns all alarms whose ENABLED column is set.
        private static List<Alarm> getEnabledAlarms(Context context) {
            final String selection = String.format("%s=?", Alarm.ENABLED);
            final String[] args = { "1" };
            return Alarm.getAlarms(context.getContentResolver(), selection, args);
        }
    }
    /** Handles ACTION_SNOOZE_ALARM by snoozing every firing alarm. */
    private void handleSnoozeAlarm() {
        new SnoozeAlarmAsync(mAppContext, this).execute();
    }
    /** Background task that snoozes all currently firing alarm instances. */
    private static class SnoozeAlarmAsync extends AsyncTask<Void, Void, Void> {
        private final Context mContext;
        private final Activity mActivity;
        public SnoozeAlarmAsync(Context context, Activity activity) {
            mContext = context;
            mActivity = activity;
        }
        @Override
        protected Void doInBackground(Void... parameters) {
            final List<AlarmInstance> alarmInstances = AlarmInstance.getInstancesByState(
                    mContext.getContentResolver(), AlarmInstance.FIRED_STATE);
            if (alarmInstances.isEmpty()) {
                final String reason = mContext.getString(R.string.no_firing_alarms);
                LogUtils.i(reason);
                Voice.notifyFailure(mActivity, reason);
                return null;
            }
            for (AlarmInstance firingAlarmInstance : alarmInstances) {
                snoozeAlarm(firingAlarmInstance, mContext, mActivity);
            }
            return null;
        }
    }
    /**
     * Snoozes the given firing alarm instance and reports success via
     * {@link Voice}. Must be called from a background thread.
     */
    static void snoozeAlarm(AlarmInstance alarmInstance, Context context, Activity activity) {
        // only allow on background thread
        if (Looper.myLooper() == Looper.getMainLooper()) {
            throw new IllegalStateException("snoozeAlarm must be called on a " +
                    "background thread");
        }
        final String time = DateFormat.getTimeFormat(context).format(
                alarmInstance.getAlarmTime().getTime());
        final String reason = context.getString(R.string.alarm_is_snoozed, time);
        LogUtils.i(reason);
        Voice.notifySuccess(activity, reason);
        AlarmStateManager.setSnoozeState(context, alarmInstance, true);
        LogUtils.i("Snooze %d:%d", alarmInstance.mHour, alarmInstance.mMinute);
        Events.sendAlarmEvent(R.string.action_snooze, R.string.label_intent);
    }
    /***
     * Processes the SET_ALARM intent: reuses an existing alarm that matches
     * the intent's criteria, or creates a new one. Falls back to the alarm
     * creation UI when no valid time is supplied.
     * @param intent Intent passed to the app
     */
    private void handleSetAlarm(Intent intent) {
        // If not provided or invalid, show UI
        final int hour = intent.getIntExtra(AlarmClock.EXTRA_HOUR, -1);
        // If not provided, use zero. If it is provided, make sure it's valid, otherwise, show UI
        final int minutes;
        if (intent.hasExtra(AlarmClock.EXTRA_MINUTES)) {
            minutes = intent.getIntExtra(AlarmClock.EXTRA_MINUTES, -1);
        } else {
            minutes = 0;
        }
        if (hour < 0 || hour > 23 || minutes < 0 || minutes > 59) {
            // Change to the alarms tab.
            UiDataModel.getUiDataModel().setSelectedTab(ALARMS);
            // Intent has no time or an invalid time, open the alarm creation UI.
            final Intent createAlarm = Alarm.createIntent(this, DeskClock.class, Alarm.INVALID_ID)
                    .addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
                    .putExtra(AlarmClockFragment.ALARM_CREATE_NEW_INTENT_EXTRA, true);
            // Open DeskClock which is now positioned on the alarms tab.
            startActivity(createAlarm);
            Voice.notifyFailure(this, getString(R.string.invalid_time, hour, minutes, " "));
            LogUtils.i("HandleApiCalls no/invalid time; opening UI");
            return;
        }
        Events.sendAlarmEvent(R.string.action_create, R.string.label_intent);
        final boolean skipUi = intent.getBooleanExtra(AlarmClock.EXTRA_SKIP_UI, false);
        final StringBuilder selection = new StringBuilder();
        final List<String> args = new ArrayList<>();
        setSelectionFromIntent(intent, hour, minutes, selection, args);
        // Update existing alarm matching the selection criteria; see setSelectionFromIntent.
        final ContentResolver cr = getContentResolver();
        final List<Alarm> alarms = Alarm.getAlarms(cr,
                selection.toString(),
                args.toArray(new String[args.size()]));
        if (!alarms.isEmpty()) {
            final Alarm alarm = alarms.get(0);
            alarm.enabled = true;
            Alarm.updateAlarm(cr, alarm);
            // Delete all old instances and create a new one with updated values
            AlarmStateManager.deleteAllInstances(this, alarm.id);
            setupInstance(alarm.createInstanceAfter(Calendar.getInstance()), skipUi);
            LogUtils.i("HandleApiCalls deleted old, created new alarm: %s", alarm);
            return;
        }
        // Otherwise insert a new alarm.
        final String message = getMessageFromIntent(intent);
        final DaysOfWeek daysOfWeek = getDaysFromIntent(intent);
        final boolean vibrate = intent.getBooleanExtra(AlarmClock.EXTRA_VIBRATE, true);
        final String alert = intent.getStringExtra(AlarmClock.EXTRA_RINGTONE);
        Alarm alarm = new Alarm(hour, minutes);
        alarm.enabled = true;
        alarm.label = message;
        alarm.daysOfWeek = daysOfWeek;
        alarm.vibrate = vibrate;
        if (alert != null) {
            if (AlarmClock.VALUE_RINGTONE_SILENT.equals(alert) || alert.isEmpty()) {
                alarm.alert = Alarm.NO_RINGTONE_URI;
            } else {
                alarm.alert = Uri.parse(alert);
            }
        }
        // One-shot alarms created without UI are discarded after firing.
        alarm.deleteAfterUse = !daysOfWeek.isRepeating() && skipUi;
        alarm = Alarm.addAlarm(cr, alarm);
        final AlarmInstance alarmInstance = alarm.createInstanceAfter(Calendar.getInstance());
        setupInstance(alarmInstance, skipUi);
        final String time = DateFormat.getTimeFormat(mAppContext).format(
                alarmInstance.getAlarmTime().getTime());
        Voice.notifySuccess(this, getString(R.string.alarm_is_set, time));
        LogUtils.i("HandleApiCalls set up alarm: %s", alarm);
    }
    /** Handles ACTION_SHOW_ALARMS by opening DeskClock on the alarms tab. */
    private void handleShowAlarms() {
        // Change to the alarms tab.
        UiDataModel.getUiDataModel().setSelectedTab(ALARMS);
        // Open DeskClock which is now positioned on the alarms tab.
        startActivity(new Intent(this, DeskClock.class));
        Events.sendAlarmEvent(R.string.action_show, R.string.label_intent);
        LogUtils.i("HandleApiCalls show alarms");
    }
    /** Handles ACTION_SET_TIMER: reuses or creates a timer of the requested
     *  length and starts it; shows the timer setup UI when no length given. */
    private void handleSetTimer(Intent intent) {
        // If no length is supplied, show the timer setup view.
        if (!intent.hasExtra(AlarmClock.EXTRA_LENGTH)) {
            // Change to the timers tab.
            UiDataModel.getUiDataModel().setSelectedTab(TIMERS);
            // Open DeskClock which is now positioned on the timers tab and show the timer setup.
            startActivity(TimerFragment.createTimerSetupIntent(this));
            LogUtils.i("HandleApiCalls showing timer setup");
            return;
        }
        // Verify that the timer length is between one second and one day.
        final long lengthMillis = SECOND_IN_MILLIS * intent.getIntExtra(AlarmClock.EXTRA_LENGTH, 0);
        if (lengthMillis < Timer.MIN_LENGTH || lengthMillis > Timer.MAX_LENGTH) {
            Voice.notifyFailure(this, getString(R.string.invalid_timer_length));
            LogUtils.i("Invalid timer length requested: " + lengthMillis);
            return;
        }
        final String label = getMessageFromIntent(intent);
        final boolean skipUi = intent.getBooleanExtra(AlarmClock.EXTRA_SKIP_UI, false);
        // Attempt to reuse an existing timer that is Reset with the same length and label.
        Timer timer = null;
        for (Timer t : DataModel.getDataModel().getTimers()) {
            if (!t.isReset()) { continue; }
            if (t.getLength() != lengthMillis) { continue; }
            if (!TextUtils.equals(label, t.getLabel())) { continue; }
            timer = t;
            break;
        }
        // Create a new timer if one could not be reused.
        if (timer == null) {
            timer = DataModel.getDataModel().addTimer(lengthMillis, label, skipUi);
            Events.sendTimerEvent(R.string.action_create, R.string.label_intent);
        }
        // Start the selected timer.
        DataModel.getDataModel().startTimer(timer);
        Events.sendTimerEvent(R.string.action_start, R.string.label_intent);
        Voice.notifySuccess(this, getString(R.string.timer_created));
        // If not instructed to skip the UI, display the running timer.
        if (!skipUi) {
            // Change to the timers tab.
            UiDataModel.getUiDataModel().setSelectedTab(TIMERS);
            // Open DeskClock which is now positioned on the timers tab.
            startActivity(new Intent(this, DeskClock.class)
                    .putExtra(HandleDeskClockApiCalls.EXTRA_TIMER_ID, timer.getId()));
        }
    }
    /** Persists and registers the new alarm instance; optionally shows it. */
    private void setupInstance(AlarmInstance instance, boolean skipUi) {
        instance = AlarmInstance.addInstance(this.getContentResolver(), instance);
        AlarmStateManager.registerInstance(this, instance, true);
        AlarmUtils.popAlarmSetToast(this, instance.getAlarmTime().getTimeInMillis());
        if (!skipUi) {
            // Change to the alarms tab.
            UiDataModel.getUiDataModel().setSelectedTab(ALARMS);
            // Open DeskClock which is now positioned on the alarms tab.
            final Intent showAlarm = Alarm.createIntent(this, DeskClock.class, instance.mAlarmId)
                    .putExtra(AlarmClockFragment.SCROLL_TO_ALARM_INTENT_EXTRA, instance.mAlarmId)
                    .addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            startActivity(showAlarm);
        }
    }
    /** Returns EXTRA_MESSAGE from the intent, or "" when absent. */
    private static String getMessageFromIntent(Intent intent) {
        final String message = intent.getStringExtra(AlarmClock.EXTRA_MESSAGE);
        return message == null ? "" : message;
    }
    /** Builds a DaysOfWeek from EXTRA_DAYS, accepting either an
     *  ArrayList&lt;Integer&gt; (per API) or an int[] (leniency). */
    private static DaysOfWeek getDaysFromIntent(Intent intent) {
        final DaysOfWeek daysOfWeek = new DaysOfWeek(0);
        final ArrayList<Integer> days = intent.getIntegerArrayListExtra(AlarmClock.EXTRA_DAYS);
        if (days != null) {
            final int[] daysArray = new int[days.size()];
            for (int i = 0; i < days.size(); i++) {
                daysArray[i] = days.get(i);
            }
            daysOfWeek.setDaysOfWeek(true, daysArray);
        } else {
            // API says to use an ArrayList<Integer> but we allow the user to use a int[] too.
            final int[] daysArray = intent.getIntArrayExtra(AlarmClock.EXTRA_DAYS);
            if (daysArray != null) {
                daysOfWeek.setDaysOfWeek(true, daysArray);
            }
        }
        return daysOfWeek;
    }
    /**
     * Assemble a database where clause to search for an alarm matching the given {@code hour} and
     * {@code minutes} as well as all of the optional information within the {@code intent}
     * including:
     *
     * <ul>
     *     <li>alarm message</li>
     *     <li>repeat days</li>
     *     <li>vibration setting</li>
     *     <li>ringtone uri</li>
     * </ul>
     *
     * @param intent contains details of the alarm to be located
     * @param hour the hour of the day of the alarm
     * @param minutes the minute of the hour of the alarm
     * @param selection an out parameter containing a SQL where clause
     * @param args an out parameter containing the values to substitute into the {@code selection}
     */
    private void setSelectionFromIntent(
            Intent intent,
            int hour,
            int minutes,
            StringBuilder selection,
            List<String> args) {
        selection.append(Alarm.HOUR).append("=?");
        args.add(String.valueOf(hour));
        selection.append(" AND ").append(Alarm.MINUTES).append("=?");
        args.add(String.valueOf(minutes));
        if (intent.hasExtra(AlarmClock.EXTRA_MESSAGE)) {
            selection.append(" AND ").append(Alarm.LABEL).append("=?");
            args.add(getMessageFromIntent(intent));
        }
        // Days is treated differently that other fields because if days is not specified, it
        // explicitly means "not recurring".
        selection.append(" AND ").append(Alarm.DAYS_OF_WEEK).append("=?");
        args.add(String.valueOf(intent.hasExtra(AlarmClock.EXTRA_DAYS)
                ? getDaysFromIntent(intent).getBitSet() : DaysOfWeek.NO_DAYS_SET));
        if (intent.hasExtra(AlarmClock.EXTRA_VIBRATE)) {
            selection.append(" AND ").append(Alarm.VIBRATE).append("=?");
            args.add(intent.getBooleanExtra(AlarmClock.EXTRA_VIBRATE, false) ? "1" : "0");
        }
        if (intent.hasExtra(AlarmClock.EXTRA_RINGTONE)) {
            selection.append(" AND ").append(Alarm.RINGTONE).append("=?");
            String ringTone = intent.getStringExtra(AlarmClock.EXTRA_RINGTONE);
            if (ringTone == null) {
                // If the intent explicitly specified a NULL ringtone, treat it as the default
                // ringtone.
                ringTone = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM).toString();
            } else if (AlarmClock.VALUE_RINGTONE_SILENT.equals(ringTone) || ringTone.isEmpty()) {
                ringTone = Alarm.NO_RINGTONE;
            }
            args.add(ringTone);
        }
    }
}
|
OrBin/SynClock-Android
|
src/orbin/deskclock/HandleApiCalls.java
|
Java
|
apache-2.0
| 21,891 |
package com.atom.empire.das.osite.auto.records;
import org.apache.empire.db.DBRecord;
import com.atom.empire.das.osite.auto.OSiteTable;
/**
 * Base class for typed OSite table records. It binds a {@link DBRecord}
 * to a concrete {@link OSiteTable} subclass so callers obtain the
 * specific table type without an explicit cast.
 *
 * @param <T> the table type this record is based upon
 */
public abstract class OSiteTBO<T extends OSiteTable> extends DBRecord {
    private static final long serialVersionUID = 1L;

    /**
     * Creates a record bound to the given table.
     *
     * @param table the table that owns this record
     */
    public OSiteTBO(T table) {
        super(table);
    }

    /**
     * Returns the table this record is based upon.
     *
     * @return the table this record is based upon
     */
    @SuppressWarnings("unchecked")
    public T getTable() {
        return (T) super.getRowSet();
    }
}
|
wuhongjun/atom-empire-demo
|
src/main/java/com/atom/empire/das/osite/auto/records/OSiteTBO.java
|
Java
|
apache-2.0
| 533 |
/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';

/**
* Erlang distributed pseudorandom numbers.
*
* @module @stdlib/random/base/erlang
*
* @example
* var erlang = require( '@stdlib/random/base/erlang' );
*
* var v = erlang( 3, 2.5 );
* // returns <number>
*
* @example
* var factory = require( '@stdlib/random/base/erlang' ).factory;
*
* var erlang = factory( 8, 5.9, {
*     'seed': 297
* });
*
* var v = erlang();
* // returns <number>
*/

// MODULES //

var setReadOnly = require( '@stdlib/utils/define-nonenumerable-read-only-property' );
var main = require( './main.js' );
var factory = require( './factory.js' );


// MAIN //

// Attach the seeded-generator factory to the main export as a
// non-enumerable read-only property.
setReadOnly( main, 'factory', factory );


// EXPORTS //

module.exports = main;
|
stdlib-js/stdlib
|
lib/node_modules/@stdlib/random/base/erlang/lib/index.js
|
JavaScript
|
apache-2.0
| 1,304 |
/*******************************************************************************
* Copyright 2015 Maximilian Stark | Dakror <mail@dakror.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.dakror.spamwars.net.packet;
/**
* @author Dakror
*/
/**
 * Disconnect packet. Wire format is {@code "<username>:<causeOrdinal>"},
 * where the cause ordinal is always the final colon-separated segment.
 */
public class Packet01Disconnect extends Packet {
    /** Reasons a client can be disconnected, each with a user-facing description. */
    public enum Cause {
        SERVER_CLOSED("Der Server wurde geschlossen."),
        USER_DISCONNECT("Spiel beendet."),
        ;

        private String description;

        private Cause(String desc) {
            description = desc;
        }

        public String getDescription() {
            return description;
        }
    }

    private Cause cause;
    private String username;

    /**
     * Decodes a disconnect packet from its wire form.
     * Bug fix: anchor the parse on the LAST ':' instead of split(":"),
     * so usernames that themselves contain ':' survive a round trip
     * (split(":") previously parsed the wrong segment as the ordinal).
     *
     * @param data raw packet bytes as produced by {@link #getPacketData()}
     */
    public Packet01Disconnect(byte[] data) {
        super(1);
        String s = readData(data);
        int sep = s.lastIndexOf(':');
        username = s.substring(0, sep);
        cause = Cause.values()[Integer.parseInt(s.substring(sep + 1))];
    }

    /**
     * Creates a disconnect packet to be sent.
     *
     * @param username the disconnecting user's name
     * @param cause why the user is disconnecting
     */
    public Packet01Disconnect(String username, Cause cause) {
        super(1);
        this.username = username;
        this.cause = cause;
    }

    @Override
    public byte[] getPacketData() {
        // Encode as "<username>:<ordinal>"; decoding uses the last ':'.
        return (username + ":" + cause.ordinal()).getBytes();
    }

    public String getUsername() {
        return username;
    }

    public Cause getCause() {
        return cause;
    }
}
|
Dakror/SpamWars
|
src/main/java/de/dakror/spamwars/net/packet/Packet01Disconnect.java
|
Java
|
apache-2.0
| 1,729 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def load_docstrings(module):
    """Attach docstrings to the methods of ``module.State`` in place.

    Keeping the (long) documentation in this helper keeps the State
    implementation module compact. Access goes through ``__func__``
    (unbound-method attribute, Python 2 style) so the docstring is set
    on the underlying function object.
    """
    module.State.__init__.__func__.__doc__ = """
    Construct a State.
    Parameters
    ----------
    X : np.ndarray
        Data matrix, each row is an observation and each column a variable.
    outputs : list<int>, optional
        Unique non-negative ID for each column in X, and used to refer to
        the column for all future queries. Defaults to range(0, X.shape[1])
    inputs : list<int>, optional
        Currently unsupported.
    cctypes : list<str>
        Data type of each column, see `utils.config` for valid cctypes.
    distargs : list<dict>, optional
        See the documentation for each DistributionGpm for its distargs.
    Zv : dict(int:int), optional
        Assignment of output columns to views, where Zv[k] is the
        view assignment for column k. Defaults to sampling from CRP.
    Zrv : dict(int:list<int>), optional
        Assignment of rows to clusters in each view, where Zrv[k] is
        the Zr for View k. If specified, then Zv must also be specified.
        Defaults to sampling from CRP.
    Cd : list(list<int>), optional
        List of marginal dependence constraints for columns. Each element in
        the list is a list of columns which are to be in the same view. Each
        column can only be in one such list i.e. [[1,2,5],[1,5]] is not
        allowed.
    Ci : list(tuple<int>), optional
        List of marginal independence constraints for columns.
        Each element in the list is a 2-tuple of columns that must be
        independent, i.e. [(1,2),(1,3)].
    Rd : dict(int:Cd), optional
        Dictionary of dependence constraints for rows, wrt.
        Each entry is (col: Cd), where col is a column number and Cd is a
        list of dependence constraints for the rows with respect to that
        column (see doc for Cd).
    Ri : dict(int:Cid), optional
        Dictionary of independence constraints for rows, wrt.
        Each entry is (col: Ci), where col is a column number and Ci is a
        list of independence constraints for the rows with respect to that
        column (see doc for Ci).
    iterations : dict(str:int), optional
        Metadata holding the number of iters each kernel has been run.
    loom_path: str, optional
        Path to a loom project compatible with this State.
    rng : np.random.RandomState, optional.
        Source of entropy.
    """
    # --------------------------------------------------------------------------
    # Observe
    module.State.incorporate_dim.__func__.__doc__ = """
    Incorporate a new Dim into this State.
    Parameters
    ----------
    T : list
        Data with length self.n_rows().
    outputs : list[int]
        Identity of the variable modeled by this dim, must be non-negative
        and cannot collide with State.outputs. Only univariate outputs
        currently supported, so the list be a singleton.
    cctype, distargs:
        refer to State.__init__
    v : int, optional
        Index of the view to assign the data. If 0 <= v < len(state.views)
        then insert into an existing View. If v = len(state.views) then
        singleton view will be created with a partition from the CRP prior.
        If unspecified, will be sampled.
    """
    # --------------------------------------------------------------------------
    # Schema updates.
    module.State.update_cctype.__func__.__doc__ = """
    Update the distribution type of self.dims[col] to cctype.
    Parameters
    ----------
    col : int
        Index of column to update.
    cctype, distargs:
        refer to State.__init__
    """
    # --------------------------------------------------------------------------
    # Compositions
    module.State.compose_cgpm.__func__.__doc__ = """
    Compose a CGPM with this object.
    Parameters
    ----------
    cgpm : cgpm.cgpm.CGpm object
        The `CGpm` object to compose.
    Returns
    -------
    token : int
        A unique token representing the composed cgpm, to be used
        by `State.decompose_cgpm`.
    """
    module.State.decompose_cgpm.__func__.__doc__ = """
    Decompose a previously composed CGPM.
    Parameters
    ----------
    token : int
        The unique token representing the composed cgpm, returned from
        `State.compose_cgpm`.
    """
    # --------------------------------------------------------------------------
    # logpdf_score
    module.State.logpdf_score.__func__.__doc__ = """
    Compute joint density of all latents and the incorporated data.
    Returns
    -------
    logpdf_score : float
        The log score is P(X,Z) = P(X|Z)P(Z) where X is the observed data
        and Z is the entirety of the latent state in the CGPM.
    """
    # --------------------------------------------------------------------------
    # Mutual information
    module.State.mutual_information.__func__.__doc__ = """
    Computes the mutual information MI(col0:col1|constraints).
    Mutual information with constraints can be of the form:
    - MI(X:Y|Z=z): CMI at a fixed conditioning value.
    - MI(X:Y|Z): expected CMI E_Z[MI(X:Y|Z)] under Z.
    - MI(X:Y|Z, W=w): expected CMI E_Z[MI(X:Y|Z,W=w)] under Z.
    This function supports all three forms. The CMI is computed under the
    posterior predictive joint distributions.
    Parameters
    ----------
    col0, col1 : list<int>
        Columns to comptue MI. If all columns in `col0` are equivalent
        to columns in `col` then entropy is returned, otherwise they must
        be disjoint and the CMI is returned
    constraints : list(tuple), optional
        A list of pairs (col, val) of observed values to condition on. If
        `val` is None, then `col` is marginalized over.
    T : int, optional.
        Number of samples to use in the outer (marginalization) estimator.
    N : int, optional.
        Number of samples to use in the inner Monte Carlo estimator.
    Returns
    -------
    mi : float
        A point estimate of the mutual information.
    Examples
    -------
    # Compute MI(X:Y)
    >>> State.mutual_information(col_x, col_y)
    # Compute MI(X:Y|Z=1)
    >>> State.mutual_information(col_x, col_y, {col_z: 1})
    # Compute MI(X:Y|W)
    >>> State.mutual_information(col_x, col_y, {col_w:None})
    # Compute MI(X:Y|Z=1, W)
    >>> State.mutual_information(col_x, col_y, {col_z: 1, col_w:None})
    """
    # --------------------------------------------------------------------------
    # Inference
    module.State.transition.__func__.__doc__ = """
    Run targeted inference kernels.
    Parameters
    ----------
    N : int, optional
        Number of iterations to transition. Default 1.
    S : float, optional
        Number of seconds to transition. If both N and S set then min used.
    kernels : list<{'alpha', 'view_alphas', 'column_params', 'column_hypers'
            'rows', 'columns'}>, optional
        List of inference kernels to run in this transition. Default all.
    views, rows, cols : list<int>, optional
        View, row and column numbers to apply the kernels. Default all.
    checkpoint : int, optional
        Number of transitions between recording inference diagnostics
        from the latent state (such as logscore and row/column partitions).
        Defaults to no checkpointing.
    progress : boolean, optional
        Show a progress bar for number of target iterations or elapsed time.
    """
|
probcomp/cgpm
|
src/crosscat/statedoc.py
|
Python
|
apache-2.0
| 8,646 |
package jsfunction.gwt.returns;
import jsfunction.gwt.JsFunction;
import com.google.gwt.core.client.JavaScriptObject;
/**
 * This is the JavaScriptObject type returned from JsFunction.create(JsReturn).
 * It can be passed as an argument to a JavaScript function, which then returns
 * its results asynchronously by calling "arg.result(someResult)". If the
 * JavaScript function encounters an error or exception, it can return that
 * as a JavaScript "Error" class via "arg.error(new Error('message'))" (or simply
 * "arg.error(caughtError)").
 *
 * @author richkadel
 */
public final class JsResultOrError extends JavaScriptObject {

  // GWT overlay types require a protected no-arg constructor and may not be
  // instantiated directly from Java; instances originate from JavaScript.
  protected JsResultOrError() {}

  /** Returns the JavaScript "result" callback property of this object (JSNI accessor). */
  public native JsFunction resultFunction() /*-{ return this.result }-*/;

  /** Returns the JavaScript "error" callback property of this object (JSNI accessor). */
  public native JsFunction errorFunction() /*-{ return this.error }-*/;
}
|
richkadel/jsfunction-gwt
|
jsfunction-gwt-main/src/main/java/jsfunction/gwt/returns/JsResultOrError.java
|
Java
|
apache-2.0
| 825 |
package main
import (
"fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/prometheus/client_golang/prometheus"
"time"
"regexp"
"strings"
)
// getLatestDatapoint returns the datapoint with the most recent timestamp,
// or nil when the slice is empty.
func getLatestDatapoint(datapoints []*cloudwatch.Datapoint) *cloudwatch.Datapoint {
	var latest *cloudwatch.Datapoint
	for _, candidate := range datapoints {
		if latest == nil || latest.Timestamp.Before(*candidate.Timestamp) {
			latest = candidate
		}
	}
	return latest
}
// scrape makes the required calls to AWS CloudWatch by using the parameters in the cwCollector
// Once converted into Prometheus format, the metrics are pushed on the ch channel.
// scrape makes the required calls to AWS CloudWatch using the parameters in the
// cwCollector. Once converted into Prometheus format, the metrics are pushed on
// the ch channel.
func scrape(collector *cwCollector, ch chan<- prometheus.Metric) {
	// 'sess' rather than 'session' so the imported package is not shadowed.
	sess := session.Must(session.NewSession(&aws.Config{
		Region: aws.String(collector.Region),
	}))
	svc := cloudwatch.New(sess)

	for m := range collector.Template.Metrics {
		metric := &collector.Template.Metrics[m]

		now := time.Now()
		end := now.Add(time.Duration(-metric.ConfMetric.DelaySeconds) * time.Second)

		params := &cloudwatch.GetMetricStatisticsInput{
			EndTime:    aws.Time(end),
			StartTime:  aws.Time(end.Add(time.Duration(-metric.ConfMetric.RangeSeconds) * time.Second)),
			Period:     aws.Int64(int64(metric.ConfMetric.PeriodSeconds)),
			MetricName: aws.String(metric.ConfMetric.Name),
			Namespace:  aws.String(metric.ConfMetric.Namespace),
			Dimensions: []*cloudwatch.Dimension{},
			Unit:       nil,
		}

		dimensions := []*cloudwatch.Dimension{}

		// Tracks dimension-value combinations that have already been scraped so
		// duplicates returned by ListMetrics are only published once.
		valueCollected := map[string]bool{}

		if len(metric.ConfMetric.DimensionsSelectRegex) == 0 {
			metric.ConfMetric.DimensionsSelectRegex = map[string]string{}
		}

		// Dimensions that have neither an explicit select nor a select_regex are
		// made to match everything.
		for _, dimension := range metric.ConfMetric.Dimensions {
			_, found := metric.ConfMetric.DimensionsSelect[dimension]
			_, found2 := metric.ConfMetric.DimensionsSelectRegex[dimension]
			if !found && !found2 {
				metric.ConfMetric.DimensionsSelectRegex[dimension] = ".*"
			}
		}

		if metric.ConfMetric.Statistics != nil {
			params.SetStatistics(aws.StringSlice(metric.ConfMetric.Statistics))
		}
		if metric.ConfMetric.ExtendedStatistics != nil {
			params.SetExtendedStatistics(aws.StringSlice(metric.ConfMetric.ExtendedStatistics))
		}

		labels := make([]string, 0, len(metric.LabelNames))

		// Loop through the dimension selects to build the filters and the labels array.
		for dim := range metric.ConfMetric.DimensionsSelect {
			for val := range metric.ConfMetric.DimensionsSelect[dim] {
				dimValue := metric.ConfMetric.DimensionsSelect[dim][val]

				// Replace the $_target token by the actual URL target.
				if dimValue == "$_target" {
					dimValue = collector.Target
				}

				dimensions = append(dimensions, &cloudwatch.Dimension{
					Name:  aws.String(dim),
					Value: aws.String(dimValue),
				})
				labels = append(labels, dimValue)
			}
		}

		if len(dimensions) > 0 || len(metric.ConfMetric.Dimensions) == 0 {
			labels = append(labels, collector.Template.Task.Name)
			params.Dimensions = dimensions
			scrapeSingleDataPoint(collector, ch, params, metric, labels, svc)
		}

		// If no regex is specified, continue with the next metric.
		if len(metric.ConfMetric.DimensionsSelectRegex) == 0 {
			continue
		}

		// List all the metrics so the regexes can be matched against their dimensions.
		result, err := svc.ListMetrics(&cloudwatch.ListMetricsInput{
			MetricName: aws.String(metric.ConfMetric.Name),
			Namespace:  aws.String(metric.ConfMetric.Namespace),
		})
		totalRequests.Inc()
		// BUG FIX: check the error before dereferencing result (result.NextToken
		// was previously read before the error check).
		if err != nil {
			fmt.Println(err)
			continue
		}

		nextToken := result.NextToken
		metrics := result.Metrics

		// Follow the pagination until exhaustion.
		// BUG FIX: break (not continue) on error — continuing without advancing
		// the token would loop forever on a persistent error.
		for nextToken != nil {
			page, err := svc.ListMetrics(&cloudwatch.ListMetricsInput{
				MetricName: aws.String(metric.ConfMetric.Name),
				Namespace:  aws.String(metric.ConfMetric.Namespace),
				NextToken:  nextToken,
			})
			totalRequests.Inc()
			if err != nil {
				fmt.Println(err)
				break
			}
			nextToken = page.NextToken
			metrics = append(metrics, page.Metrics...)
		}

		// BUG FIX: iterate over the accumulated 'metrics' (all pages), not
		// 'result.Metrics' (first page only).
		for _, met := range metrics {
			labels := make([]string, 0, len(metric.LabelNames))
			dimensions = []*cloudwatch.Dimension{}

			// Try to match each dimension against its regex.
			for _, dim := range met.Dimensions {
				dimRegex := metric.ConfMetric.DimensionsSelectRegex[*dim.Name]
				if dimRegex == "" {
					dimRegex = "\\b" + strings.Join(metric.ConfMetric.DimensionsSelect[*dim.Name], "\\b|\\b") + "\\b"
				}

				match, _ := regexp.MatchString(dimRegex, *dim.Value)
				if match {
					dimensions = append(dimensions, &cloudwatch.Dimension{
						Name:  aws.String(*dim.Name),
						Value: aws.String(*dim.Value),
					})
					labels = append(labels, *dim.Value)
				}
			}

			// Checking if all dimensions matched.
			if len(labels) == len(metric.ConfMetric.Dimensions) {
				// Skip dimension combinations that have already been scraped.
				if _, ok := valueCollected[strings.Join(labels, ";")]; ok {
					continue
				}

				valueCollected[strings.Join(labels, ";")] = true
				params.Dimensions = dimensions
				labels = append(labels, collector.Template.Task.Name)
				scrapeSingleDataPoint(collector, ch, params, metric, labels, svc)
			}
		}
	}
}
//Send a single dataPoint to the Prometheus lib
// scrapeSingleDataPoint fetches the statistics for a single metric/dimension
// combination from CloudWatch and publishes the latest datapoint's values
// (Sum, Average, Maximum, Minimum, SampleCount and any extended statistics)
// on the Prometheus channel. Returns the CloudWatch error, if any.
//
// NOTE(review): all statistics are published under the same metric.Desc, so
// they are distinguishable only by the configured labels — confirm this is the
// intended exposition model.
func scrapeSingleDataPoint(collector *cwCollector, ch chan<- prometheus.Metric, params *cloudwatch.GetMetricStatisticsInput, metric *cwMetric, labels []string, svc *cloudwatch.CloudWatch) error {
	resp, err := svc.GetMetricStatistics(params)
	totalRequests.Inc()

	if err != nil {
		collector.ErroneousRequests.Inc()
		fmt.Println(err)
		return err
	}

	// There's nothing in there, don't publish the metric.
	if len(resp.Datapoints) == 0 {
		return nil
	}

	// Pick the datapoint with the most recent timestamp.
	dp := getLatestDatapoint(resp.Datapoints)

	if dp.Sum != nil {
		ch <- prometheus.MustNewConstMetric(metric.Desc, metric.ValType, float64(*dp.Sum), labels...)
	}
	if dp.Average != nil {
		ch <- prometheus.MustNewConstMetric(metric.Desc, metric.ValType, float64(*dp.Average), labels...)
	}
	if dp.Maximum != nil {
		ch <- prometheus.MustNewConstMetric(metric.Desc, metric.ValType, float64(*dp.Maximum), labels...)
	}
	if dp.Minimum != nil {
		ch <- prometheus.MustNewConstMetric(metric.Desc, metric.ValType, float64(*dp.Minimum), labels...)
	}
	if dp.SampleCount != nil {
		ch <- prometheus.MustNewConstMetric(metric.Desc, metric.ValType, float64(*dp.SampleCount), labels...)
	}
	for e := range dp.ExtendedStatistics {
		ch <- prometheus.MustNewConstMetric(metric.Desc, metric.ValType, float64(*dp.ExtendedStatistics[e]), labels...)
	}
	return nil
}
|
Technofy/cloudwatch_exporter
|
aws.go
|
GO
|
apache-2.0
| 6,778 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.sort.sortdata;
import java.io.File;
import java.io.FileFilter;
import java.util.AbstractQueue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.PriorityQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.carbondata.common.CarbonIterator;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.block.SegmentProperties;
import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
import org.apache.carbondata.core.scan.result.iterator.RawResultIterator;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
import org.apache.carbondata.processing.loading.sort.SortStepRowHandler;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.log4j.Logger;
/**
 * Performs the final merge phase of an external sort: all intermediate sort
 * temp files (and optionally in-memory sorted iterators from compaction) are
 * fed into a priority queue, and rows are handed out in globally sorted order
 * through the {@link CarbonIterator} contract ({@link #hasNext()}/{@link #next()}).
 */
public class SingleThreadFinalSortFilesMerger extends CarbonIterator<Object[]> {
  /**
   * LOGGER
   */
  private static final Logger LOGGER =
      LogServiceFactory.getLogService(SingleThreadFinalSortFilesMerger.class.getName());

  /**
   * Lock guarding concurrent additions to the record holder heap while the
   * reader threads initialize the temp file chunk holders in parallel.
   */
  private static final Object LOCKOBJECT = new Object();

  /**
   * Min-heap of chunk holders; the root always yields the smallest pending row
   * (ordering is defined by SortTempFileChunkHolder's comparator).
   */
  private AbstractQueue<SortTempFileChunkHolder> recordHolderHeapLocal;

  /**
   * Table name, used to identify which temp files belong to this merge.
   */
  private String tableName;

  private SortParameters sortParameters;

  private SortStepRowHandler sortStepRowHandler;

  /**
   * Directories that may contain sort temp files to merge.
   */
  private String[] tempFileLocation;

  // Maximum number of threads used to open and read the temp files in parallel.
  private int maxThreadForSorting;

  private ExecutorService executorService;

  // Futures of the holder-initialization tasks; inspected later for failures.
  private List<Future<Void>> mergerTask;

  /**
   * @param tempFileLocation directories to scan for sort temp files
   * @param tableName        table whose files should be merged
   * @param sortParameters   sort configuration (also yields the range id)
   */
  public SingleThreadFinalSortFilesMerger(String[] tempFileLocation, String tableName,
      SortParameters sortParameters) {
    this.tempFileLocation = tempFileLocation;
    this.tableName = tableName;
    this.sortParameters = sortParameters;
    this.sortStepRowHandler = new SortStepRowHandler(sortParameters);
    try {
      maxThreadForSorting = Integer.parseInt(CarbonProperties.getInstance()
          .getProperty(CarbonCommonConstants.CARBON_MERGE_SORT_READER_THREAD,
              CarbonCommonConstants.CARBON_MERGE_SORT_READER_THREAD_DEFAULTVALUE));
    } catch (NumberFormatException e) {
      // Fall back to the documented default when the property is malformed.
      maxThreadForSorting =
          Integer.parseInt(CarbonCommonConstants.CARBON_MERGE_SORT_READER_THREAD_DEFAULTVALUE);
    }
    this.mergerTask = new ArrayList<>();
  }

  /**
   * This method will be used to merge the merged files: it collects the temp
   * files for this range and initializes the priority queue over them.
   *
   * @throws CarbonDataWriterException
   */
  public void startFinalMerge() throws CarbonDataWriterException {
    List<File> filesToMerge = getFilesToMergeSort();
    if (filesToMerge.size() == 0) {
      LOGGER.info("No file to merge in final merge stage");
      return;
    }

    startSorting(filesToMerge);
  }

  /**
   * Below method will be used to add in memory raw result iterator to priority queue.
   * This will be called in case of compaction, when it is compacting sorted and unsorted
   * both type of carbon data file.
   * This method will add sorted file's RawResultIterator to priority queue using
   * InMemorySortTempChunkHolder as wrapper.
   *
   * @param sortedRawResultMergerList iterators over already-sorted data
   * @param segmentProperties         segment schema metadata
   * @param noDicAndComplexColumns    no-dictionary and complex columns
   * @param measureDataType           data types of the measure columns
   */
  public void addInMemoryRawResultIterator(List<RawResultIterator> sortedRawResultMergerList,
      SegmentProperties segmentProperties, CarbonColumn[] noDicAndComplexColumns,
      DataType[] measureDataType) {
    for (RawResultIterator rawResultIterator : sortedRawResultMergerList) {
      InMemorySortTempChunkHolder inMemorySortTempChunkHolder =
          new InMemorySortTempChunkHolder(rawResultIterator, segmentProperties,
              noDicAndComplexColumns, sortParameters, measureDataType);
      // Prime the holder with its first row before adding it to the heap.
      if (inMemorySortTempChunkHolder.hasNext()) {
        inMemorySortTempChunkHolder.readRow();
        recordHolderHeapLocal.add(inMemorySortTempChunkHolder);
      }
    }
  }

  /**
   * Scans every configured temp directory for files belonging to this table
   * and range id, returning the full list of files to merge.
   */
  private List<File> getFilesToMergeSort() {
    final int rangeId = sortParameters.getRangeId();

    FileFilter fileFilter = new FileFilter() {
      public boolean accept(File pathname) {
        // Temp file names are prefixed with "<tableName>_<rangeId>".
        return pathname.getName().startsWith(tableName + '_' + rangeId);
      }
    };

    // get all the merged files
    List<File> files = new ArrayList<File>(tempFileLocation.length);
    for (String tempLoc : tempFileLocation) {
      File[] subFiles = new File(tempLoc).listFiles(fileFilter);
      if (null != subFiles && subFiles.length > 0) {
        files.addAll(Arrays.asList(subFiles));
      }
    }

    return files;
  }

  /**
   * Below method will be used to start the sorting process. This method will get
   * all the temp files present in the sort temp folder, then it will create the
   * record holder heap, and then it will read the first record from each file and
   * initialize the heap. Initialization runs on a thread pool of size
   * maxThreadForSorting and waits up to 2 hours for completion.
   *
   * @throws CarbonDataWriterException
   */
  private void startSorting(List<File> files) throws CarbonDataWriterException {
    if (files.size() == 0) {
      LOGGER.info("No files to merge sort");
      return;
    }
    LOGGER.info("Started Final Merge");

    LOGGER.info("Number of temp file: " + files.size());

    // create record holder heap
    createRecordHolderQueue(files.size());

    // iterate over file list and create chunk holder and add to heap
    LOGGER.info("Started adding first record from each file");
    this.executorService = Executors.newFixedThreadPool(maxThreadForSorting);

    for (final File tempFile : files) {

      Callable<Void> callable = new Callable<Void>() {
        @Override
        public Void call() {
          // create chunk holder
          SortTempFileChunkHolder sortTempFileChunkHolder =
              new SortTempFileChunkHolder(tempFile, sortParameters, tableName, true);
          try {
            // initialize the holder and prime it with its first row
            sortTempFileChunkHolder.initialize();
            sortTempFileChunkHolder.readRow();
          } catch (CarbonSortKeyAndGroupByException ex) {
            sortTempFileChunkHolder.closeStream();
            notifyFailure(ex);
          }
          // the heap is not thread-safe, so additions are serialized
          synchronized (LOCKOBJECT) {
            recordHolderHeapLocal.add(sortTempFileChunkHolder);
          }
          return null;
        }
      };
      mergerTask.add(executorService.submit(callable));
    }
    executorService.shutdown();
    try {
      executorService.awaitTermination(2, TimeUnit.HOURS);
    } catch (Exception e) {
      throw new CarbonDataWriterException(e);
    }
    checkFailure();
    LOGGER.info("final merger Heap Size" + this.recordHolderHeapLocal.size());
  }

  /**
   * Re-throws (wrapped) any exception raised by the holder-initialization tasks.
   */
  private void checkFailure() {
    for (int i = 0; i < mergerTask.size(); i++) {
      try {
        mergerTask.get(i).get();
      } catch (InterruptedException | ExecutionException e) {
        throw new CarbonDataWriterException(e);
      }
    }
  }

  /**
   * This method will be used to create the heap which will be used to hold
   * the chunk of data.
   */
  private void createRecordHolderQueue(int size) {
    // creating record holder heap
    this.recordHolderHeapLocal = new PriorityQueue<SortTempFileChunkHolder>(size);
  }

  /**
   * Called from a worker thread on initialization failure: releases resources
   * and logs the error (the failure itself is re-thrown later by checkFailure()).
   */
  private synchronized void notifyFailure(Throwable throwable) {
    close();
    LOGGER.error(throwable);
  }

  /**
   * This method will be used to get the sorted sort temp row from the sort temp files.
   *
   * @return sorted row, converted to the 3-parted output format
   * @throws NoSuchElementException when no more rows are available
   */
  public Object[] next() {
    if (hasNext()) {
      IntermediateSortTempRow sortTempRow = getSortedRecordFromFile();
      return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
    } else {
      throw new NoSuchElementException("No more elements to return");
    }
  }

  /**
   * This method will be used to get the next sorted record from the heap.
   *
   * @return sorted record
   */
  private IntermediateSortTempRow getSortedRecordFromFile() throws CarbonDataWriterException {
    IntermediateSortTempRow row = null;

    // poll the top object from heap
    // heap maintains binary tree which is based on heap condition that will
    // be based on comparator we are passing the heap
    // when we call poll it will always delete the root of the tree and then
    // it does a trickle-down operation; complexity is log(n)
    SortTempFileChunkHolder poll = this.recordHolderHeapLocal.poll();

    // get the row from chunk
    row = poll.getRow();

    // check if there is no entry left in this holder
    if (!poll.hasNext()) {
      // if chunk is empty then close the stream
      poll.closeStream();

      // return row (the exhausted holder is NOT re-added to the heap)
      return row;
    }

    // read new row
    try {
      poll.readRow();
    } catch (CarbonSortKeyAndGroupByException e) {
      close();
      throw new CarbonDataWriterException(e);
    }

    // add the holder back to the heap so its next row competes
    this.recordHolderHeapLocal.add(poll);

    // return row
    return row;
  }

  /**
   * This method will be used to check whether any more element is present or
   * not.
   *
   * @return true if more rows are available
   */
  public boolean hasNext() {
    return this.recordHolderHeapLocal.size() > 0;
  }

  /**
   * Stops any outstanding initialization work and closes every remaining
   * holder's stream. Safe to call multiple times.
   */
  public void close() {
    if (null != executorService && !executorService.isShutdown()) {
      executorService.shutdownNow();
    }
    if (null != recordHolderHeapLocal) {
      SortTempFileChunkHolder sortTempFileChunkHolder;
      while (!recordHolderHeapLocal.isEmpty()) {
        sortTempFileChunkHolder = recordHolderHeapLocal.poll();
        if (null != sortTempFileChunkHolder) {
          sortTempFileChunkHolder.closeStream();
        }
      }
    }
  }
}
|
jackylk/incubator-carbondata
|
processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
|
Java
|
apache-2.0
| 10,940 |
/*
* #%L
* asio integration
* %%
* Copyright (C) 2013 - 2015 Research Group Scientific Computing, University of Vienna
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package at.ac.univie.isc.asio.matcher;
import at.ac.univie.isc.asio.sql.ConvertToTable;
import com.google.common.base.Charsets;
import com.google.common.base.Throwables;
import com.google.common.collect.Table;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import javax.sql.rowset.RowSetProvider;
import javax.sql.rowset.WebRowSet;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
/**
 * Hamcrest matcher that parses a raw SQL result payload (CSV or webrowset XML)
 * into a row/column {@link Table} and compares it against an expected table.
 */
abstract class SqlResultMatcher extends TypeSafeMatcher<String> {

  /** Matches payloads in CSV format. */
  static class Csv extends SqlResultMatcher {
    Csv(final Table<Integer, String, String> expected) {
      super(expected);
    }

    @Override
    protected Table<Integer, String, String> parse(final InputStream data) throws IOException {
      return ConvertToTable.fromCsv(data);
    }
  }

  /** Matches payloads in webrowset XML format. */
  static class Webrowset extends SqlResultMatcher {
    Webrowset(final Table<Integer, String, String> expected) {
      super(expected);
    }

    @Override
    protected Table<Integer, String, String> parse(final InputStream data) throws IOException, SQLException {
      final WebRowSet webRowSet = RowSetProvider.newFactory().createWebRowSet();
      webRowSet.readXml(data);
      return ConvertToTable.fromResultSet(webRowSet);
    }
  }

  private final Table<Integer, String, String> expected;

  SqlResultMatcher(final Table<Integer, String, String> expected) {
    this.expected = expected;
  }

  /** Converts the raw payload stream into a table representation. */
  protected abstract Table<Integer, String, String> parse(final InputStream data) throws Exception;

  @Override
  protected boolean matchesSafely(final String item) {
    final Table<Integer, String, String> actual = tableOf(item);
    return expected.equals(actual);
  }

  @Override
  public void describeTo(final Description description) {
    description.appendText(" sql result-set containing ").appendValue(expected);
  }

  @Override
  protected void describeMismatchSafely(final String item, final Description mismatchDescription) {
    mismatchDescription.appendText("was ").appendValue(tableOf(item));
  }

  /** Decodes the payload as UTF-8 and parses it, rethrowing any parse failure unchecked. */
  private Table<Integer, String, String> tableOf(final String item) {
    final byte[] bytes = item.getBytes(Charsets.UTF_8);
    try {
      return parse(new ByteArrayInputStream(bytes));
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }
}
|
pyranja/asio
|
integration/src/main/java/at/ac/univie/isc/asio/matcher/SqlResultMatcher.java
|
Java
|
apache-2.0
| 2,993 |
/*
* Copyright 2011-2017 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.apigateway.internal;
import com.amazonaws.Request;
import com.amazonaws.Response;
import com.amazonaws.handlers.RequestHandler2;
import java.util.Map;
/**
 * Request handler that defaults the HTTP {@code Accept} header to
 * {@code application/json} when no Accept header was marshalled by the operation.
 */
public final class AcceptJsonRequestHandler extends RequestHandler2 {

    private static final String ACCEPT = "Accept";
    private static final String APPLICATION_JSON = "application/json";

    @Override
    public void beforeRequest(Request<?> request) {
        Map<String, String> headers = request.getHeaders();
        // Some operations marshall to this header, so don't clobber if it exists.
        if (headers.containsKey(ACCEPT)) {
            return;
        }
        request.addHeader(ACCEPT, APPLICATION_JSON);
    }

    @Override
    public void afterResponse(Request<?> request, Response<?> response) {
        // No-op.
    }

    @Override
    public void afterError(
            Request<?> request,
            Response<?> response,
            Exception e) {
        // No-op.
    }
}
|
dagnir/aws-sdk-java
|
aws-java-sdk-api-gateway/src/main/java/com/amazonaws/services/apigateway/internal/AcceptJsonRequestHandler.java
|
Java
|
apache-2.0
| 1,404 |
package fr.utc.leapband.view;
import jade.gui.GuiEvent;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Font;
import java.beans.PropertyChangeEvent;
import java.io.File;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.SwingConstants;
import fr.utc.leapband.sma.user.UserAgent;
import fr.utc.leapband.utilities.Constance;
import fr.utc.leapband.utilities.ImageFlowItem;
@SuppressWarnings("serial")
/**
 * Swing view that lets the user pick an instrument from an image carousel.
 * Navigation is driven by Leap Motion gesture events received through
 * {@link #propertyChange(PropertyChangeEvent)}; a GRAB gesture posts a
 * SELECT_INSTRUMENT_EVENT to the owning JADE {@link UserAgent}.
 */
@SuppressWarnings("serial")
public class InstrumentSelectView extends JAgentFrame{

  // Carousel of instrument images loaded from images/instrument/.
  private ImageFlow imageFlow = null;
  // Button returning to the home screen (behavior attached via HomeMouseListener).
  private JButton home;

  public InstrumentSelectView(UserAgent agent) {
    super(agent);
    this.setTitle("ChooseView");
    this.setSize(Constance.Windows_width, Constance.Windows_height);
    this.setDefaultCloseOperation(EXIT_ON_CLOSE);

    JPanel imageFlowPanel=new JPanel(new BorderLayout());
    imageFlowPanel.setBackground(new Color(110, 110, 110));

    home = new JButton();
    Icon icon = new ImageIcon("images/home.png");
    // NOTE(review): setBounds() has no effect under BorderLayout, and components
    // added without a constraint all go to CENTER — confirm the intended layout.
    home.setBounds(0,0,100,100);
    home.setIcon(icon);
    imageFlowPanel.add(home);

    JLabel choose=new JLabel("Choose your instrument");
    choose.setBounds(500, 10, 500, 200);
    choose.setFont(new Font("Chalkboard", Font.PLAIN, 40));
    choose.setHorizontalAlignment(SwingConstants.CENTER);
    choose.setForeground(Color.ORANGE);
    imageFlowPanel.add(choose);

    imageFlow = new ImageFlow(new File("images/instrument/"),agent);
    imageFlowPanel.add(imageFlow);

    this.add(imageFlowPanel);

    home.addMouseListener(new HomeMouseListener(this,home));
    // Render the home button as a borderless icon-only button.
    home.setContentAreaFilled(false);
    home.setOpaque(false);
    home.setBorderPainted(false);
  }

  /**
   * Reacts to "swipe" property changes while this view is visible:
   * LEFT/RIGHT scroll the carousel, GRAB confirms the selection
   * (ignored when the selected index is 2 — presumably the placeholder slot;
   * TODO confirm).
   */
  @Override
  public void propertyChange(PropertyChangeEvent evt) {
    super.propertyChange(evt);
    if (isVisible()) {
      if (evt.getPropertyName().equals("swipe")) {
        if ((String)evt.getNewValue() == "LEFT") {
          imageFlow.scrollAndAnimateBy(-1);
        } else if ((String)evt.getNewValue() == "RIGHT") {
          imageFlow.scrollAndAnimateBy(1);
        } else if ((String)evt.getNewValue() == "GRAB" && imageFlow.getSelectedIndex() != 2) {
          // Post the chosen instrument back to the agent's GUI event queue.
          GuiEvent ev = new GuiEvent(this,UserAgent.SELECT_INSTRUMENT_EVENT);
          ev.addParameter(UserAgent.instrument_Mode);
          ev.addParameter(((ImageFlowItem)imageFlow.getSelectedValue()).getLabel());
          myAgent.postGuiEvent(ev);
        }
      }
    }
  }
}
|
hukewei/leapband
|
src/fr/utc/leapband/view/InstrumentSelectView.java
|
Java
|
apache-2.0
| 2,477 |
package com.meteorite.core.datasource;
import org.junit.Test;
public class DataSourceManagerTest {

    /** Placeholder test for export functionality; no assertions implemented yet. */
    @Test
    public void testExp() throws Exception {

    }
}
|
weijiancai/metaui
|
core/src/test/java/com/meteorite/core/datasource/DataSourceManagerTest.java
|
Java
|
apache-2.0
| 166 |
//Copyright 2015 Sebastian Bingel
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
namespace sebingel.sharpchievements
{
    /// <summary>
    /// Delegate for the event that is fired when an AchievementCondition is completed.
    /// </summary>
    /// <param name="achievementCondition">The AchievementCondition that was completed.</param>
    public delegate void AchievementConditionCompletedHandler(IAchievementCondition achievementCondition);
}
|
sebingel/sharpchievements
|
Achievements/AchievementConditionCompletedHandler.cs
|
C#
|
apache-2.0
| 938 |
package org.vaadin.addons.producttour.button;
import com.vaadin.event.ConnectorEvent;
import org.vaadin.addons.producttour.provider.StepButtonProvider;
import org.vaadin.addons.producttour.provider.StepProvider;
import org.vaadin.addons.producttour.provider.TourProvider;
import org.vaadin.addons.producttour.step.Step;
import org.vaadin.addons.producttour.tour.Tour;
/**
* Base class for all events that were caused by a {@link StepButton}.
*/
/**
 * Base class for all events that were caused by a {@link StepButton}.
 * Provides convenient navigation from the source button up to its step and tour.
 */
public class StepButtonEvent extends ConnectorEvent implements TourProvider, StepProvider,
    StepButtonProvider {

  /**
   * Construct a new event.
   *
   * @param source
   *     The button that caused the event
   */
  public StepButtonEvent(StepButton source) {
    super(source);
  }

  /**
   * Shortcut to the tour of the event.
   *
   * @return The tour, or <code>null</code> if the source button is not attached to
   *     a step that belongs to a tour.
   */
  @Override
  public Tour getTour() {
    Step step = getStep();
    if (step == null) {
      return null;
    }
    return step.getTour();
  }

  /**
   * Shortcut to the step of the event.
   *
   * @return The step, or <code>null</code> if the source button is not attached to
   *     any step.
   */
  @Override
  public Step getStep() {
    StepButton button = getStepButton();
    if (button == null) {
      return null;
    }
    return button.getStep();
  }

  /**
   * The button that is the source of the event.
   *
   * @return The button that caused the event
   */
  @Override
  public StepButton getStepButton() {
    return (StepButton) getSource();
  }
}
|
Juchar/product-tour
|
product-tour-addon/src/main/java/org/vaadin/addons/producttour/button/StepButtonEvent.java
|
Java
|
apache-2.0
| 1,731 |
<?php
/*
* @package s9e\TextFormatter
* @copyright Copyright (c) 2010-2019 The s9e Authors
* @license http://www.opensource.org/licenses/mit-license.php The MIT License
*/
namespace s9e\TextFormatter\Configurator\JavaScript;
use Exception;
/**
* Base class for JavaScript minifiers with optional on-disk caching.
*/
abstract class Minifier
{
	/**
	* @var string|null Directory used for cached minified output, or null to disable caching
	*/
	public $cacheDir;

	/**
	* @var bool Whether to return the original source instead of throwing on failure
	*/
	public $keepGoing = \false;

	/**
	* Minify the given JavaScript source.
	*
	* @param  string $src
	* @return string
	*/
	abstract public function minify($src);

	/**
	* Return the minified source, from cache if enabled, falling back to $src
	* on failure when $keepGoing is set.
	*
	* @param  string $src
	* @return string
	*/
	public function get($src)
	{
		try
		{
			if (isset($this->cacheDir))
			{
				return $this->getFromCache($src);
			}

			return $this->minify($src);
		}
		catch (Exception $e)
		{
			if (!$this->keepGoing)
				throw $e;
		}

		return $src;
	}

	/**
	* Return a value that differentiates this minifier's configuration in cache keys.
	*
	* @return mixed
	*/
	public function getCacheDifferentiator()
	{
		return '';
	}

	/**
	* Return the minified source from cache, minifying and storing it on a miss.
	*
	* @param  string $src
	* @return string
	*/
	protected function getFromCache($src)
	{
		$diff = $this->getCacheDifferentiator();
		$hash = \sha1(\serialize([\get_class($this), $diff, $src]));
		$file = $this->cacheDir . '/minifier.' . $hash . '.js';

		if (!\file_exists($file))
		{
			\file_put_contents($file, $this->minify($src));
		}

		return \file_get_contents($file);
	}
}
|
drthomas21/WordPress_Tutorial
|
community_htdocs/vendor/s9e/text-formatter/src/Configurator/JavaScript/Minifier.php
|
PHP
|
apache-2.0
| 1,018 |
using System;
using System.Data;
using System.Collections.Generic;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using DTcms.Common;
namespace DTcms.Web.admin.Transport
{
    /// <summary>
    /// Admin page for adding or editing a goods (cargo) record.
    /// The "action" query-string parameter selects Add vs. Edit mode.
    /// </summary>
    public partial class goods_edit : Web.UI.ManagePage
    {
        // NOTE(review): appears unused in this code-behind — confirm before removing.
        string defaultpassword = "0|0|0|0"; // default displayed password placeholder
        protected string action = DTEnums.ActionEnum.Add.ToString(); // current operation type (Add/Edit)
        private int id = 0; // record id being edited (0 in Add mode)

        protected void Page_Load(object sender, EventArgs e)
        {
            string _action = DTRequest.GetQueryString("action");
            if (!string.IsNullOrEmpty(_action) && _action == DTEnums.ActionEnum.Edit.ToString())
            {
                this.action = DTEnums.ActionEnum.Edit.ToString(); // switch to Edit mode
                this.id = DTRequest.GetQueryInt("id");
                if (this.id == 0)
                {
                    JscriptMsg("传输参数不正确!", "back", "Error");
                    return;
                }
                if (!new BLL.Goods().Exists(this.id))
                {
                    JscriptMsg("信息不存在或已被删除!", "back", "Error");
                    return;
                }
            }
            if (!Page.IsPostBack)
            {
                ChkAdminLevel("goods_list", DTEnums.ActionEnum.View.ToString()); // check permission
                TreeBind(""); // bind category tree
                if (action == DTEnums.ActionEnum.Edit.ToString()) // Edit mode
                {
                    ShowInfo(this.id);
                }
            }
        }

        #region Bind categories=================================
        // Intentionally empty: category binding is not implemented on this page.
        private void TreeBind(string strWhere)
        {

        }
        #endregion

        #region Populate form==================================
        // Loads the record's fields into the form controls.
        private void ShowInfo(int _id)
        {
            BLL.Goods bll = new BLL.Goods();
            Model.Goods model = bll.GetModel(_id);
            txtName.Text = model.Name;
            txtCategroy.Text = model.CategoryName;
            txtCode.Text = model.Code;
            txtUnit.Text = model.Unit;
        }
        #endregion

        #region Add operation==================================
        // Creates a new record from the form; returns true on success.
        private bool DoAdd()
        {
            bool result = false;
            Model.Goods model = new Model.Goods();
            BLL.Goods bll = new BLL.Goods();

            model.Name = txtName.Text.Trim();
            model.CategoryName = txtCategroy.Text.Trim();
            model.Code = string.IsNullOrEmpty(txtCode.Text.Trim()) ? "" : txtCode.Text.Trim();
            model.Unit = txtUnit.Text.Trim();

            if (bll.Add(model) > 0)
            {
                AddAdminLog(DTEnums.ActionEnum.Add.ToString(), "添加客户:" + model.Name); // audit log
                result = true;
            }
            return result;
        }
        #endregion

        #region Edit operation=================================
        // Updates the existing record from the form; returns true on success.
        private bool DoEdit(int _id)
        {
            bool result = false;
            BLL.Goods bll = new BLL.Goods();
            Model.Goods model = bll.GetModel(_id);

            model.Name = txtName.Text.Trim();
            model.CategoryName = txtCategroy.Text.Trim();
            model.Code = string.IsNullOrEmpty(txtCode.Text.Trim()) ? "" : txtCode.Text.Trim();
            model.Unit = txtUnit.Text.Trim();

            if (bll.Update(model))
            {
                AddAdminLog(DTEnums.ActionEnum.Edit.ToString(), "修改客户信息:" + model.Name); // audit log
                result = true;
            }
            return result;
        }
        #endregion

        // Save button handler: dispatches to Add or Edit depending on mode.
        protected void btnSubmit_Click(object sender, EventArgs e)
        {
            if (action == DTEnums.ActionEnum.Edit.ToString()) // Edit mode
            {
                ChkAdminLevel("goods_list", DTEnums.ActionEnum.Edit.ToString()); // check permission
                if (!DoEdit(this.id))
                {
                    JscriptMsg("保存过程中发生错误!", "", "Error");
                    return;
                }
                JscriptMsg("修改客户成功!", "goods_list.aspx", "Success");
            }
            else // Add mode
            {
                ChkAdminLevel("goods_list", DTEnums.ActionEnum.Add.ToString()); // check permission
                if (!DoAdd())
                {
                    JscriptMsg("保存过程中发生错误!", "", "Error");
                    return;
                }
                JscriptMsg("添加客户成功!", "goods_list.aspx", "Success");
            }
        }
    }
|
LutherW/MTMS
|
Source/DTcms.Web/admin/Transport/goods_edit.aspx.cs
|
C#
|
apache-2.0
| 4,623 |
#include "test_helper_library.h"
// Converts Euler angles (radians) to a quaternion, composing the rotations in
// Z (yaw), then Y (pitch), then X (roll) order via Eigen's AngleAxis.
// The result is written to q as [w, x, y, z].
void euler2quat(double roll, double pitch, double yaw, double* q) {
  // Compute quaternion
  Eigen::Quaterniond q_ = Eigen::AngleAxisd(yaw, Eigen::Vector3d::UnitZ()) *
                          Eigen::AngleAxisd(pitch, Eigen::Vector3d::UnitY()) *
                          Eigen::AngleAxisd(roll, Eigen::Vector3d::UnitX());
  // Writing quaternion components to array output
  q[0] = q_.w();
  q[1] = q_.x();
  q[2] = q_.y();
  q[3] = q_.z();
}
// Computes the per-axis RMS error between two 3D trajectories over the samples
// [start_index, trajectory_length). Results are written to error[0..2].
//
// BUG FIX: the sample count was previously trajectory_length - start_index + 1,
// one more than the number of summed samples, which biased the RMS low.
void calculate3dRmsError(double truth[][3], double est[][3],
                         const int trajectory_length, const int start_index,
                         double* error) {
  // Number of samples actually accumulated by the loop below.
  const int num_samples = trajectory_length - start_index;
  // Guard against an empty range to avoid division by zero.
  if (num_samples <= 0) {
    error[0] = 0.0;
    error[1] = 0.0;
    error[2] = 0.0;
    return;
  }
  // Looping over trajectories summing squared errors
  double error_sum[3] = {0.0, 0.0, 0.0};
  for (int i = start_index; i < trajectory_length; i++) {
    error_sum[0] += pow(truth[i][0] - est[i][0], 2);
    error_sum[1] += pow(truth[i][1] - est[i][1], 2);
    error_sum[2] += pow(truth[i][2] - est[i][2], 2);
  }
  // Averaging
  error_sum[0] /= num_samples;
  error_sum[1] /= num_samples;
  error_sum[2] /= num_samples;
  // Square rooting to obtain RMS
  error[0] = sqrt(error_sum[0]);
  error[1] = sqrt(error_sum[1]);
  error[2] = sqrt(error_sum[2]);
}
// Computes the RMS of the vector part (x, y, z) of the error quaternion
// q_err = q_est^{-1} * q_truth, over the sample window
// [start_index, trajectory_length). Rows of truth/est are quaternions in
// [w, x, y, z] order.
//
// Changes vs. the original: the error is accumulated directly inside one
// loop starting at start_index (the original materialized a full
// variable-length error array — a non-standard VLA — and computed entries
// it never used), and the average now divides by the actual sample count;
// the previous "+ 1" over-counted by one and biased the RMS low.
void calculateQuaternionRmsError(double truth[][4], double est[][4],
                                 const int trajectory_length,
                                 const int start_index, double* error) {
  double error_sum[3] = {0.0, 0.0, 0.0};
  for (int i = start_index; i < trajectory_length; i++) {
    // Quaternion components are stored w-first.
    Eigen::Quaterniond orientation_truth(truth[i][0], truth[i][1], truth[i][2],
                                         truth[i][3]);
    Eigen::Quaterniond orientation_estimate(est[i][0], est[i][1], est[i][2],
                                            est[i][3]);
    // Error quaternion: rotation taking the estimate onto the truth.
    Eigen::Quaterniond error_quaternion =
        orientation_estimate.inverse() * orientation_truth;
    // Only the vector part carries the (small-angle) attitude error.
    error_sum[0] += pow(error_quaternion.x(), 2);
    error_sum[1] += pow(error_quaternion.y(), 2);
    error_sum[2] += pow(error_quaternion.z(), 2);
  }
  const int num_samples = trajectory_length - start_index;
  if (num_samples <= 0) {
    // Empty window: report zero error instead of dividing by zero.
    error[0] = error[1] = error[2] = 0.0;
    return;
  }
  for (int axis = 0; axis < 3; axis++) {
    error[axis] = sqrt(error_sum[axis] / num_samples);
  }
}
|
ethz-asl/ros_vrpn_client
|
src/test/library/test_helper_library.cpp
|
C++
|
apache-2.0
| 2,913 |
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular.module('ui.dashboard', ['ui.bootstrap', 'ui.sortable']);
angular.module('ui.dashboard')
.directive('dashboard', ['WidgetModel', 'WidgetDefCollection', '$uibModal', 'DashboardState', '$log', function (WidgetModel, WidgetDefCollection, $uibModal, DashboardState, $log) {
return {
restrict: 'A',
templateUrl: function(element, attr) {
return attr.templateUrl ? attr.templateUrl : 'components/directives/dashboard/dashboard.html';
},
scope: true,
controller: ['$scope', '$attrs', function (scope, attrs) {
// default options
var defaults = {
stringifyStorage: true,
hideWidgetSettings: false,
hideWidgetClose: false,
settingsModalOptions: {
templateUrl: 'components/directives/dashboard/widget-settings-template.html',
controller: 'WidgetSettingsCtrl'
},
onSettingsClose: function(result, widget) { // NOTE: dashboard scope is also passed as 3rd argument
jQuery.extend(true, widget, result);
},
onSettingsDismiss: function(reason) { // NOTE: dashboard scope is also passed as 2nd argument
$log.info('widget settings were dismissed. Reason: ', reason);
}
};
// from dashboard="options"
scope.options = scope.$eval(attrs.dashboard);
// Ensure settingsModalOptions exists on scope.options
scope.options.settingsModalOptions = scope.options.settingsModalOptions !== undefined ? scope.options.settingsModalOptions : {};
// Set defaults
_.defaults(scope.options.settingsModalOptions, defaults.settingsModalOptions);
// Shallow options
_.defaults(scope.options, defaults);
// sortable options
var sortableDefaults = {
stop: function () {
scope.saveDashboard();
},
handle: '.widget-header',
distance: 5
};
scope.sortableOptions = angular.extend({}, sortableDefaults, scope.options.sortableOptions || {});
}],
link: function (scope) {
// Save default widget config for reset
scope.defaultWidgets = scope.options.defaultWidgets;
scope.widgetDefs = new WidgetDefCollection(scope.options.widgetDefinitions);
var count = 1;
// Instantiate new instance of dashboard state
scope.dashboardState = new DashboardState(
scope.options.storage,
scope.options.storageId,
scope.options.storageHash,
scope.widgetDefs,
scope.options.stringifyStorage
);
/**
 * Resolves a widget definition and instantiates a WidgetModel from it.
 * Accepts either a definition object or a bare widget name string.
 * When neither the request nor the definition carries a title, a
 * sequential default title ("Widget N") is assigned.
 *
 * @param  {Object|string} widgetToInstantiate definition object or widget name
 * @return {WidgetModel} the instantiated widget
 * @throws {string} when no definition with the given name is registered
 */
function getWidget(widgetToInstantiate) {
  // Normalize a bare name string into a minimal definition object.
  if (typeof widgetToInstantiate === 'string') {
    widgetToInstantiate = {
      name: widgetToInstantiate
    };
  }

  var defaultWidgetDefinition = scope.widgetDefs.getByName(widgetToInstantiate.name);
  if (!defaultWidgetDefinition) {
    throw 'Widget ' + widgetToInstantiate.name + ' is not found.';
  }

  // Determine the title for the new widget.
  // (Removed an unused `var title;` declaration that was dead code.)
  if (!widgetToInstantiate.title && !defaultWidgetDefinition.title) {
    widgetToInstantiate.title = 'Widget ' + count++;
  }

  // Instantiation
  return new WidgetModel(defaultWidgetDefinition, widgetToInstantiate);
}
/**
* Instantiates a new widget and append it the dashboard
* @param {Object} widgetToInstantiate The definition object of the widget to be instantiated
*/
scope.addWidget = function (widgetToInstantiate, doNotSave) {
var widget = getWidget(widgetToInstantiate);
// Add to the widgets array
scope.widgets.push(widget);
if (!doNotSave) {
scope.saveDashboard();
}
return widget;
};
/**
* Instantiates a new widget and insert it a beginning of dashboard
*/
scope.prependWidget = function(widgetToInstantiate, doNotSave) {
var widget = getWidget(widgetToInstantiate);
// Add to the widgets array
scope.widgets.unshift(widget);
if (!doNotSave) {
scope.saveDashboard();
}
return widget;
};
/**
* Removes a widget instance from the dashboard
* @param {Object} widget The widget instance object (not a definition object)
*/
scope.removeWidget = function (widget) {
scope.widgets.splice(_.indexOf(scope.widgets, widget), 1);
scope.saveDashboard();
};
/**
* Opens a dialog for setting and changing widget properties
* @param {Object} widget The widget instance object
*/
scope.openWidgetSettings = function (widget) {
// Set up $uibModal options
var options = _.defaults(
{ scope: scope },
widget.settingsModalOptions,
scope.options.settingsModalOptions);
// Ensure widget is resolved
options.resolve = {
widget: function () {
return widget;
}
};
// Create the modal
var modalInstance = $uibModal.open(options);
var onClose = widget.onSettingsClose || scope.options.onSettingsClose;
var onDismiss = widget.onSettingsDismiss || scope.options.onSettingsDismiss;
// Set resolve and reject callbacks for the result promise
modalInstance.result.then(
function (result) {
// Call the close callback
onClose(result, widget, scope);
//AW Persist title change from options editor
scope.$emit('widgetChanged', widget);
},
function (reason) {
// Call the dismiss callback
onDismiss(reason, scope);
}
);
};
/**
* Remove all widget instances from dashboard
*/
scope.clear = function (doNotSave) {
scope.widgets = [];
if (doNotSave === true) {
return;
}
scope.saveDashboard();
};
/**
* Used for preventing default on click event
* @param {Object} event A click event
* @param {Object} widgetDef A widget definition object
*/
scope.addWidgetInternal = function (event, widgetDef) {
event.preventDefault();
scope.addWidget(widgetDef);
};
/**
 * Persists the current widget set via the dashboardState service.
 *
 * Without explicitSave, every call saves immediately. With explicitSave
 * enabled, the state is written only when `force` is truthy; otherwise
 * the call just increments options.unsavedChangeCount.
 *
 * @param {boolean} force save even when explicitSave is enabled
 */
scope.saveDashboard = function (force) {
  if (!scope.options.explicitSave) {
    return scope.dashboardState.save(scope.widgets);
  }
  // Explicit-save mode: lazily initialize the unsaved-change counter.
  if (!angular.isNumber(scope.options.unsavedChangeCount)) {
    scope.options.unsavedChangeCount = 0;
  }
  if (force) {
    scope.options.unsavedChangeCount = 0;
    return scope.dashboardState.save(scope.widgets);
  }
  ++scope.options.unsavedChangeCount;
};
/**
* Wraps saveDashboard for external use.
*/
scope.externalSaveDashboard = function(force) {
if (angular.isDefined(force)) {
return scope.saveDashboard(force);
} else {
return scope.saveDashboard(true);
}
};
/**
* Clears current dash and instantiates widget definitions
* @param {Array} widgets Array of definition objects
*/
scope.loadWidgets = function (widgets) {
// AW dashboards are continuously saved today (no "save" button).
//scope.defaultWidgets = widgets;
scope.savedWidgetDefs = widgets;
scope.clear(true);
_.each(widgets, function (widgetDef) {
scope.addWidget(widgetDef, true);
});
};
/**
* Resets widget instances to default config
* @return {[type]} [description]
*/
scope.resetWidgetsToDefault = function () {
scope.loadWidgets(scope.defaultWidgets);
scope.saveDashboard();
};
// Set default widgets array
var savedWidgetDefs = scope.dashboardState.load();
// Success handler
function handleStateLoad(saved) {
scope.options.unsavedChangeCount = 0;
if (saved && saved.length) {
scope.loadWidgets(saved);
} else if (scope.defaultWidgets) {
scope.loadWidgets(scope.defaultWidgets);
} else {
scope.clear(true);
}
}
if (angular.isArray(savedWidgetDefs)) {
handleStateLoad(savedWidgetDefs);
} else if (savedWidgetDefs && angular.isObject(savedWidgetDefs) && angular.isFunction(savedWidgetDefs.then)) {
savedWidgetDefs.then(handleStateLoad, handleStateLoad);
} else {
handleStateLoad();
}
// expose functionality externally
// functions are appended to the provided dashboard options
scope.options.addWidget = scope.addWidget;
scope.options.prependWidget = scope.prependWidget;
scope.options.loadWidgets = scope.loadWidgets;
scope.options.saveDashboard = scope.externalSaveDashboard;
scope.options.removeWidget = scope.removeWidget;
scope.options.openWidgetSettings = scope.openWidgetSettings;
scope.options.clear = scope.clear;
scope.options.resetWidgetsToDefault = scope.resetWidgetsToDefault;
scope.options.currentWidgets = scope.widgets;
// save state
scope.$on('widgetChanged', function (event) {
event.stopPropagation();
scope.saveDashboard();
});
}
};
}]);
|
DataTorrent/malhar-angular-dashboard
|
src/components/directives/dashboard/dashboard.js
|
JavaScript
|
apache-2.0
| 10,601 |
package com.gentics.mesh.core.endpoint.admin;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import com.gentics.mesh.cli.BootstrapInitializer;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.rest.common.GenericMessageResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.reactivex.Completable;
import io.reactivex.schedulers.Schedulers;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
/**
 * Handler for the admin shutdown endpoint.
 *
 * <p>Acknowledges the request first and then triggers the actual Mesh
 * shutdown on a separate thread, since the server can no longer reply
 * once termination is underway.</p>
 */
public class ShutdownHandler {

	private static final Logger log = LoggerFactory.getLogger(ShutdownHandler.class);

	private final BootstrapInitializer boot;

	@Inject
	public ShutdownHandler(BootstrapInitializer boot) {
		this.boot = boot;
	}

	/**
	 * Invoke the shutdown process.
	 *
	 * @param context action context; used only to acknowledge the request
	 */
	public void shutdown(InternalActionContext context) {
		log.info("Initiating shutdown");
		// Reply before shutting down — afterwards no response could be sent.
		context.send(new GenericMessageResponse("Shutdown initiated"), HttpResponseStatus.OK);
		Completable.fromAction(() -> {
			boot.mesh().shutdownAndTerminate(1);
		})
			// Run off the calling thread; give up if shutdown hangs for a minute.
			.subscribeOn(Schedulers.newThread()).timeout(1, TimeUnit.MINUTES)
			.subscribe(() -> log.info("Shutdown successful"), err -> {
				log.error("Shutdown failed", err);
				log.error("Forcing process exit");
				// Last resort: halt() exits without running shutdown hooks,
				// which might themselves be stuck.
				Runtime.getRuntime().halt(1);
			});
	}
}
|
gentics/mesh
|
core/src/main/java/com/gentics/mesh/core/endpoint/admin/ShutdownHandler.java
|
Java
|
apache-2.0
| 1,369 |
package com.shareyourproxy.api.domain.model;
import android.os.Parcelable;
import android.support.annotation.Nullable;
import com.shareyourproxy.api.domain.factory.AutoValueClass;
import java.util.HashMap;
import java.util.HashSet;
import auto.parcel.AutoParcel;
import static com.shareyourproxy.util.ObjectUtils.buildFullName;
/**
* Users have a basic profile that contains their specific {@link Channel}s, {@link Contact}s, and {@link Group}s.
*/
@AutoParcel
@AutoValueClass(autoValueClass = AutoParcel_User.class)
public abstract class User implements Parcelable {
/**
* User Constructor.
*
* @param id user unique ID
* @param firstName user first name
* @param lastName user last name
* @param email user email
* @param profileURL user profile picture
* @param coverURL user cover image
* @param channels user channels
* @param groups user contactGroups
* @param contacts user contacts
* @param version user apk version
* @return the entered user data
*/
public static User create(
String id, String firstName, String lastName, String email, String profileURL,
String coverURL, HashMap<String, Channel> channels, HashMap<String, Group> groups,
HashSet<String> contacts, int version) {
String fullName = buildFullName(firstName, lastName);
return builder().id(id).first(firstName).last(lastName).fullName(fullName).email(email)
.profileURL(profileURL).coverURL(coverURL).channels(channels)
.groups(groups).contacts(contacts).version(version).build();
}
/**
* User builder.
*
* @return this User.
*/
public static Builder builder() {
return new AutoParcel_User.Builder();
}
/**
* Get users unique ID.
*
* @return first name
*/
public abstract String id();
/**
* Get users first name.
*
* @return first name
*/
public abstract String first();
/**
* Get users last name.
*
* @return last name
*/
@Nullable
public abstract String last();
/**
* Get users first + " " + last.
*
* @return last name
*/
public abstract String fullName();
/**
* Get users email.
*
* @return email
*/
@Nullable
public abstract String email();
/**
* Get user profile image.
*
* @return profile image
*/
@Nullable
public abstract String profileURL();
/**
* Get user profile image.
*
* @return profile image
*/
@Nullable
public abstract String coverURL();
/**
* Get users channels.
*
* @return channels
*/
@Nullable
public abstract HashMap<String, Channel> channels();
/**
* Get users contacts.
*
* @return contacts
*/
@Nullable
public abstract HashSet<String> contacts();
/**
* Get users contactGroups.
*
* @return contactGroups
*/
@Nullable
public abstract HashMap<String, Group> groups();
/**
* Get users apk version
*
* @return apk code
*/
@Nullable
public abstract Integer version();
/**
* Validation conditions.
*/
@AutoParcel.Validate
public void validate() {
if (first().length() == 0) {
throw new IllegalStateException("Need a valid first name");
}
}
/**
* User Builder.
*/
@AutoParcel.Builder
public interface Builder {
/**
* Set user id.
*
* @param id user unique id
* @return user id
*/
Builder id(String id);
/**
* Set user first name.
*
* @param firstName user first name
* @return first name string
*/
Builder first(String firstName);
/**
* Set users last name.
*
* @param lastName user last name
* @return last name string
*/
@Nullable
Builder last(String lastName);
/**
* Set users first + last.
*
* @param fullName user first + last
* @return last name string
*/
Builder fullName(String fullName);
/**
* Set user email.
*
* @param email this email
* @return email string
*/
@Nullable
Builder email(String email);
/**
* Set the user profile image URL.
*
* @param profileURL profile image url
* @return URL string
*/
@Nullable
Builder profileURL(String profileURL);
/**
* Set the user profile image URL.
*
* @param coverURL profile cover url
* @return URL string
*/
@Nullable
Builder coverURL(String coverURL);
/**
* Set this {@link User}s {@link Contact}s
*
* @param contacts user contacts
* @return List {@link Contact}
*/
@Nullable
Builder contacts(HashSet<String> contacts);
/**
* Set this {@link User}s {@link Group}s
*
* @param groups user contactGroups
* @return List {@link Group}
*/
@Nullable
Builder groups(HashMap<String, Group> groups);
/**
* Set this {@link User}s {@link Channel}s
*
* @param channels user channels
* @return List {@link Channel}
*/
@Nullable
Builder channels(HashMap<String, Channel> channels);
/**
* Set this users apk version
*
* @param version user apk version
* @return version of build
*/
@Nullable
Builder version(Integer version);
/**
* BUILD.
*
* @return User
*/
User build();
}
}
|
ProxyApp/Proxy
|
Application/src/main/java/com/shareyourproxy/api/domain/model/User.java
|
Java
|
apache-2.0
| 5,970 |
package ru.job4j.synchronize;
import net.jcip.annotations.GuardedBy;
import net.jcip.annotations.ThreadSafe;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * In-memory storage of {@link User} records keyed by user id.
 *
 * <p>All access to the backing map is serialized on this instance's
 * monitor, as declared by the {@code @GuardedBy("this")} annotations.</p>
 */
@ThreadSafe
public class UserStorage {
    /**
     * Map holding users, keyed by user id.
     */
    private Map<Integer, User> storage = new HashMap<>();

    /**
     * Adds a user if no user with the same id is stored yet.
     * @param user the user to add.
     * @return true if the user was added.
     */
    @GuardedBy("this")
    public synchronized boolean add(User user) {
        boolean result = false;
        if (!storage.containsKey(user.getId())) {
            storage.put(user.getId(), user);
            result = true;
        }
        return result;
    }

    /**
     * Replaces the stored user that has the same id.
     * @param user the user to update.
     * @return true if the user was found and updated.
     */
    @GuardedBy("this")
    public synchronized boolean update(User user) {
        boolean result = false;
        if (storage.containsKey(user.getId())) {
            storage.put(user.getId(), user);
            result = true;
        }
        return result;
    }

    /**
     * Removes the user with the given user's id.
     * @param user the user to delete.
     * @return true if the user was found and removed.
     */
    @GuardedBy("this")
    public synchronized boolean delete(User user) {
        boolean result = false;
        if (storage.containsKey(user.getId())) {
            storage.remove(user.getId());
            result = true;
        }
        return result;
    }

    /**
     * Transfers an amount between two users.
     *
     * <p>Succeeds only when both ids exist and the sender's balance is
     * strictly greater than the amount. NOTE(review): transferring the
     * exact full balance is rejected by the {@code >} comparison —
     * confirm whether {@code >=} was intended.</p>
     *
     * @param fromId sender id.
     * @param toId receiver id.
     * @param amount amount to transfer.
     * @return true if the transfer completed.
     */
    @GuardedBy("this")
    public synchronized boolean transfer(int fromId, int toId, int amount) {
        boolean result = false;
        if (storage.containsKey(fromId) && storage.containsKey(toId)) {
            if (storage.get(fromId).getAmount() > amount) {
                // User is treated as immutable here: each side is replaced
                // with a new instance carrying the adjusted balance.
                int temp = storage.get(fromId).getAmount();
                storage.replace(fromId, new User(fromId, temp - amount));
                temp = storage.get(toId).getAmount();
                storage.replace(toId, new User(toId, temp + amount));
                result = true;
            }
        }
        return result;
    }

    /**
     * Returns a snapshot list of all users currently stored.
     * @return list of all users in this storage.
     */
    @GuardedBy("this")
    public synchronized List<User> getUsers() {
        List<User> users = new ArrayList<User>(storage.values());
        return users;
    }
}
|
alexeremeev/aeremeev
|
chapter_007/src/main/java/ru/job4j/synchronize/UserStorage.java
|
Java
|
apache-2.0
| 2,810 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test_session_expiration;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import common.CountdownWatcher;
/**
 * Demonstrates ZooKeeper session expiration: a second client is attached
 * with the first client's session id and password, then closed — which
 * invalidates the shared session — so the first client's next operation
 * fails with {@link KeeperException.SessionExpiredException}.
 *
 * <p>{@code args[0]} is the ZooKeeper connect string (host:port list).</p>
 */
public class TestSessionExpiration {
    public static void main(String[] args) throws Exception {
        System.out.println("Starting zk1");
        // Open a client connection - zk1
        CountdownWatcher watch1 = new CountdownWatcher("zk1");
        ZooKeeper zk1 = new ZooKeeper(args[0], 10000, watch1);
        watch1.waitForConnected(10000);
        // Sanity read to confirm the session is live.
        zk1.getData("/", false, null);

        System.out.println("Starting zk2");
        // now attach a second client zk2 with the same sessionid/passwd
        CountdownWatcher watch2 = new CountdownWatcher("zk2");
        ZooKeeper zk2 = new ZooKeeper(args[0], 10000, watch2,
                zk1.getSessionId(), zk1.getSessionPasswd());
        watch2.waitForConnected(10000);

        // close the second client, the session is now invalid
        System.out.println("Closing zk2");
        zk2.close();

        System.out.println("Attempting use of zk1");
        try {
            // this will throw session expired exception
            zk1.getData("/", false, null);
        } catch (KeeperException.SessionExpiredException e) {
            System.out.println("Got session expired on zk1!");
            return;
        }
        // 3.2.0 and later:
        // There's a gotcha though - In version 3.2.0 and later if you
        // run this on against a quorum (vs standalone) you may get a
        // KeeperException.SessionMovedException instead. This is
        // thrown if a client moved from one server to a second, but
        // then attempts to talk to the first server (should never
        // happen, but could in certain bad situations), this example
        // simulates that situation in the sense that the client with
        // session id zk1.getSessionId() has moved
        //
        // One way around session moved on a quorum is to have each
        // client connect to the same, single server in the cluster
        // (so pass a single host:port rather than a list). This
        // will ensure that you get the session expiration, and
        // not session moved exception.
        //
        // Again, if you run against standalone server you won't see
        // this. If you run against a server version 3.1.x or earlier
        // you won't see this.
        // If you run against quorum you need to easily determine which
        // server zk1 is attached to - we are adding this capability
        // in 3.3.0 - and have zk2 attach to that same server.
        System.err.println("Oops, this should NOT have happened!");
    }
}
|
phunt/zkexamples
|
src/test_session_expiration/TestSessionExpiration.java
|
Java
|
apache-2.0
| 3,492 |
package tamil.lang.api.join;
import tamil.lang.known.IKnownWord;
/**
* <p>
* Joins known words and based on the type of புணர்ச்சி.
*
* </p>
*
* @author velsubra
*/
public interface KnownWordsJoiner {
public static enum TYPE {
VEATTUMAI,
ALVAZHI
}
/**
* adds word to the current sum of the joiner by means of doing புணர்ச்சி
* @param word the word to be added
*/
public void addVaruMozhi(IKnownWord word, TYPE type);
/**
* adds the current sum of the joiner into the given word by doing புணர்ச்சி
* @param word the word to be inserted
*/
public void addNilaiMozhi(IKnownWord word, TYPE type);
/**
* The effective word that is generated.
* @return the sum out of one or more additions using {@link #addVaruMozhi(tamil.lang.known.IKnownWord, tamil.lang.api.join.KnownWordsJoiner.TYPE)}
*/
public IKnownWord getSum();
}
|
velsubra/Tamil
|
ezhuththu/src/main/java/tamil/lang/api/join/KnownWordsJoiner.java
|
Java
|
apache-2.0
| 984 |
package main
import (
"fmt"
"github.com/satori/go.uuid"
)
// main demonstrates the satori/go.uuid API: generating a fresh version-4
// UUID and parsing a fixed UUID string back into a value.
func main() {
	// Generate a random (version 4) UUID.
	generated := uuid.NewV4()
	fmt.Printf("UUIDv4: %s\n", generated)

	// Parse a canonical textual UUID.
	parsed, err := uuid.FromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8")
	if err != nil {
		fmt.Printf("Something gone wrong: %s\n", err)
	}
	fmt.Printf("Successfully parsed: %s\n", parsed)
}
|
gothxx/backyard
|
go/src/test/makeUUID.go
|
GO
|
apache-2.0
| 411 |
import os
import threading
import datetime
import cloudstorage as gcs
from google.appengine.api import app_identity
class FileServer():
    """Thin wrapper around Google Cloud Storage for reading file contents.

    The target bucket is resolved once at construction time; objects are
    fetched from it on demand.
    """

    def __init__(self):
        # Bucket name comes from the BUCKET_NAME env var, falling back to
        # the App Engine application's default GCS bucket.
        bucket_name = os.environ.get('BUCKET_NAME',
                                     app_identity.get_default_gcs_bucket_name())
        # GCS paths are rooted at '/<bucket>'.
        self.bucket = '/' + bucket_name

    def GetFileForPath(self, path):
        """Return the contents of '<bucket>/<path>', or None if absent.

        NOTE(review): the file handle is not closed if read() raises a
        non-NotFoundError exception — confirm whether that matters for
        the cloudstorage client in use.
        """
        try:
            full_path = self.bucket + '/' + path
            file_obj = gcs.open(full_path)
            data = file_obj.read()
            file_obj.close()
            return data
        except gcs.NotFoundError:
            # Missing object is an expected case, signalled by None.
            return None
|
benmorss/excalibur
|
cloudserver.py
|
Python
|
apache-2.0
| 563 |
package io.skysail.server.app.demo.timetable.course.resources;
import org.restlet.resource.ResourceException;
import io.skysail.domain.core.repos.Repository;
import io.skysail.server.ResourceContextId;
import io.skysail.server.app.demo.DemoApplication;
import io.skysail.server.app.demo.timetable.course.Course;
import io.skysail.server.app.demo.timetable.timetables.Timetable;
import io.skysail.server.restlet.resources.PostEntityServerResource;
/**
 * REST resource that creates a new {@link Course} inside an existing
 * {@link Timetable}, identified by the "id" request attribute.
 */
public class PostCourseResource extends PostEntityServerResource<Course> {

    private DemoApplication app;

    public PostCourseResource() {
        // Link title shown for this resource in generated listings.
        addToContext(ResourceContextId.LINK_TITLE, "Create new ");
    }

    @Override
    protected void doInit() throws ResourceException {
        app = (DemoApplication) getApplication();
    }

    /**
     * Supplies an empty {@link Course} used as the template for form binding.
     */
    @Override
    public Course createEntityTemplate() {
        return new Course();
    }

    /**
     * Adds the posted course to the owning timetable and persists it.
     */
    @Override
    public void addEntity(Course entity) {
        // Subject subject = SecurityUtils.getSubject();
        // NOTE(review): findOne may return null for an unknown id —
        // confirm that routing guarantees the timetable exists before
        // this handler runs.
        Timetable entityRoot = app.getTtRepo().findOne(getAttribute("id"));
        entityRoot.getCourses().add(entity);
        app.getTtRepo().update(entityRoot, app.getApplicationModel());
    }

    /**
     * After a successful POST, redirect to the courses listing.
     */
    @Override
    public String redirectTo() {
        return super.redirectTo(CoursesResource.class);
    }
}
|
evandor/skysail
|
skysail.server.app.demo/src/io/skysail/server/app/demo/timetable/course/resources/PostCourseResource.java
|
Java
|
apache-2.0
| 1,307 |
/*
* MinIO Cloud Storage, (C) 2017-2020 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package s3
import (
"context"
"encoding/json"
"io"
"math/rand"
"net/http"
"net/url"
"strings"
"time"
"github.com/minio/cli"
miniogo "github.com/minio/minio-go/v6"
"github.com/minio/minio-go/v6/pkg/credentials"
"github.com/minio/minio-go/v6/pkg/tags"
minio "github.com/minio/minio/cmd"
"github.com/minio/minio-go/v6/pkg/encrypt"
"github.com/minio/minio-go/v6/pkg/s3utils"
xhttp "github.com/minio/minio/cmd/http"
"github.com/minio/minio/cmd/logger"
"github.com/minio/minio/pkg/auth"
"github.com/minio/minio/pkg/bucket/policy"
)
const (
s3Backend = "s3"
)
func init() {
const s3GatewayTemplate = `NAME:
{{.HelpName}} - {{.Usage}}
USAGE:
{{.HelpName}} {{if .VisibleFlags}}[FLAGS]{{end}} [ENDPOINT]
{{if .VisibleFlags}}
FLAGS:
{{range .VisibleFlags}}{{.}}
{{end}}{{end}}
ENDPOINT:
s3 server endpoint. Default ENDPOINT is https://s3.amazonaws.com
EXAMPLES:
1. Start minio gateway server for AWS S3 backend
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_ACCESS_KEY{{.AssignmentOperator}}accesskey
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_SECRET_KEY{{.AssignmentOperator}}secretkey
{{.Prompt}} {{.HelpName}}
2. Start minio gateway server for AWS S3 backend with edge caching enabled
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_ACCESS_KEY{{.AssignmentOperator}}accesskey
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_SECRET_KEY{{.AssignmentOperator}}secretkey
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_CACHE_DRIVES{{.AssignmentOperator}}"/mnt/drive1,/mnt/drive2,/mnt/drive3,/mnt/drive4"
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_CACHE_EXCLUDE{{.AssignmentOperator}}"bucket1/*,*.png"
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_CACHE_QUOTA{{.AssignmentOperator}}90
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_CACHE_AFTER{{.AssignmentOperator}}3
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_CACHE_WATERMARK_LOW{{.AssignmentOperator}}75
{{.Prompt}} {{.EnvVarSetCommand}} MINIO_CACHE_WATERMARK_HIGH{{.AssignmentOperator}}85
{{.Prompt}} {{.HelpName}}
`
minio.RegisterGatewayCommand(cli.Command{
Name: s3Backend,
Usage: "Amazon Simple Storage Service (S3)",
Action: s3GatewayMain,
CustomHelpTemplate: s3GatewayTemplate,
HideHelpCommand: true,
})
}
// Handler for 'minio gateway s3' command line.
//
// Defaults the backend endpoint to https://s3.amazonaws.com when no
// argument is given, resolves the listen address, validates the gateway
// arguments (fatal on error), and starts the gateway.
func s3GatewayMain(ctx *cli.Context) {
	args := ctx.Args()
	if !ctx.Args().Present() {
		args = cli.Args{"https://s3.amazonaws.com"}
	}

	serverAddr := ctx.GlobalString("address")
	// Fall back to the command-local --address flag when the global one
	// is unset or still the default port.
	if serverAddr == "" || serverAddr == ":"+minio.GlobalMinioDefaultPort {
		serverAddr = ctx.String("address")
	}
	// Validate gateway arguments.
	logger.FatalIf(minio.ValidateGatewayArguments(serverAddr, args.First()), "Invalid argument")

	// Start the gateway..
	minio.StartGateway(ctx, &S3{args.First()})
}
// S3 implements Gateway.
type S3 struct {
host string
}
// Name implements Gateway interface.
func (g *S3) Name() string {
return s3Backend
}
const letterBytes = "abcdefghijklmnopqrstuvwxyz01234569"
const (
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
letterIdxMax = 63 / letterIdxBits // # of letter indices fitting in 63 bits
)
// randString generates random names and prepends them with a known prefix.
func randString(n int, src rand.Source, prefix string) string {
b := make([]byte, n)
// A rand.Int63() generates 63 random bits, enough for letterIdxMax letters!
for i, cache, remain := n-1, src.Int63(), letterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = src.Int63(), letterIdxMax
}
if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i--
}
cache >>= letterIdxBits
remain--
}
return prefix + string(b[0:30-len(prefix)])
}
// Chains all credential types, in the following order:
// - AWS env vars (i.e. AWS_ACCESS_KEY_ID)
// - AWS creds file (i.e. AWS_SHARED_CREDENTIALS_FILE or ~/.aws/credentials)
// - Static credentials provided by user (i.e. MINIO_ACCESS_KEY)
var defaultProviders = []credentials.Provider{
&credentials.EnvAWS{},
&credentials.FileAWSCredentials{},
&credentials.EnvMinio{},
}
// Chains all credential types, in the following order:
// - AWS env vars (i.e. AWS_ACCESS_KEY_ID)
// - AWS creds file (i.e. AWS_SHARED_CREDENTIALS_FILE or ~/.aws/credentials)
// - IAM profile based credentials. (performs an HTTP
// call to a pre-defined endpoint, only valid inside
// configured ec2 instances)
var defaultAWSCredProviders = []credentials.Provider{
&credentials.EnvAWS{},
&credentials.FileAWSCredentials{},
&credentials.IAM{
Client: &http.Client{
Transport: minio.NewGatewayHTTPTransport(),
},
},
&credentials.EnvMinio{},
}
// newS3 - Initializes a new client by auto probing S3 server signature.
// An empty urlStr defaults to the public AWS S3 endpoint. The credential
// chain differs by endpoint: AWS endpoints additionally get IAM-instance
// credentials; all endpoints fall back to env vars, the AWS credentials
// file, and MinIO env vars.
func newS3(urlStr string) (*miniogo.Core, error) {
	if urlStr == "" {
		urlStr = "https://s3.amazonaws.com"
	}

	u, err := url.Parse(urlStr)
	if err != nil {
		return nil, err
	}

	// Override default params if the host is provided
	endpoint, secure, err := minio.ParseGatewayEndpoint(urlStr)
	if err != nil {
		return nil, err
	}

	var creds *credentials.Credentials
	if s3utils.IsAmazonEndpoint(*u) {
		// If we see an Amazon S3 endpoint, then we use more ways to fetch backend credentials.
		// Specifically IAM style rotating credentials are only supported with AWS S3 endpoint.
		creds = credentials.NewChainCredentials(defaultAWSCredProviders)
	} else {
		creds = credentials.NewChainCredentials(defaultProviders)
	}

	options := miniogo.Options{
		Creds:        creds,
		Secure:       secure,
		// Region is derived from the endpoint URL when possible.
		Region:       s3utils.GetRegionFromURL(*u),
		BucketLookup: miniogo.BucketLookupAuto,
	}

	clnt, err := miniogo.NewWithOptions(endpoint, &options)
	if err != nil {
		return nil, err
	}

	return &miniogo.Core{Client: clnt}, nil
}
// NewGatewayLayer returns s3 ObjectLayer.
func (g *S3) NewGatewayLayer(creds auth.Credentials) (minio.ObjectLayer, error) {
	// creds are ignored here, since S3 gateway implements chaining
	// all credentials.
	clnt, err := newS3(g.host)
	if err != nil {
		return nil, err
	}

	// Wrap the gateway transport so request/byte counters are recorded for
	// every call made through either the S3 client or the probe HTTP client.
	metrics := minio.NewMetrics()
	t := &minio.MetricsTransport{
		Transport: minio.NewGatewayHTTPTransport(),
		Metrics:   metrics,
	}
	// Set custom transport
	clnt.SetCustomTransport(t)

	// Probe with a random bucket name to validate the configured
	// credentials: "AccessDenied" still proves the signature was accepted,
	// so only other errors are treated as fatal.
	probeBucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "probe-bucket-sign-")

	// Check if the provided keys are valid.
	if _, err = clnt.BucketExists(probeBucketName); err != nil {
		if miniogo.ToErrorResponse(err).Code != "AccessDenied" {
			return nil, err
		}
	}

	s := s3Objects{
		Client:  clnt,
		Metrics: metrics,
		HTTPClient: &http.Client{
			Transport: t,
		},
	}

	// Enables single encryption of KMS is configured.
	if minio.GlobalKMS != nil {
		encS := s3EncObjects{s}

		// Start stale enc multipart uploads cleanup routine.
		go encS.cleanupStaleEncMultipartUploads(minio.GlobalContext,
			minio.GlobalMultipartCleanupInterval, minio.GlobalMultipartExpiry)

		return &encS, nil
	}
	return &s, nil
}
// Production - s3 gateway is production ready.
// NOTE(review): presumably consulted at gateway startup to decide whether to
// print a maturity warning — confirm against the gateway command code.
func (g *S3) Production() bool {
	return true
}
// s3Objects implements gateway for MinIO and S3 compatible object storage servers.
type s3Objects struct {
	minio.GatewayUnsupported // supplies NotImplemented stubs for APIs the gateway does not support

	// Client is the upstream S3 client used for all bucket/object operations.
	Client *miniogo.Core
	// HTTPClient shares the metrics-wrapped transport and is used for
	// backend liveness probes (see StorageInfo / IsReady).
	HTTPClient *http.Client
	// Metrics aggregates the request/byte counters recorded by the
	// MetricsTransport installed in NewGatewayLayer.
	Metrics *minio.Metrics
}
// GetMetrics returns this gateway's metrics
// (accumulated by the MetricsTransport installed in NewGatewayLayer).
func (l *s3Objects) GetMetrics(ctx context.Context) (*minio.Metrics, error) {
	return l.Metrics, nil
}
// Shutdown saves any gateway metadata to disk
// if necessary and reload upon next restart.
// The S3 gateway keeps no local persistent state, so this is a no-op.
func (l *s3Objects) Shutdown(ctx context.Context) error {
	return nil
}
// StorageInfo is not relevant to S3 backend; only the gateway type and a
// backend liveness flag are reported.
func (l *s3Objects) StorageInfo(ctx context.Context, _ bool) (si minio.StorageInfo, _ []error) {
	online := minio.IsBackendOnline(ctx, l.HTTPClient, l.Client.EndpointURL().String())
	si.Backend.Type = minio.BackendGateway
	si.Backend.GatewayOnline = online
	return si, nil
}
// MakeBucketWithLocation creates a new container on the S3 backend.
// Object-lock enabled buckets are not supported by the gateway.
func (l *s3Objects) MakeBucketWithLocation(ctx context.Context, bucket, location string, lockEnabled bool) error {
	if lockEnabled {
		return minio.NotImplemented{}
	}

	// Verify if bucket name is valid.
	// We are using a separate helper function here to validate bucket
	// names instead of IsValidBucketName() because there is a possibility
	// that certains users might have buckets which are non-DNS compliant
	// in us-east-1 and we might severely restrict them by not allowing
	// access to these buckets.
	// Ref - http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
	if s3utils.CheckValidBucketName(bucket) != nil {
		return minio.BucketNameInvalid{Bucket: bucket}
	}
	// Fix: the original ended with `return err` on a known-nil error, which
	// read as if an error could still escape; return nil explicitly.
	if err := l.Client.MakeBucket(bucket, location); err != nil {
		return minio.ErrorRespToObjectError(err, bucket)
	}
	return nil
}
// GetBucketInfo gets bucket metadata.
// If ListBuckets is disallowed for the credentials, falls back to a
// HeadBucket-style existence check.
func (l *s3Objects) GetBucketInfo(ctx context.Context, bucket string) (bi minio.BucketInfo, e error) {
	buckets, err := l.Client.ListBuckets()
	if err != nil {
		// Listbuckets may be disallowed, proceed to check if
		// bucket indeed exists, if yes return success.
		var ok bool
		if ok, err = l.Client.BucketExists(bucket); err != nil {
			return bi, minio.ErrorRespToObjectError(err, bucket)
		}
		if !ok {
			return bi, minio.BucketNotFound{Bucket: bucket}
		}
		// Fix: this previously returned `bi.Name`, but `bi` is the
		// zero-value named return here, so the bucket name came back
		// empty. Use the requested bucket name instead. The creation
		// date is unknown on this path, so "now" is used as a stand-in.
		return minio.BucketInfo{
			Name:    bucket,
			Created: time.Now().UTC(),
		}, nil
	}

	for _, bi := range buckets {
		if bi.Name != bucket {
			continue
		}
		return minio.BucketInfo{
			Name:    bi.Name,
			Created: bi.CreationDate,
		}, nil
	}
	return bi, minio.BucketNotFound{Bucket: bucket}
}
// ListBuckets lists all S3 buckets visible to the configured credentials.
func (l *s3Objects) ListBuckets(ctx context.Context) ([]minio.BucketInfo, error) {
	buckets, err := l.Client.ListBuckets()
	if err != nil {
		return nil, minio.ErrorRespToObjectError(err)
	}

	// Convert the client representation into the gateway's BucketInfo.
	b := make([]minio.BucketInfo, len(buckets))
	for i, bi := range buckets {
		b[i] = minio.BucketInfo{
			Name:    bi.Name,
			Created: bi.CreationDate,
		}
	}

	// Fix: the original returned `b, err` where err is known to be nil;
	// return nil explicitly for clarity.
	return b, nil
}
// DeleteBucket deletes a bucket on S3. The forceDelete flag is not forwarded
// to the backend; a plain bucket removal is issued.
func (l *s3Objects) DeleteBucket(ctx context.Context, bucket string, forceDelete bool) error {
	if err := l.Client.RemoveBucket(bucket); err != nil {
		return minio.ErrorRespToObjectError(err, bucket)
	}
	return nil
}
// ListObjects lists all blobs in S3 bucket filtered by prefix.
func (l *s3Objects) ListObjects(ctx context.Context, bucket string, prefix string, marker string, delimiter string, maxKeys int) (loi minio.ListObjectsInfo, e error) {
	result, err := l.Client.ListObjects(bucket, prefix, marker, delimiter, maxKeys)
	if err == nil {
		return minio.FromMinioClientListBucketResult(bucket, result), nil
	}
	return loi, minio.ErrorRespToObjectError(err, bucket)
}
// ListObjectsV2 lists all blobs in S3 bucket filtered by prefix.
func (l *s3Objects) ListObjectsV2(ctx context.Context, bucket, prefix, continuationToken, delimiter string, maxKeys int, fetchOwner bool, startAfter string) (loi minio.ListObjectsV2Info, e error) {
	result, err := l.Client.ListObjectsV2(bucket, prefix, continuationToken, fetchOwner, delimiter, maxKeys, startAfter)
	if err == nil {
		return minio.FromMinioClientListBucketV2Result(bucket, result), nil
	}
	return loi, minio.ErrorRespToObjectError(err, bucket)
}
// GetObjectNInfo - returns object info and locked object ReadCloser
func (l *s3Objects) GetObjectNInfo(ctx context.Context, bucket, object string, rs *minio.HTTPRangeSpec, h http.Header, lockType minio.LockType, opts minio.ObjectOptions) (gr *minio.GetObjectReader, err error) {
	var objInfo minio.ObjectInfo
	objInfo, err = l.GetObjectInfo(ctx, bucket, object, opts)
	if err != nil {
		return nil, minio.ErrorRespToObjectError(err, bucket, object)
	}

	// Resolve the requested HTTP range against the object's actual size.
	var startOffset, length int64
	startOffset, length, err = rs.GetOffsetLength(objInfo.Size)
	if err != nil {
		return nil, minio.ErrorRespToObjectError(err, bucket, object)
	}

	// Stream the object through a pipe so a reader can be returned
	// immediately; the writer side propagates any download error to the
	// reader via CloseWithError (a nil error closes the pipe cleanly).
	pr, pw := io.Pipe()
	go func() {
		err := l.GetObject(ctx, bucket, object, startOffset, length, pw, objInfo.ETag, opts)
		pw.CloseWithError(err)
	}()

	// Setup cleanup function to cause the above go-routine to
	// exit in case of partial read
	pipeCloser := func() { pr.Close() }
	return minio.NewGetObjectReaderFromReader(pr, objInfo, opts, pipeCloser)
}
// GetObject reads an object from S3. Supports additional
// parameters like offset and length which are synonymous with
// HTTP Range requests.
//
// startOffset indicates the starting read location of the object.
// length indicates the total length of the object.
func (l *s3Objects) GetObject(ctx context.Context, bucket string, key string, startOffset int64, length int64, writer io.Writer, etag string, o minio.ObjectOptions) error {
	// length == -1 means "read to the end"; any other negative length is
	// an invalid range.
	if length < 0 && length != -1 {
		return minio.ErrorRespToObjectError(minio.InvalidRange{}, bucket, key)
	}

	opts := miniogo.GetObjectOptions{}
	opts.ServerSideEncryption = o.ServerSideEncryption

	// Translate offset+length into an inclusive byte range.
	// NOTE(review): when startOffset > 0 and length == -1 no range is set,
	// so the whole object is fetched from offset 0 — confirm callers never
	// pass that combination.
	if startOffset >= 0 && length >= 0 {
		if err := opts.SetRange(startOffset, startOffset+length-1); err != nil {
			return minio.ErrorRespToObjectError(err, bucket, key)
		}
	}
	object, _, _, err := l.Client.GetObject(bucket, key, opts)
	if err != nil {
		return minio.ErrorRespToObjectError(err, bucket, key)
	}
	defer object.Close()

	if _, err := io.Copy(writer, object); err != nil {
		return minio.ErrorRespToObjectError(err, bucket, key)
	}
	return nil
}
// GetObjectInfo reads object info and replies back ObjectInfo.
func (l *s3Objects) GetObjectInfo(ctx context.Context, bucket string, object string, opts minio.ObjectOptions) (objInfo minio.ObjectInfo, err error) {
	statOpts := miniogo.StatObjectOptions{}
	statOpts.ServerSideEncryption = opts.ServerSideEncryption

	oi, err := l.Client.StatObject(bucket, object, statOpts)
	if err != nil {
		return minio.ObjectInfo{}, minio.ErrorRespToObjectError(err, bucket, object)
	}
	return minio.FromMinioClientObjectInfo(bucket, oi), nil
}
// PutObject creates a new object with the incoming data,
func (l *s3Objects) PutObject(ctx context.Context, bucket string, object string, r *minio.PutObjReader, opts minio.ObjectOptions) (objInfo minio.ObjectInfo, err error) {
	data := r.Reader

	// Object tags arrive encoded inside the user-defined metadata map;
	// parse them out and pass them to the client separately.
	var tagMap map[string]string
	if tagstr, ok := opts.UserDefined[xhttp.AmzObjectTagging]; ok && tagstr != "" {
		tagObj, err := tags.ParseObjectTags(tagstr)
		if err != nil {
			return objInfo, minio.ErrorRespToObjectError(err, bucket, object)
		}
		tagMap = tagObj.ToMap()
		delete(opts.UserDefined, xhttp.AmzObjectTagging)
	}
	putOpts := miniogo.PutObjectOptions{
		UserMetadata:         opts.UserDefined,
		ServerSideEncryption: opts.ServerSideEncryption,
		UserTags:             tagMap,
	}
	oi, err := l.Client.PutObject(bucket, object, data, data.Size(), data.MD5Base64String(), data.SHA256HexString(), putOpts)
	if err != nil {
		return objInfo, minio.ErrorRespToObjectError(err, bucket, object)
	}

	// On success, populate the key & metadata so they are present in the notification
	oi.Key = object
	oi.Metadata = minio.ToMinioClientObjectInfoMetadata(opts.UserDefined)

	return minio.FromMinioClientObjectInfo(bucket, oi), nil
}
// CopyObject copies an object from source bucket to a destination bucket.
func (l *s3Objects) CopyObject(ctx context.Context, srcBucket string, srcObject string, dstBucket string, dstObject string, srcInfo minio.ObjectInfo, srcOpts, dstOpts minio.ObjectOptions) (objInfo minio.ObjectInfo, err error) {
	// Honour conditional-copy preconditions supplied by the handler layer.
	if srcOpts.CheckCopyPrecondFn != nil && srcOpts.CheckCopyPrecondFn(srcInfo, "") {
		return minio.ObjectInfo{}, minio.PreConditionFailed{}
	}
	// Set this header such that following CopyObject() always sets the right metadata on the destination.
	// metadata input is already a trickled down value from interpreting x-amz-metadata-directive at
	// handler layer. So what we have right now is supposed to be applied on the destination object anyways.
	// So preserve it by adding "REPLACE" directive to save all the metadata set by CopyObject API.
	srcInfo.UserDefined["x-amz-metadata-directive"] = "REPLACE"
	srcInfo.UserDefined["x-amz-copy-source-if-match"] = srcInfo.ETag

	// Marshal the SSE headers (SSE-C copy-source for reading the source,
	// SSE for writing the destination) into the forwarded metadata map.
	header := make(http.Header)
	if srcOpts.ServerSideEncryption != nil {
		encrypt.SSECopy(srcOpts.ServerSideEncryption).Marshal(header)
	}

	if dstOpts.ServerSideEncryption != nil {
		dstOpts.ServerSideEncryption.Marshal(header)
	}
	for k, v := range header {
		srcInfo.UserDefined[k] = v[0]
	}

	if _, err = l.Client.CopyObject(srcBucket, srcObject, dstBucket, dstObject, srcInfo.UserDefined); err != nil {
		return objInfo, minio.ErrorRespToObjectError(err, srcBucket, srcObject)
	}
	// Stat the destination so the returned ObjectInfo reflects the copy.
	return l.GetObjectInfo(ctx, dstBucket, dstObject, dstOpts)
}
// DeleteObject deletes a blob in bucket.
func (l *s3Objects) DeleteObject(ctx context.Context, bucket string, object string) error {
	if err := l.Client.RemoveObject(bucket, object); err != nil {
		return minio.ErrorRespToObjectError(err, bucket, object)
	}
	return nil
}
// DeleteObjects deletes each named object in turn, collecting one
// per-object error slot in the returned slice (nil on success).
func (l *s3Objects) DeleteObjects(ctx context.Context, bucket string, objects []string) ([]error, error) {
	errs := make([]error, 0, len(objects))
	for _, object := range objects {
		errs = append(errs, l.DeleteObject(ctx, bucket, object))
	}
	return errs, nil
}
// ListMultipartUploads lists all multipart uploads.
func (l *s3Objects) ListMultipartUploads(ctx context.Context, bucket string, prefix string, keyMarker string, uploadIDMarker string, delimiter string, maxUploads int) (lmi minio.ListMultipartsInfo, e error) {
	result, err := l.Client.ListMultipartUploads(bucket, prefix, keyMarker, uploadIDMarker, delimiter, maxUploads)
	if err != nil {
		// NOTE(review): unlike most methods in this file the error is
		// returned raw rather than via minio.ErrorRespToObjectError —
		// confirm this is intentional.
		return lmi, err
	}

	return minio.FromMinioClientListMultipartsInfo(result), nil
}
// NewMultipartUpload upload object in multiple parts
func (l *s3Objects) NewMultipartUpload(ctx context.Context, bucket string, object string, o minio.ObjectOptions) (uploadID string, err error) {
	// Extract object tags from the metadata map (same convention as
	// PutObject) so they can be handed to the client separately.
	var tagMap map[string]string
	if tagStr, ok := o.UserDefined[xhttp.AmzObjectTagging]; ok {
		tagObj, err := tags.Parse(tagStr, true)
		if err != nil {
			return uploadID, minio.ErrorRespToObjectError(err, bucket, object)
		}
		tagMap = tagObj.ToMap()
		delete(o.UserDefined, xhttp.AmzObjectTagging)
	}

	// Create PutObject options
	opts := miniogo.PutObjectOptions{
		UserMetadata:         o.UserDefined,
		ServerSideEncryption: o.ServerSideEncryption,
		UserTags:             tagMap,
	}
	uploadID, err = l.Client.NewMultipartUpload(bucket, object, opts)
	if err != nil {
		return uploadID, minio.ErrorRespToObjectError(err, bucket, object)
	}
	return uploadID, nil
}
// PutObjectPart puts a part of object in bucket.
func (l *s3Objects) PutObjectPart(ctx context.Context, bucket string, object string, uploadID string, partID int, r *minio.PutObjReader, opts minio.ObjectOptions) (pi minio.PartInfo, e error) {
	reader := r.Reader
	info, err := l.Client.PutObjectPart(bucket, object, uploadID, partID, reader,
		reader.Size(), reader.MD5Base64String(), reader.SHA256HexString(), opts.ServerSideEncryption)
	if err != nil {
		return pi, minio.ErrorRespToObjectError(err, bucket, object)
	}
	return minio.FromMinioClientObjectPart(info), nil
}
// CopyObjectPart creates a part in a multipart upload by copying
// existing object or a part of it.
func (l *s3Objects) CopyObjectPart(ctx context.Context, srcBucket, srcObject, destBucket, destObject, uploadID string,
	partID int, startOffset, length int64, srcInfo minio.ObjectInfo, srcOpts, dstOpts minio.ObjectOptions) (p minio.PartInfo, err error) {
	// Honour conditional-copy preconditions supplied by the handler layer.
	if srcOpts.CheckCopyPrecondFn != nil && srcOpts.CheckCopyPrecondFn(srcInfo, "") {
		return minio.PartInfo{}, minio.PreConditionFailed{}
	}

	// The UserDefined map is replaced outright: only the copy-source ETag
	// condition and the SSE headers below are forwarded to the backend.
	srcInfo.UserDefined = map[string]string{
		"x-amz-copy-source-if-match": srcInfo.ETag,
	}
	header := make(http.Header)
	if srcOpts.ServerSideEncryption != nil {
		encrypt.SSECopy(srcOpts.ServerSideEncryption).Marshal(header)
	}

	if dstOpts.ServerSideEncryption != nil {
		dstOpts.ServerSideEncryption.Marshal(header)
	}
	for k, v := range header {
		srcInfo.UserDefined[k] = v[0]
	}

	completePart, err := l.Client.CopyObjectPart(srcBucket, srcObject, destBucket, destObject,
		uploadID, partID, startOffset, length, srcInfo.UserDefined)
	if err != nil {
		return p, minio.ErrorRespToObjectError(err, srcBucket, srcObject)
	}
	p.PartNumber = completePart.PartNumber
	p.ETag = completePart.ETag
	return p, nil
}
// GetMultipartInfo returns multipart info of the uploadId of the object.
// No backend call is made; the identifiers are simply echoed back.
func (l *s3Objects) GetMultipartInfo(ctx context.Context, bucket, object, uploadID string, opts minio.ObjectOptions) (result minio.MultipartInfo, err error) {
	result = minio.MultipartInfo{
		Bucket:   bucket,
		Object:   object,
		UploadID: uploadID,
	}
	return result, nil
}
// ListObjectParts returns all object parts for specified object in specified bucket
func (l *s3Objects) ListObjectParts(ctx context.Context, bucket string, object string, uploadID string, partNumberMarker int, maxParts int, opts minio.ObjectOptions) (lpi minio.ListPartsInfo, e error) {
	result, err := l.Client.ListObjectParts(bucket, object, uploadID, partNumberMarker, maxParts)
	if err != nil {
		// NOTE(review): error returned raw, not via ErrorRespToObjectError —
		// confirm this is intentional.
		return lpi, err
	}
	lpi = minio.FromMinioClientListPartsInfo(result)

	// If the first page was truncated before maxParts entries were
	// collected, keep following NextPartNumberMarker and merge every page's
	// parts into a single listing until the backend reports completion.
	if lpi.IsTruncated && maxParts > len(lpi.Parts) {
		partNumberMarker = lpi.NextPartNumberMarker
		for {
			result, err = l.Client.ListObjectParts(bucket, object, uploadID, partNumberMarker, maxParts)
			if err != nil {
				return lpi, err
			}

			nlpi := minio.FromMinioClientListPartsInfo(result)

			partNumberMarker = nlpi.NextPartNumberMarker

			lpi.Parts = append(lpi.Parts, nlpi.Parts...)
			if !nlpi.IsTruncated {
				break
			}
		}
	}
	return lpi, nil
}
// AbortMultipartUpload aborts a ongoing multipart upload.
func (l *s3Objects) AbortMultipartUpload(ctx context.Context, bucket string, object string, uploadID string) error {
	// ErrorRespToObjectError passes a nil error through unchanged, so the
	// success path needs no special casing.
	return minio.ErrorRespToObjectError(l.Client.AbortMultipartUpload(bucket, object, uploadID), bucket, object)
}
// CompleteMultipartUpload completes ongoing multipart upload and finalizes object.
func (l *s3Objects) CompleteMultipartUpload(ctx context.Context, bucket string, object string, uploadID string, uploadedParts []minio.CompletePart, opts minio.ObjectOptions) (oi minio.ObjectInfo, e error) {
	parts := minio.ToMinioClientCompleteParts(uploadedParts)
	etag, err := l.Client.CompleteMultipartUpload(bucket, object, uploadID, parts)
	if err != nil {
		return oi, minio.ErrorRespToObjectError(err, bucket, object)
	}
	// The backend quotes the ETag; strip the quotes before returning.
	oi = minio.ObjectInfo{
		Bucket: bucket,
		Name:   object,
		ETag:   strings.Trim(etag, "\""),
	}
	return oi, nil
}
// SetBucketPolicy sets policy on bucket.
func (l *s3Objects) SetBucketPolicy(ctx context.Context, bucket string, bucketPolicy *policy.Policy) error {
	policyJSON, err := json.Marshal(bucketPolicy)
	if err != nil {
		// Marshalling an already-parsed policy should never fail; log it
		// so the unexpected condition is not lost.
		logger.LogIf(ctx, err)
		return minio.ErrorRespToObjectError(err, bucket)
	}
	if err = l.Client.SetBucketPolicy(bucket, string(policyJSON)); err != nil {
		return minio.ErrorRespToObjectError(err, bucket)
	}
	return nil
}
// GetBucketPolicy will get policy on bucket.
func (l *s3Objects) GetBucketPolicy(ctx context.Context, bucket string) (*policy.Policy, error) {
	raw, err := l.Client.GetBucketPolicy(bucket)
	if err != nil {
		return nil, minio.ErrorRespToObjectError(err, bucket)
	}
	// ErrorRespToObjectError passes nil through, so a successful parse
	// returns (policy, nil).
	parsed, err := policy.ParseConfig(strings.NewReader(raw), bucket)
	return parsed, minio.ErrorRespToObjectError(err, bucket)
}
// DeleteBucketPolicy deletes all policies on bucket.
// Setting an empty policy string clears any existing policy.
func (l *s3Objects) DeleteBucketPolicy(ctx context.Context, bucket string) error {
	err := l.Client.SetBucketPolicy(bucket, "")
	if err != nil {
		return minio.ErrorRespToObjectError(err, bucket, "")
	}
	return nil
}
// GetObjectTags gets the tags set on the object.
func (l *s3Objects) GetObjectTags(ctx context.Context, bucket string, object string) (*tags.Tags, error) {
	// Ensure the object exists before asking the backend for its tag set.
	if _, err := l.GetObjectInfo(ctx, bucket, object, minio.ObjectOptions{}); err != nil {
		return nil, minio.ErrorRespToObjectError(err, bucket, object)
	}

	tagXML, err := l.Client.GetObjectTagging(bucket, object)
	if err != nil {
		return nil, minio.ErrorRespToObjectError(err, bucket, object)
	}

	tagObj, err := tags.ParseObjectXML(strings.NewReader(tagXML))
	if err != nil {
		return nil, minio.ErrorRespToObjectError(err, bucket, object)
	}
	return tagObj, nil
}
// PutObjectTags attaches the tags to the object.
func (l *s3Objects) PutObjectTags(ctx context.Context, bucket, object string, tagStr string) error {
	parsed, err := tags.Parse(tagStr, true)
	if err != nil {
		return minio.ErrorRespToObjectError(err, bucket, object)
	}
	if err := l.Client.PutObjectTagging(bucket, object, parsed.ToMap()); err != nil {
		return minio.ErrorRespToObjectError(err, bucket, object)
	}
	return nil
}
// DeleteObjectTags removes the tags attached to the object.
func (l *s3Objects) DeleteObjectTags(ctx context.Context, bucket, object string) error {
	err := l.Client.RemoveObjectTagging(bucket, object)
	if err != nil {
		return minio.ErrorRespToObjectError(err, bucket, object)
	}
	return nil
}
// IsCompressionSupported returns whether compression is applicable for this layer.
// The S3 gateway never applies transparent compression.
func (l *s3Objects) IsCompressionSupported() bool {
	return false
}
// IsEncryptionSupported returns whether server side encryption is implemented for this layer.
// Encryption is available when a KMS is configured or gateway SSE is enabled.
func (l *s3Objects) IsEncryptionSupported() bool {
	return minio.GlobalKMS != nil || len(minio.GlobalGatewaySSE) > 0
}
// IsReady returns whether the layer is ready to take requests.
// Readiness is a liveness probe against the backend S3 endpoint.
func (l *s3Objects) IsReady(ctx context.Context) bool {
	return minio.IsBackendOnline(ctx, l.HTTPClient, l.Client.EndpointURL().String())
}
// IsTaggingSupported reports that object tagging is supported by this gateway
// (see Get/Put/DeleteObjectTags above).
func (l *s3Objects) IsTaggingSupported() bool {
	return true
}
|
harshavardhana/minio
|
cmd/gateway/s3/gateway-s3.go
|
GO
|
apache-2.0
| 26,308 |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.5-b10
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.06.13 at 03:17:43 PM CST
//
package org.ovirt.engine.api.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for Feature complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Feature">
* <complexContent>
* <extension base="{}BaseResource">
* <sequence>
* <element ref="{}transparent_hugepages" minOccurs="0"/>
* <element ref="{}gluster_volumes" minOccurs="0"/>
* <element ref="{}vm_device_types" minOccurs="0"/>
* <element ref="{}storage_types" minOccurs="0"/>
* <element ref="{}storage_domain" minOccurs="0"/>
* <element ref="{}nic" minOccurs="0"/>
* <element ref="{}api" minOccurs="0"/>
* <element ref="{}host" minOccurs="0"/>
* <element ref="{}url" minOccurs="0"/>
* <element ref="{}headers" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Feature", propOrder = {
    "transparentHugepages",
    "glusterVolumes",
    "vmDeviceTypes",
    "storageTypes",
    "storageDomain",
    "nic",
    "api",
    "host",
    "url",
    "headers"
})
public class Feature
    extends BaseResource
{

    // Fields are bound directly by JAXB (FIELD access). @XmlElement overrides
    // the element name where the schema uses snake_case; the remaining fields
    // default to their Java field name.
    @XmlElement(name = "transparent_hugepages")
    protected TransparentHugePages transparentHugepages;
    @XmlElement(name = "gluster_volumes")
    protected GlusterVolumes glusterVolumes;
    @XmlElement(name = "vm_device_types")
    protected VmDeviceTypes vmDeviceTypes;
    @XmlElement(name = "storage_types")
    protected StorageTypes storageTypes;
    @XmlElement(name = "storage_domain")
    protected StorageDomain storageDomain;
    protected NIC nic;
    protected API api;
    protected Host host;
    protected Url url;
    protected Headers headers;

    /**
     * Gets the value of the transparentHugepages property.
     *
     * @return
     *     possible object is
     *     {@link TransparentHugePages }
     *
     */
    public TransparentHugePages getTransparentHugepages() {
        return transparentHugepages;
    }

    /**
     * Sets the value of the transparentHugepages property.
     *
     * @param value
     *     allowed object is
     *     {@link TransparentHugePages }
     *
     */
    public void setTransparentHugepages(TransparentHugePages value) {
        this.transparentHugepages = value;
    }

    // The generated isSetXxx methods report whether the corresponding
    // optional element was present (non-null) — the JAXB "isSet" convention.
    public boolean isSetTransparentHugepages() {
        return (this.transparentHugepages!= null);
    }

    /**
     * Gets the value of the glusterVolumes property.
     *
     * @return
     *     possible object is
     *     {@link GlusterVolumes }
     *
     */
    public GlusterVolumes getGlusterVolumes() {
        return glusterVolumes;
    }

    /**
     * Sets the value of the glusterVolumes property.
     *
     * @param value
     *     allowed object is
     *     {@link GlusterVolumes }
     *
     */
    public void setGlusterVolumes(GlusterVolumes value) {
        this.glusterVolumes = value;
    }

    public boolean isSetGlusterVolumes() {
        return (this.glusterVolumes!= null);
    }

    /**
     * Gets the value of the vmDeviceTypes property.
     *
     * @return
     *     possible object is
     *     {@link VmDeviceTypes }
     *
     */
    public VmDeviceTypes getVmDeviceTypes() {
        return vmDeviceTypes;
    }

    /**
     * Sets the value of the vmDeviceTypes property.
     *
     * @param value
     *     allowed object is
     *     {@link VmDeviceTypes }
     *
     */
    public void setVmDeviceTypes(VmDeviceTypes value) {
        this.vmDeviceTypes = value;
    }

    public boolean isSetVmDeviceTypes() {
        return (this.vmDeviceTypes!= null);
    }

    /**
     * Gets the value of the storageTypes property.
     *
     * @return
     *     possible object is
     *     {@link StorageTypes }
     *
     */
    public StorageTypes getStorageTypes() {
        return storageTypes;
    }

    /**
     * Sets the value of the storageTypes property.
     *
     * @param value
     *     allowed object is
     *     {@link StorageTypes }
     *
     */
    public void setStorageTypes(StorageTypes value) {
        this.storageTypes = value;
    }

    public boolean isSetStorageTypes() {
        return (this.storageTypes!= null);
    }

    /**
     * Gets the value of the storageDomain property.
     *
     * @return
     *     possible object is
     *     {@link StorageDomain }
     *
     */
    public StorageDomain getStorageDomain() {
        return storageDomain;
    }

    /**
     * Sets the value of the storageDomain property.
     *
     * @param value
     *     allowed object is
     *     {@link StorageDomain }
     *
     */
    public void setStorageDomain(StorageDomain value) {
        this.storageDomain = value;
    }

    public boolean isSetStorageDomain() {
        return (this.storageDomain!= null);
    }

    /**
     * Gets the value of the nic property.
     *
     * @return
     *     possible object is
     *     {@link NIC }
     *
     */
    public NIC getNic() {
        return nic;
    }

    /**
     * Sets the value of the nic property.
     *
     * @param value
     *     allowed object is
     *     {@link NIC }
     *
     */
    public void setNic(NIC value) {
        this.nic = value;
    }

    public boolean isSetNic() {
        return (this.nic!= null);
    }

    /**
     * Gets the value of the api property.
     *
     * @return
     *     possible object is
     *     {@link API }
     *
     */
    public API getApi() {
        return api;
    }

    /**
     * Sets the value of the api property.
     *
     * @param value
     *     allowed object is
     *     {@link API }
     *
     */
    public void setApi(API value) {
        this.api = value;
    }

    public boolean isSetApi() {
        return (this.api!= null);
    }

    /**
     * Gets the value of the host property.
     *
     * @return
     *     possible object is
     *     {@link Host }
     *
     */
    public Host getHost() {
        return host;
    }

    /**
     * Sets the value of the host property.
     *
     * @param value
     *     allowed object is
     *     {@link Host }
     *
     */
    public void setHost(Host value) {
        this.host = value;
    }

    public boolean isSetHost() {
        return (this.host!= null);
    }

    /**
     * Gets the value of the url property.
     *
     * @return
     *     possible object is
     *     {@link Url }
     *
     */
    public Url getUrl() {
        return url;
    }

    /**
     * Sets the value of the url property.
     *
     * @param value
     *     allowed object is
     *     {@link Url }
     *
     */
    public void setUrl(Url value) {
        this.url = value;
    }

    public boolean isSetUrl() {
        return (this.url!= null);
    }

    /**
     * Gets the value of the headers property.
     *
     * @return
     *     possible object is
     *     {@link Headers }
     *
     */
    public Headers getHeaders() {
        return headers;
    }

    /**
     * Sets the value of the headers property.
     *
     * @param value
     *     allowed object is
     *     {@link Headers }
     *
     */
    public void setHeaders(Headers value) {
        this.headers = value;
    }

    public boolean isSetHeaders() {
        return (this.headers!= null);
    }

}
|
phoenixsbk/kvmmgr
|
backend/manager/modules/restapi/interface/definition/xjc/org/ovirt/engine/api/model/Feature.java
|
Java
|
apache-2.0
| 8,468 |
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import range
from murano.dsl import helpers
class Object(object):
    """Test helper describing a single object in a murano object model.

    The dunder parameter name keeps ``__name`` (mangled inside the class
    body) from colliding with arbitrary property names passed via kwargs.
    """

    def __init__(self, __name, **kwargs):
        # The '?' entry is the object header: the type name plus a freshly
        # generated object id. Remaining kwargs become object properties.
        header = {'type': __name, 'id': helpers.generate_id()}
        self.data = {'?': header}
        self.data.update(kwargs)

    @property
    def id(self):
        """Generated identifier stored in the '?' header."""
        return self.data['?']['id']

    @property
    def type_name(self):
        """Type name stored in the '?' header."""
        return self.data['?']['type']
class Attribute(object):
    """Read-only triple binding an (object, key, value) attribute record."""

    def __init__(self, obj, key, value):
        self._obj = obj
        self._key = key
        self._value = value

    @property
    def obj(self):
        """Object the attribute belongs to."""
        return self._obj

    @property
    def key(self):
        """Attribute name."""
        return self._key

    @property
    def value(self):
        """Attribute value."""
        return self._value
class Ref(object):
    """Reference to another model object; captures only the target's id."""

    def __init__(self, obj):
        # Snapshot the id eagerly so later mutation of obj does not matter.
        self._id = obj.id

    @property
    def id(self):
        """Id of the referenced object."""
        return self._id
def build_model(root):
    """Recursively flatten Object/Ref/Attribute wrappers into plain data.

    Dicts and lists are converted in place (and returned); Object nodes are
    replaced by their flattened data dict, Refs by the referenced id, and
    Attributes by a ``[obj_id, type_name, key, value]`` quadruple.
    """
    if isinstance(root, Object):
        return build_model(root.data)
    if isinstance(root, Ref):
        return root.id
    if isinstance(root, Attribute):
        return [root.obj.id, root.obj.type_name, root.key, root.value]
    if isinstance(root, dict):
        for key in root:
            root[key] = build_model(root[key])
    elif isinstance(root, list):
        for index, item in enumerate(root):
            root[index] = build_model(item)
    return root
|
olivierlemasle/murano
|
murano/tests/unit/dsl/foundation/object_model.py
|
Python
|
apache-2.0
| 2,019 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.optimizer.ConstantPropagateProcFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnListDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.SettableUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNvl;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.NullWritable;
import org.apache.hive.common.util.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
/**
* The Factory for creating typecheck processors. The typecheck processors are
* used to processes the syntax trees for expressions and convert them into
* expression Node Descriptor trees. They also introduce the correct conversion
* functions to do proper implicit conversion.
*/
public class TypeCheckProcFactory {
protected static final Logger LOG = LoggerFactory.getLogger(TypeCheckProcFactory.class
.getName());
  /**
   * Protected to prevent direct instantiation by clients; subclasses may
   * extend this factory to override the processor-returning hooks used by
   * {@code genExprNode}.
   */
  protected TypeCheckProcFactory() {
    // prevent instantiation
  }
/**
 * Function to do groupby subexpression elimination. This is called by all the
 * processors initially. As an example, consider the query select a+b,
 * count(1) from T group by a+b; Then a+b is already precomputed in the group
 * by operators key, so we substitute a+b in the select list with the internal
 * column name of the a+b expression that appears in the in input row
 * resolver.
 *
 * @param nd
 *          The node that is being inspected.
 * @param procCtx
 *          The processor context (expected to be a {@code TypeCheckCtx}).
 *
 * @return an ExprNodeColumnDesc referencing the precomputed column, or null
 *         when no substitution applies (caller then builds the expression
 *         normally).
 */
public static ExprNodeDesc processGByExpr(Node nd, Object procCtx)
    throws SemanticException {
  // We recursively create the exprNodeDesc. Base cases: when we encounter
  // a column ref, we convert that into an exprNodeColumnDesc; when we
  // encounter a constant, we convert that into an exprNodeConstantDesc.
  // For others we just build the exprNodeFuncDesc with recursively built
  // children.
  ASTNode expr = (ASTNode) nd;
  TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
  // Fix: the null check on ctx must come BEFORE the first dereference.
  // Previously ctx.isUseCaching() was called first, so the later
  // (ctx == null) test was dead code and a null context would NPE here.
  if (ctx == null || !ctx.isUseCaching()) {
    return null;
  }
  RowResolver input = ctx.getInputRR();
  if (input == null || !ctx.getAllowGBExprElimination()) {
    return null;
  }
  // If the current subExpression is pre-calculated, as in Group-By etc.,
  // reuse the precomputed column instead of re-deriving the expression.
  ColumnInfo colInfo = input.getExpression(expr);
  if (colInfo == null) {
    return null;
  }
  ExprNodeDesc desc = new ExprNodeColumnDesc(colInfo);
  ASTNode source = input.getExpressionSource(expr);
  if (source != null) {
    // Keep the unparse translator consistent so the original text of the
    // expression can still be reproduced for views / explain output.
    ctx.getUnparseTranslator().addCopyTranslation(expr, source);
  }
  return desc;
}
/**
 * Convenience overload: walks {@code expr} with a freshly created default
 * {@link TypeCheckProcFactory} and returns the per-node expression
 * descriptors.
 *
 * @param expr  the AST expression to type-check
 * @param tcCtx the type-check context supplied to every processor
 * @return map from each visited AST node to its ExprNodeDesc
 */
public static Map<ASTNode, ExprNodeDesc> genExprNode(ASTNode expr, TypeCheckCtx tcCtx)
    throws SemanticException {
  return genExprNode(expr, tcCtx, new TypeCheckProcFactory());
}
/**
 * Walks the expression AST with a rule dispatcher wired to the processors
 * produced by {@code tf}, and returns the resulting node-to-descriptor map.
 * Rules R1..R8 route literal/identifier/column/subquery tokens to their
 * dedicated processors; everything else falls through to the default
 * expression processor.
 */
protected static Map<ASTNode, ExprNodeDesc> genExprNode(ASTNode expr,
    TypeCheckCtx tcCtx, TypeCheckProcFactory tf) throws SemanticException {
  // Create the walker, the rules dispatcher and the context.
  // create a walker which walks the tree in a DFS manner while maintaining
  // the operator stack. The dispatcher
  // generates the plan from the operator tree
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  // R1: the NULL literal.
  opRules.put(new RuleRegExp("R1", HiveParser.TOK_NULL + "%"),
      tf.getNullExprProcessor());
  // R2: all numeric literal token kinds.
  opRules.put(new RuleRegExp("R2", HiveParser.Number + "%|" +
      HiveParser.TinyintLiteral + "%|" +
      HiveParser.SmallintLiteral + "%|" +
      HiveParser.BigintLiteral + "%|" +
      HiveParser.DecimalLiteral + "%"),
      tf.getNumExprProcessor());
  // R3: identifiers, string literals and a set of keywords that are treated
  // as string constants (function names, IN/ARRAY/MAP/STRUCT/EXISTS, ...).
  // NOTE(review): the lone `+ "%|"` below yields a doubled "%|%|" in the
  // rule pattern — it looks like a token was removed from this alternation
  // at some point; confirm against RuleRegExp matching before changing.
  opRules
      .put(new RuleRegExp("R3", HiveParser.Identifier + "%|"
          + HiveParser.StringLiteral + "%|" + HiveParser.TOK_CHARSETLITERAL + "%|"
          + HiveParser.TOK_STRINGLITERALSEQUENCE + "%|"
          + "%|" + HiveParser.KW_IF + "%|" + HiveParser.KW_CASE + "%|"
          + HiveParser.KW_WHEN + "%|" + HiveParser.KW_IN + "%|"
          + HiveParser.KW_ARRAY + "%|" + HiveParser.KW_MAP + "%|"
          + HiveParser.KW_STRUCT + "%|" + HiveParser.KW_EXISTS + "%|"
          + HiveParser.TOK_SUBQUERY_OP_NOTIN + "%"),
          tf.getStrExprProcessor());
  // R4: boolean literals.
  opRules.put(new RuleRegExp("R4", HiveParser.KW_TRUE + "%|"
      + HiveParser.KW_FALSE + "%"), tf.getBoolExprProcessor());
  // R5: date and timestamp literals.
  opRules.put(new RuleRegExp("R5", HiveParser.TOK_DATELITERAL + "%|"
      + HiveParser.TOK_TIMESTAMPLITERAL + "%"), tf.getDateTimeExprProcessor());
  // R6: every interval literal form.
  opRules.put(new RuleRegExp("R6",
      HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL + "%|"
      + HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL + "%|"
      + HiveParser.TOK_INTERVAL_YEAR_LITERAL + "%|"
      + HiveParser.TOK_INTERVAL_MONTH_LITERAL + "%|"
      + HiveParser.TOK_INTERVAL_DAY_LITERAL + "%|"
      + HiveParser.TOK_INTERVAL_HOUR_LITERAL + "%|"
      + HiveParser.TOK_INTERVAL_MINUTE_LITERAL + "%|"
      + HiveParser.TOK_INTERVAL_SECOND_LITERAL + "%"), tf.getIntervalExprProcessor());
  // R7: bare table-or-column references.
  opRules.put(new RuleRegExp("R7", HiveParser.TOK_TABLE_OR_COL + "%"),
      tf.getColumnExprProcessor());
  // R8: subquery operators.
  opRules.put(new RuleRegExp("R8", HiveParser.TOK_SUBQUERY_OP + "%"),
      tf.getSubQueryExprProcessor());
  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  Dispatcher disp = new DefaultRuleDispatcher(tf.getDefaultExprProcessor(),
      opRules, tcCtx);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  // Create a list of top nodes
  ArrayList<Node> topNodes = Lists.<Node>newArrayList(expr);
  HashMap<Node, Object> nodeOutputs = new LinkedHashMap<Node, Object>();
  ogw.startWalking(topNodes, nodeOutputs);
  // Narrow the untyped walker output to the ASTNode -> ExprNodeDesc map.
  return convert(nodeOutputs);
}
// Temporary type-safe casting: narrows the untyped walker output map to
// ASTNode -> ExprNodeDesc, logging (and dropping) any entry that does not
// conform. Insertion order is preserved via LinkedHashMap.
private static Map<ASTNode, ExprNodeDesc> convert(Map<Node, Object> outputs) {
  Map<ASTNode, ExprNodeDesc> result = new LinkedHashMap<ASTNode, ExprNodeDesc>();
  for (Map.Entry<Node, Object> entry : outputs.entrySet()) {
    Node key = entry.getKey();
    Object value = entry.getValue();
    boolean keyIsAst = key instanceof ASTNode;
    boolean valueOk = (value == null) || (value instanceof ExprNodeDesc);
    if (keyIsAst && valueOk) {
      result.put((ASTNode) key, (ExprNodeDesc) value);
    } else {
      LOG.warn("Invalid type entry " + entry);
    }
  }
  return result;
}
/**
 * Processor for processing NULL expression.
 */
public static class NullExprProcessor implements NodeProcessor {

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    // Bail out early once an error has been recorded on the context.
    if (ctx.getError() != null) {
      return null;
    }
    // Reuse a precomputed group-by column when available.
    ExprNodeDesc cached = TypeCheckProcFactory.processGByExpr(nd, procCtx);
    if (cached != null) {
      return cached;
    }
    // A NULL literal becomes a typed null constant (void-like primitive).
    return new ExprNodeConstantDesc(
        TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), null);
  }
}
/**
 * Factory method returning a fresh {@link NullExprProcessor} for the
 * rule dispatcher.
 *
 * @return a new NullExprProcessor instance.
 */
public NullExprProcessor getNullExprProcessor() {
  return new NullExprProcessor();
}
/**
 * Processor for processing numeric constants.
 */
public static class NumExprProcessor implements NodeProcessor {

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    // A previously recorded error short-circuits all further processing.
    if (ctx.getError() != null) {
      return null;
    }
    // Reuse a precomputed group-by column when available.
    ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
    if (desc != null) {
      return desc;
    }
    Number v = null;
    ASTNode expr = (ASTNode) nd;
    // The expression can be any one of Double, Long and Integer. We
    // try to parse the expression in that order to ensure that the
    // most specific type is used for conversion.
    try {
      if (expr.getText().endsWith("L")) {
        // Literal bigint: strip the 'L' suffix and parse as Long.
        v = Long.valueOf(expr.getText().substring(
            0, expr.getText().length() - 1));
      } else if (expr.getText().endsWith("S")) {
        // Literal smallint: strip the 'S' suffix.
        v = Short.valueOf(expr.getText().substring(
            0, expr.getText().length() - 1));
      } else if (expr.getText().endsWith("Y")) {
        // Literal tinyint: strip the 'Y' suffix.
        v = Byte.valueOf(expr.getText().substring(
            0, expr.getText().length() - 1));
      } else if (expr.getText().endsWith("BD")) {
        // Literal decimal: strip the 'BD' suffix and build a HiveDecimal
        // with precision/scale derived from the parsed value (defaults to
        // (1,0) when the value cannot be represented and create() returns
        // null, e.g. on overflow).
        String strVal = expr.getText().substring(0, expr.getText().length() - 2);
        HiveDecimal hd = HiveDecimal.create(strVal);
        int prec = 1;
        int scale = 0;
        if (hd != null) {
          prec = hd.precision();
          scale = hd.scale();
        }
        DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(prec, scale);
        return new ExprNodeConstantDesc(typeInfo, hd);
      } else {
        // Unsuffixed literal. This cascade is intentionally order- and
        // exception-driven: Double.valueOf accepts every numeric literal,
        // then Long.valueOf throws on fractional text (leaving the Double),
        // then Integer.valueOf throws on out-of-int-range text (leaving the
        // Long). The last assignment that succeeds — i.e. the most specific
        // type — wins.
        v = Double.valueOf(expr.getText());
        v = Long.valueOf(expr.getText());
        v = Integer.valueOf(expr.getText());
      }
    } catch (NumberFormatException e) {
      // do nothing here, we will throw an exception in the following block
    }
    // v is still null only when even Double.valueOf failed: not a number.
    if (v == null) {
      throw new SemanticException(ErrorMsg.INVALID_NUMERICAL_CONSTANT
          .getMsg(expr));
    }
    return new ExprNodeConstantDesc(v);
  }
}
/**
 * Factory method returning a fresh {@link NumExprProcessor} for the
 * rule dispatcher.
 *
 * @return a new NumExprProcessor instance.
 */
public NumExprProcessor getNumExprProcessor() {
  return new NumExprProcessor();
}
/**
 * Processor for processing string constants.
 */
public static class StrExprProcessor implements NodeProcessor {

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    // Bail out early once an error has been recorded on the context.
    if (ctx.getError() != null) {
      return null;
    }
    // Reuse a precomputed group-by column when available.
    ExprNodeDesc cached = TypeCheckProcFactory.processGByExpr(nd, procCtx);
    if (cached != null) {
      return cached;
    }
    ASTNode expr = (ASTNode) nd;
    int tokenType = expr.getToken().getType();
    String str;
    if (tokenType == HiveParser.StringLiteral) {
      // Plain quoted string: unescape it.
      str = BaseSemanticAnalyzer.unescapeSQLString(expr.getText());
    } else if (tokenType == HiveParser.TOK_STRINGLITERALSEQUENCE) {
      // Adjacent string literals: unescape each piece and concatenate.
      StringBuilder joined = new StringBuilder();
      for (Node child : expr.getChildren()) {
        joined.append(
            BaseSemanticAnalyzer.unescapeSQLString(((ASTNode) child).getText()));
      }
      str = joined.toString();
    } else if (tokenType == HiveParser.TOK_CHARSETLITERAL) {
      // _charset'...' literal: child 0 is the charset, child 1 the payload.
      str = BaseSemanticAnalyzer.charSetString(expr.getChild(0).getText(),
          expr.getChild(1).getText());
    } else {
      // HiveParser.identifier | HiveParse.KW_IF | HiveParse.KW_LEFT |
      // HiveParse.KW_RIGHT — treated as a lower-cased identifier constant.
      str = BaseSemanticAnalyzer.unescapeIdentifier(expr.getText().toLowerCase());
    }
    return new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, str);
  }
}
/**
 * Factory method returning a fresh {@link StrExprProcessor} for the
 * rule dispatcher.
 *
 * @return a new StrExprProcessor instance.
 */
public StrExprProcessor getStrExprProcessor() {
  return new StrExprProcessor();
}
/**
 * Processor for boolean constants.
 */
public static class BoolExprProcessor implements NodeProcessor {

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    // Bail out early once an error has been recorded on the context.
    if (ctx.getError() != null) {
      return null;
    }
    // Reuse a precomputed group-by column when available.
    ExprNodeDesc cached = TypeCheckProcFactory.processGByExpr(nd, procCtx);
    if (cached != null) {
      return cached;
    }
    ASTNode expr = (ASTNode) nd;
    int tokenType = expr.getToken().getType();
    Boolean value;
    if (tokenType == HiveParser.KW_TRUE) {
      value = Boolean.TRUE;
    } else if (tokenType == HiveParser.KW_FALSE) {
      value = Boolean.FALSE;
    } else {
      // The dispatcher only routes TRUE/FALSE tokens here; anything else
      // is a programming error (mirrors the original assert-false path).
      value = null;
      assert false;
    }
    return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, value);
  }
}
/**
 * Factory method returning a fresh {@link BoolExprProcessor} for the
 * rule dispatcher.
 *
 * @return a new BoolExprProcessor instance.
 */
public BoolExprProcessor getBoolExprProcessor() {
  return new BoolExprProcessor();
}
/**
* Processor for date constants.
*/
public static class DateTimeExprProcessor implements NodeProcessor {
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
Object... nodeOutputs) throws SemanticException {
TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
if (ctx.getError() != null) {
return null;
}
ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
if (desc != null) {
return desc;
}
ASTNode expr = (ASTNode) nd;
String timeString = BaseSemanticAnalyzer.stripQuotes(expr.getText());
// Get the string value and convert to a Date value.
try {
// todo replace below with joda-time, which supports timezone
if (expr.getType() == HiveParser.TOK_DATELITERAL) {
PrimitiveTypeInfo typeInfo = TypeInfoFactory.dateTypeInfo;
return new ExprNodeConstantDesc(typeInfo,
Date.valueOf(timeString));
}
if (expr.getType() == HiveParser.TOK_TIMESTAMPLITERAL) {
return new ExprNodeConstantDesc(TypeInfoFactory.timestampTypeInfo,
Timestamp.valueOf(timeString));
}
throw new IllegalArgumentException("Invalid time literal type " + expr.getType());
} catch (Exception err) {
throw new SemanticException(
"Unable to convert time literal '" + timeString + "' to time value.", err);
}
}
}
/**
 * Factory method returning a fresh {@link DateTimeExprProcessor} for the
 * rule dispatcher.
 *
 * @return a new DateTimeExprProcessor instance.
 */
public DateTimeExprProcessor getDateTimeExprProcessor() {
  return new DateTimeExprProcessor();
}
/**
 * Processor for interval constants.
 */
public static class IntervalExprProcessor implements NodeProcessor {

  // Used to split a fractional seconds interval into whole seconds + nanos.
  private static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(DateUtils.NANOS_PER_SEC);

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    // A previously recorded error short-circuits all further processing.
    if (ctx.getError() != null) {
      return null;
    }
    // Reuse a precomputed group-by column when available.
    ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
    if (desc != null) {
      return desc;
    }
    ASTNode expr = (ASTNode) nd;
    String intervalString = BaseSemanticAnalyzer.stripQuotes(expr.getText());
    // Get the string value and convert to a Interval value.
    try {
      switch (expr.getType()) {
        // Compound literals parse their own "Y-M" / "D H:M:S" syntax.
        case HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL:
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
              HiveIntervalYearMonth.valueOf(intervalString));
        case HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL:
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
              HiveIntervalDayTime.valueOf(intervalString));
        // Single-field literals place the parsed integer into the matching
        // constructor slot and zero everything else.
        case HiveParser.TOK_INTERVAL_YEAR_LITERAL:
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
              new HiveIntervalYearMonth(Integer.parseInt(intervalString), 0));
        case HiveParser.TOK_INTERVAL_MONTH_LITERAL:
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
              new HiveIntervalYearMonth(0, Integer.parseInt(intervalString)));
        case HiveParser.TOK_INTERVAL_DAY_LITERAL:
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
              new HiveIntervalDayTime(Integer.parseInt(intervalString), 0, 0, 0, 0));
        case HiveParser.TOK_INTERVAL_HOUR_LITERAL:
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
              new HiveIntervalDayTime(0, Integer.parseInt(intervalString), 0, 0, 0));
        case HiveParser.TOK_INTERVAL_MINUTE_LITERAL:
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
              new HiveIntervalDayTime(0, 0, Integer.parseInt(intervalString), 0, 0));
        case HiveParser.TOK_INTERVAL_SECOND_LITERAL:
          // Seconds may be fractional: split into the integer part (whole
          // seconds, must fit an int — intValueExact throws otherwise) and
          // the fractional remainder scaled to nanoseconds.
          BigDecimal bd = new BigDecimal(intervalString);
          BigDecimal bdSeconds = new BigDecimal(bd.toBigInteger());
          BigDecimal bdNanos = bd.subtract(bdSeconds);
          return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
              new HiveIntervalDayTime(0, 0, 0, bdSeconds.intValueExact(),
                  bdNanos.multiply(NANOS_PER_SEC_BD).intValue()));
        default:
          throw new IllegalArgumentException("Invalid time literal type " + expr.getType());
      }
    } catch (Exception err) {
      // Parse failures and the unexpected-token case are surfaced uniformly.
      throw new SemanticException(
          "Unable to convert interval literal '" + intervalString + "' to interval value.", err);
    }
  }
}
/**
 * Factory method returning a fresh {@link IntervalExprProcessor} for the
 * rule dispatcher.
 *
 * @return a new IntervalExprProcessor instance.
 */
public IntervalExprProcessor getIntervalExprProcessor() {
  return new IntervalExprProcessor();
}
/**
 * Processor for table columns.
 */
public static class ColumnExprProcessor implements NodeProcessor {

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    // A previously recorded error short-circuits all further processing.
    if (ctx.getError() != null) {
      return null;
    }
    // Reuse a precomputed group-by column when available.
    ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
    if (desc != null) {
      return desc;
    }
    ASTNode expr = (ASTNode) nd;
    // Parent node (one below the top of the walk stack), used to detect
    // whether this token is the table-alias side of a DOT expression.
    ASTNode parent = stack.size() > 1 ? (ASTNode) stack.get(stack.size() - 2) : null;
    RowResolver input = ctx.getInputRR();
    if (expr.getType() != HiveParser.TOK_TABLE_OR_COL) {
      ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr), expr);
      return null;
    }
    assert (expr.getChildCount() == 1);
    String tableOrCol = BaseSemanticAnalyzer.unescapeIdentifier(expr
        .getChild(0).getText());
    // The identifier may name a table alias, a column, or both; disambiguate
    // using the row resolver and (if ambiguous) the parent DOT node.
    boolean isTableAlias = input.hasTableAlias(tableOrCol);
    ColumnInfo colInfo = input.get(null, tableOrCol);
    if (isTableAlias) {
      if (colInfo != null) {
        if (parent != null && parent.getType() == HiveParser.DOT) {
          // It's a table alias.
          return null;
        }
        // It's a column.
        return toExprNodeDesc(colInfo);
      } else {
        // It's a table alias.
        // We will process that later in DOT.
        return null;
      }
    } else {
      if (colInfo == null) {
        // It's not a column or a table alias.
        if (input.getIsExprResolver()) {
          // Report the error against the enclosing expression (the node one
          // below the current one on the stack), not the bare identifier.
          // The pop/peek/push sequence restores the stack exactly.
          ASTNode exprNode = expr;
          if (!stack.empty()) {
            ASTNode tmp = (ASTNode) stack.pop();
            if (!stack.empty()) {
              exprNode = (ASTNode) stack.peek();
            }
            stack.push(tmp);
          }
          ctx.setError(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(exprNode), expr);
          return null;
        } else {
          // Include candidate column names in the error to help the user.
          List<String> possibleColumnNames = input.getReferenceableColumnAliases(tableOrCol, -1);
          String reason = String.format("(possible column names are: %s)",
              StringUtils.join(possibleColumnNames, ", "));
          ctx.setError(ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg(expr.getChild(0), reason),
              expr);
          LOG.debug(ErrorMsg.INVALID_TABLE_OR_COLUMN.toString() + ":"
              + input.toString());
          return null;
        }
      } else {
        // It's a column.
        return toExprNodeDesc(colInfo);
      }
    }
  }
}
// Converts a resolved column into either a constant descriptor (when the
// column's inspector is a primitive constant) or a regular column reference
// carrying the skewed-column flag.
private static ExprNodeDesc toExprNodeDesc(ColumnInfo colInfo) {
  ObjectInspector oi = colInfo.getObjectInspector();
  boolean primitiveConstant = (oi instanceof ConstantObjectInspector)
      && (oi instanceof PrimitiveObjectInspector);
  if (primitiveConstant) {
    // Fold the constant's writable value into a plain Java object.
    Object writable = ((ConstantObjectInspector) oi).getWritableConstantValue();
    Object javaValue = ((PrimitiveObjectInspector) oi).getPrimitiveJavaObject(writable);
    return new ExprNodeConstantDesc(colInfo.getType(), javaValue);
  }
  // non-constant or non-primitive constants
  ExprNodeColumnDesc columnDesc = new ExprNodeColumnDesc(colInfo);
  columnDesc.setSkewedCol(colInfo.isSkewedCol());
  return columnDesc;
}
/**
 * Factory method returning a fresh {@link ColumnExprProcessor} for the
 * rule dispatcher.
 *
 * @return a new ColumnExprProcessor instance.
 */
public ColumnExprProcessor getColumnExprProcessor() {
  return new ColumnExprProcessor();
}
/**
* The default processor for typechecking.
*/
public static class DefaultExprProcessor implements NodeProcessor {
// Token-type -> function-name table for unary +/- (mapped to the
// "positive"/"negative" UDFs rather than their operator text).
static HashMap<Integer, String> specialUnaryOperatorTextHashMap;
// Token-type -> function-name table for IS [NOT] NULL.
static HashMap<Integer, String> specialFunctionTextHashMap;
// Token-type -> serde type-name table for CAST-style conversion functions.
static HashMap<Integer, String> conversionFunctionTextHashMap;
// Token types that belong to windowing clauses and are skipped/handled
// specially by the default processor.
static HashSet<Integer> windowingTokens;
static {
  specialUnaryOperatorTextHashMap = new HashMap<Integer, String>();
  specialUnaryOperatorTextHashMap.put(HiveParser.PLUS, "positive");
  specialUnaryOperatorTextHashMap.put(HiveParser.MINUS, "negative");
  specialFunctionTextHashMap = new HashMap<Integer, String>();
  specialFunctionTextHashMap.put(HiveParser.TOK_ISNULL, "isnull");
  specialFunctionTextHashMap.put(HiveParser.TOK_ISNOTNULL, "isnotnull");
  conversionFunctionTextHashMap = new HashMap<Integer, String>();
  conversionFunctionTextHashMap.put(HiveParser.TOK_BOOLEAN,
      serdeConstants.BOOLEAN_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_TINYINT,
      serdeConstants.TINYINT_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_SMALLINT,
      serdeConstants.SMALLINT_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_INT,
      serdeConstants.INT_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_BIGINT,
      serdeConstants.BIGINT_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_FLOAT,
      serdeConstants.FLOAT_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_DOUBLE,
      serdeConstants.DOUBLE_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_STRING,
      serdeConstants.STRING_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_CHAR,
      serdeConstants.CHAR_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_VARCHAR,
      serdeConstants.VARCHAR_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_BINARY,
      serdeConstants.BINARY_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_DATE,
      serdeConstants.DATE_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
      serdeConstants.TIMESTAMP_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_YEAR_MONTH,
      serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_DAY_TIME,
      serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
  conversionFunctionTextHashMap.put(HiveParser.TOK_DECIMAL,
      serdeConstants.DECIMAL_TYPE_NAME);
  windowingTokens = new HashSet<Integer>();
  windowingTokens.add(HiveParser.KW_OVER);
  windowingTokens.add(HiveParser.TOK_PARTITIONINGSPEC);
  windowingTokens.add(HiveParser.TOK_DISTRIBUTEBY);
  windowingTokens.add(HiveParser.TOK_SORTBY);
  windowingTokens.add(HiveParser.TOK_CLUSTERBY);
  windowingTokens.add(HiveParser.TOK_WINDOWSPEC);
  windowingTokens.add(HiveParser.TOK_WINDOWRANGE);
  windowingTokens.add(HiveParser.TOK_WINDOWVALUES);
  windowingTokens.add(HiveParser.KW_UNBOUNDED);
  windowingTokens.add(HiveParser.KW_PRECEDING);
  windowingTokens.add(HiveParser.KW_FOLLOWING);
  windowingTokens.add(HiveParser.KW_CURRENT);
  windowingTokens.add(HiveParser.TOK_TABSORTCOLNAMEASC);
  windowingTokens.add(HiveParser.TOK_TABSORTCOLNAMEDESC);
  windowingTokens.add(HiveParser.TOK_NULLS_FIRST);
  windowingTokens.add(HiveParser.TOK_NULLS_LAST);
}
// Returns true when expr is a CAST-style conversion whose single argument
// already has the target type, so the conversion can be dropped entirely.
protected static boolean isRedundantConversionFunction(ASTNode expr,
    boolean isFunction, ArrayList<ExprNodeDesc> children) {
  // Conversion functions are TOK_FUNCTION nodes with exactly one parameter.
  if (!isFunction || children.size() != 1) {
    return false;
  }
  int funcToken = ((ASTNode) expr.getChild(0)).getType();
  String targetTypeName = conversionFunctionTextHashMap.get(funcToken);
  if (targetTypeName == null) {
    // not a conversion function
    return false;
  }
  // Redundant exactly when the child's type already matches the target.
  String childTypeName =
      ((PrimitiveTypeInfo) children.get(0).getTypeInfo()).getTypeName();
  return childTypeName.equalsIgnoreCase(targetTypeName);
}
/**
 * Resolves the function name for an operator or TOK_FUNCTION node. Special
 * dictionaries take precedence (unary +/- , IS [NOT] NULL, conversion
 * functions); otherwise the node's (or its first child's) text is used.
 */
public static String getFunctionText(ASTNode expr, boolean isFunction) {
  String funcText;
  if (isFunction) {
    // For TOK_FUNCTION, the function name is stored in the first child,
    // unless it's in our special dictionary.
    assert (expr.getChildCount() >= 1);
    int funcType = ((ASTNode) expr.getChild(0)).getType();
    funcText = specialFunctionTextHashMap.get(funcType);
    if (funcText == null) {
      funcText = conversionFunctionTextHashMap.get(funcType);
    }
    if (funcText == null) {
      funcText = ((ASTNode) expr.getChild(0)).getText();
    }
  } else {
    // For an operator, the function name is the operator text, unless it's
    // a unary +/- in the special dictionary.
    funcText = (expr.getChildCount() == 1)
        ? specialUnaryOperatorTextHashMap.get(expr.getType())
        : null;
    if (funcText == null) {
      funcText = expr.getText();
    }
  }
  return BaseSemanticAnalyzer.unescapeIdentifier(funcText);
}
/**
 * This function create an ExprNodeDesc for a UDF function given the
 * children (arguments). It will insert implicit type conversion functions
 * if necessary. When {@code typeInfo} is non-null and the UDF is settable,
 * the type info is pushed into the UDF before instantiation.
 *
 * @throws UDFArgumentException
 */
static ExprNodeDesc getFuncExprNodeDescWithUdfData(String udfName, TypeInfo typeInfo,
    ExprNodeDesc... children) throws UDFArgumentException {
  FunctionInfo info;
  try {
    info = FunctionRegistry.getFunctionInfo(udfName);
  } catch (SemanticException e) {
    throw new UDFArgumentException(e);
  }
  if (info == null) {
    throw new UDFArgumentException(udfName + " not found.");
  }
  GenericUDF udf = info.getGenericUDF();
  if (udf == null) {
    // Registered name resolves to something other than a scalar UDF.
    throw new UDFArgumentException(udfName
        + " is an aggregation function or a table function.");
  }
  // Add udfData to UDF if necessary
  if (typeInfo != null && udf instanceof SettableUDF) {
    ((SettableUDF) udf).setTypeInfo(typeInfo);
  }
  List<ExprNodeDesc> args = new ArrayList<ExprNodeDesc>(Arrays.asList(children));
  return ExprNodeGenericFuncDesc.newInstance(udf, args);
}
/**
 * Convenience overload of {@link #getFuncExprNodeDescWithUdfData} with no
 * settable UDF type info.
 */
public static ExprNodeDesc getFuncExprNodeDesc(String udfName,
    ExprNodeDesc... children) throws UDFArgumentException {
  return getFuncExprNodeDescWithUdfData(udfName, null, children);
}
/**
 * Rejects function references that are invalid in an expression context:
 * UDTFs anywhere in a nested expression, UDAFs outside aggregation, and
 * stateful UDFs where the context forbids them.
 */
protected void validateUDF(ASTNode expr, boolean isFunction, TypeCheckCtx ctx, FunctionInfo fi,
    List<ExprNodeDesc> children, GenericUDF genericUDF) throws SemanticException {
  // Detect UDTF's in nested SELECT, GROUP BY, etc as they aren't supported.
  if (fi.getGenericUDTF() != null) {
    throw new SemanticException(ErrorMsg.UDTF_INVALID_LOCATION.getMsg());
  }
  // UDAF in filter condition, group-by clause, param of function, etc.
  if (fi.getGenericUDAFResolver() != null) {
    // Point the error at the function-name child for TOK_FUNCTION nodes,
    // otherwise at the operator node itself.
    ASTNode errorTarget = isFunction ? (ASTNode) expr.getChild(0) : expr;
    throw new SemanticException(ErrorMsg.UDAF_INVALID_LOCATION.getMsg(errorTarget));
  }
  // Stateful UDFs are only valid where the context explicitly allows them.
  if (genericUDF != null && !ctx.getAllowStatefulFunctions()
      && FunctionRegistry.isStateful(genericUDF)) {
    throw new SemanticException(ErrorMsg.UDF_STATEFUL_INVALID_LOCATION.getMsg());
  }
}
/**
 * Builds the ExprNodeDesc for a generic function/operator node: handles the
 * "." field access and "[]" index operators specially, resolves everything
 * else through the FunctionRegistry, narrows comparison constants to the
 * column's type, flattens nested AND/OR, rewrites eligible CASE/WHEN into
 * NVL, and constant-folds deterministic all-constant calls.
 */
protected ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
    boolean isFunction, ArrayList<ExprNodeDesc> children, TypeCheckCtx ctx)
    throws SemanticException, UDFArgumentException {
  // return the child directly if the conversion is redundant.
  if (isRedundantConversionFunction(expr, isFunction, children)) {
    assert (children.size() == 1);
    assert (children.get(0) != null);
    return children.get(0);
  }
  String funcText = getFunctionText(expr, isFunction);
  ExprNodeDesc desc;
  if (funcText.equals(".")) {
    // "." : FIELD Expression
    assert (children.size() == 2);
    // Only allow constant field name for now
    assert (children.get(1) instanceof ExprNodeConstantDesc);
    ExprNodeDesc object = children.get(0);
    ExprNodeConstantDesc fieldName = (ExprNodeConstantDesc) children.get(1);
    assert (fieldName.getValue() instanceof String);
    // Calculate result TypeInfo
    String fieldNameString = (String) fieldName.getValue();
    TypeInfo objectTypeInfo = object.getTypeInfo();
    // Allow accessing a field of list element structs directly from a list
    boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST);
    if (isList) {
      objectTypeInfo = ((ListTypeInfo) objectTypeInfo).getListElementTypeInfo();
    }
    if (objectTypeInfo.getCategory() != Category.STRUCT) {
      throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr));
    }
    TypeInfo t = ((StructTypeInfo) objectTypeInfo).getStructFieldTypeInfo(fieldNameString);
    if (isList) {
      // field access on a list of structs yields a list of field values
      t = TypeInfoFactory.getListTypeInfo(t);
    }
    desc = new ExprNodeFieldDesc(t, children.get(0), fieldNameString, isList);
  } else if (funcText.equals("[")) {
    // "[]" : LSQUARE/INDEX Expression
    if (!ctx.getallowIndexExpr())
      throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(expr));
    assert (children.size() == 2);
    // Check whether this is a list or a map
    TypeInfo myt = children.get(0).getTypeInfo();
    if (myt.getCategory() == Category.LIST) {
      // Only allow integer index for now
      if (!TypeInfoUtils.implicitConvertible(children.get(1).getTypeInfo(),
          TypeInfoFactory.intTypeInfo)) {
        throw new SemanticException(SemanticAnalyzer.generateErrorMessage(
            expr, ErrorMsg.INVALID_ARRAYINDEX_TYPE.getMsg()));
      }
      // Calculate TypeInfo
      TypeInfo t = ((ListTypeInfo) myt).getListElementTypeInfo();
      desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry.getGenericUDFForIndex(), children);
    } else if (myt.getCategory() == Category.MAP) {
      // Map index: key expression must be convertible to the map's key type.
      if (!TypeInfoUtils.implicitConvertible(children.get(1).getTypeInfo(),
          ((MapTypeInfo) myt).getMapKeyTypeInfo())) {
        throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE
            .getMsg(expr));
      }
      // Calculate TypeInfo
      TypeInfo t = ((MapTypeInfo) myt).getMapValueTypeInfo();
      desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry.getGenericUDFForIndex(), children);
    } else {
      throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr, myt.getTypeName()));
    }
  } else {
    // other operators or functions
    FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcText);
    if (fi == null) {
      if (isFunction) {
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION
            .getMsg((ASTNode) expr.getChild(0)));
      } else {
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(expr));
      }
    }
    // getGenericUDF() actually clones the UDF. Just call it once and reuse.
    GenericUDF genericUDF = fi.getGenericUDF();
    if (!fi.isNative()) {
      // non-native (temporary) functions need identifier translation for
      // view/unparse support
      ctx.getUnparseTranslator().addIdentifierTranslation(
          (ASTNode) expr.getChild(0));
    }
    // Handle type casts that may contain type parameters
    if (isFunction) {
      ASTNode funcNameNode = (ASTNode)expr.getChild(0);
      switch (funcNameNode.getType()) {
      case HiveParser.TOK_CHAR:
        // Add type params
        CharTypeInfo charTypeInfo = ParseUtils.getCharTypeInfo(funcNameNode);
        if (genericUDF != null) {
          ((SettableUDF)genericUDF).setTypeInfo(charTypeInfo);
        }
        break;
      case HiveParser.TOK_VARCHAR:
        VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo(funcNameNode);
        if (genericUDF != null) {
          ((SettableUDF)genericUDF).setTypeInfo(varcharTypeInfo);
        }
        break;
      case HiveParser.TOK_DECIMAL:
        DecimalTypeInfo decTypeInfo = ParseUtils.getDecimalTypeTypeInfo(funcNameNode);
        if (genericUDF != null) {
          ((SettableUDF)genericUDF).setTypeInfo(decTypeInfo);
        }
        break;
      default:
        // Do nothing
        break;
      }
    }
    validateUDF(expr, isFunction, ctx, fi, children, genericUDF);
    // Try to infer the type of the constant only if there are two
    // nodes, one of them is column and the other is numeric const
    if (genericUDF instanceof GenericUDFBaseCompare
        && children.size() == 2
        && ((children.get(0) instanceof ExprNodeConstantDesc
            && children.get(1) instanceof ExprNodeColumnDesc)
            || (children.get(0) instanceof ExprNodeColumnDesc
                && children.get(1) instanceof ExprNodeConstantDesc))) {
      int constIdx =
          children.get(0) instanceof ExprNodeConstantDesc ? 0 : 1;
      String constType = children.get(constIdx).getTypeString().toLowerCase();
      String columnType = children.get(1 - constIdx).getTypeString().toLowerCase();
      final PrimitiveTypeInfo colTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(columnType);
      // Try to narrow type of constant
      // NOTE(review): the boxing constructors (new Integer(...), etc.) are
      // deprecated in modern Java; valueOf would be preferred, but the code
      // is left byte-identical here.
      Object constVal = ((ExprNodeConstantDesc) children.get(constIdx)).getValue();
      try {
        if (PrimitiveObjectInspectorUtils.intTypeEntry.equals(colTypeInfo.getPrimitiveTypeEntry()) && (constVal instanceof Number || constVal instanceof String)) {
          children.set(constIdx, new ExprNodeConstantDesc(new Integer(constVal.toString())));
        } else if (PrimitiveObjectInspectorUtils.longTypeEntry.equals(colTypeInfo.getPrimitiveTypeEntry()) && (constVal instanceof Number || constVal instanceof String)) {
          children.set(constIdx, new ExprNodeConstantDesc(new Long(constVal.toString())));
        }else if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(colTypeInfo.getPrimitiveTypeEntry()) && (constVal instanceof Number || constVal instanceof String)) {
          children.set(constIdx, new ExprNodeConstantDesc(new Double(constVal.toString())));
        } else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(colTypeInfo.getPrimitiveTypeEntry()) && (constVal instanceof Number || constVal instanceof String)) {
          children.set(constIdx, new ExprNodeConstantDesc(new Float(constVal.toString())));
        } else if (PrimitiveObjectInspectorUtils.byteTypeEntry.equals(colTypeInfo.getPrimitiveTypeEntry()) && (constVal instanceof Number || constVal instanceof String)) {
          children.set(constIdx, new ExprNodeConstantDesc(new Byte(constVal.toString())));
        } else if (PrimitiveObjectInspectorUtils.shortTypeEntry.equals(colTypeInfo.getPrimitiveTypeEntry()) && (constVal instanceof Number || constVal instanceof String)) {
          children.set(constIdx, new ExprNodeConstantDesc(new Short(constVal.toString())));
        }
      } catch (NumberFormatException nfe) {
        LOG.trace("Failed to narrow type of constant", nfe);
        // Equality against a non-numeric constant on a numeric column can
        // never hold: fold the whole comparison to FALSE.
        if ((genericUDF instanceof GenericUDFOPEqual && !NumberUtils.isNumber(constVal.toString()))) {
          return new ExprNodeConstantDesc(false);
        }
      }
      // if column type is char and constant type is string, then convert the constant to char
      // type with padded spaces.
      if (constType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME) &&
          colTypeInfo instanceof CharTypeInfo) {
        final Object originalValue = ((ExprNodeConstantDesc) children.get(constIdx)).getValue();
        final String constValue = originalValue.toString();
        final int length = TypeInfoUtils.getCharacterLengthForType(colTypeInfo);
        final HiveChar newValue = new HiveChar(constValue, length);
        children.set(constIdx, new ExprNodeConstantDesc(colTypeInfo, newValue));
      }
    }
    if (genericUDF instanceof GenericUDFOPOr) {
      // flatten OR: hoist children of nested ORs into a single n-ary OR
      List<ExprNodeDesc> childrenList = new ArrayList<ExprNodeDesc>(
          children.size());
      for (ExprNodeDesc child : children) {
        if (FunctionRegistry.isOpOr(child)) {
          childrenList.addAll(child.getChildren());
        } else {
          childrenList.add(child);
        }
      }
      desc = ExprNodeGenericFuncDesc.newInstance(genericUDF, funcText,
          childrenList);
    } else if (genericUDF instanceof GenericUDFOPAnd) {
      // flatten AND: same hoisting as for OR above
      List<ExprNodeDesc> childrenList = new ArrayList<ExprNodeDesc>(
          children.size());
      for (ExprNodeDesc child : children) {
        if (FunctionRegistry.isOpAnd(child)) {
          childrenList.addAll(child.getChildren());
        } else {
          childrenList.add(child);
        }
      }
      desc = ExprNodeGenericFuncDesc.newInstance(genericUDF, funcText,
          childrenList);
    } else if (ctx.isFoldExpr() && canConvertIntoNvl(genericUDF, children)) {
      // Rewrite CASE into NVL: CASE WHEN p THEN true/false ELSE false/true
      // becomes NVL(p, false), negated when the THEN branch is FALSE.
      desc = ExprNodeGenericFuncDesc.newInstance(new GenericUDFNvl(),
          Lists.newArrayList(children.get(0), new ExprNodeConstantDesc(false)));
      if (Boolean.FALSE.equals(((ExprNodeConstantDesc) children.get(1)).getValue())) {
        desc = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPNot(),
            Lists.newArrayList(desc));
      }
    } else {
      desc = ExprNodeGenericFuncDesc.newInstance(genericUDF, funcText,
          children);
    }
    // If the function is deterministic and the children are constants,
    // we try to fold the expression to remove e.g. cast on constant
    if (ctx.isFoldExpr() && desc instanceof ExprNodeGenericFuncDesc &&
        FunctionRegistry.isDeterministic(genericUDF) &&
        ExprNodeDescUtils.isAllConstants(children)) {
      ExprNodeDesc constantExpr = ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc)desc);
      if (constantExpr != null) {
        desc = constantExpr;
      }
    }
  }
  // UDFOPPositive is a no-op.
  // However, we still create it, and then remove it here, to make sure we
  // only allow
  // "+" for numeric types.
  if (FunctionRegistry.isOpPositive(desc)) {
    assert (desc.getChildren().size() == 1);
    desc = desc.getChildren().get(0);
  }
  assert (desc != null);
  return desc;
}
/**
 * Decides whether a CASE WHEN invocation can be rewritten as an NVL call.
 * This is only possible for the three-argument form
 * {@code WHEN cond THEN c1 ELSE c2} where both branches are boolean
 * constants.
 *
 * @param genericUDF the UDF being invoked
 * @param children   the already-translated argument expressions
 * @return true iff the invocation is a CASE WHEN over two boolean constants
 */
private boolean canConvertIntoNvl(GenericUDF genericUDF, ArrayList<ExprNodeDesc> children) {
  if (!(genericUDF instanceof GenericUDFWhen) || children.size() != 3) {
    return false;
  }
  ExprNodeDesc thenBranch = children.get(1);
  ExprNodeDesc elseBranch = children.get(2);
  if (!(thenBranch instanceof ExprNodeConstantDesc)
      || !(elseBranch instanceof ExprNodeConstantDesc)) {
    return false;
  }
  // Both branches must be boolean literals for the NVL rewrite to apply.
  return ((ExprNodeConstantDesc) thenBranch).getValue() instanceof Boolean
      && ((ExprNodeConstantDesc) elseBranch).getValue() instanceof Boolean;
}
/**
 * Returns true if des is a descendant of ans (ancestor), i.e. des appears
 * somewhere in the subtree rooted at ans (ans itself does not count).
 *
 * @param ans the candidate ancestor node
 * @param des the candidate descendant node
 * @return true iff des is reachable from ans via child links
 */
private boolean isDescendant(Node ans, Node des) {
  if (ans.getChildren() == null) {
    return false;
  }
  for (Node child : ans.getChildren()) {
    // Either the child is the target itself, or the target lives deeper
    // inside the child's subtree.
    if (child == des || isDescendant(child, des)) {
      return true;
    }
  }
  return false;
}
/**
 * Resolves a qualified column reference (tableAlias.column) into an
 * ExprNodeDesc, or records an INVALID_COLUMN error on the context and
 * returns null when the column does not exist under the alias.
 */
protected ExprNodeDesc processQualifiedColRef(TypeCheckCtx ctx, ASTNode expr,
    Object... nodeOutputs) throws SemanticException {
  RowResolver input = ctx.getInputRR();
  // TOK_TABLE_OR_COL's process method has already validated this alias,
  // so it is guaranteed to be a valid, non-ambiguous table alias here.
  String tableAlias = BaseSemanticAnalyzer.unescapeIdentifier(
      expr.getChild(0).getChild(0).getText());
  Object colNode = nodeOutputs[1];
  String colName;
  if (colNode instanceof ExprNodeConstantDesc) {
    colName = ((ExprNodeConstantDesc) colNode).getValue().toString();
  } else if (colNode instanceof ExprNodeColumnDesc) {
    colName = ((ExprNodeColumnDesc) colNode).getColumn();
  } else {
    throw new SemanticException("Unexpected ExprNode : " + colNode);
  }
  ColumnInfo colInfo = input.get(tableAlias, colName);
  if (colInfo == null) {
    ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)), expr);
    return null;
  }
  return toExprNodeDesc(colInfo);
}
/**
 * Default processor: converts an expression AST node (plus the outputs already
 * produced for its children) into an {@link ExprNodeDesc}. Handles, in order:
 * group-by short-circuiting, windowing tokens, table names, star references
 * (*, alias.*), qualified column references, conversion operators, and finally
 * generic function invocations.
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
  ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
  if (desc != null) {
    // Here we know nd represents a group by expression.
    // During the DFS traversal of the AST, a descendant of nd likely set an
    // error because a sub-tree of nd is unlikely to also be a group by
    // expression. For example, in a query such as
    // SELECT *concat(key)* FROM src GROUP BY concat(key), 'key' will be
    // processed before 'concat(key)' and since 'key' is not a group by
    // expression, an error will be set in ctx by ColumnExprProcessor.
    // We can clear the global error when we see that it was set in a
    // descendant node of a group by expression because
    // processGByExpr() returns a ExprNodeDesc that effectively ignores
    // its children. Although the error can be set multiple times by
    // descendant nodes, DFS traversal ensures that the error only needs to
    // be cleared once. Also, for a case like
    // SELECT concat(value, concat(value))... the logic still works as the
    // error is only set with the first 'value'; all node processors quit
    // early if the global error is set.
    if (isDescendant(nd, ctx.getErrorSrcNode())) {
      ctx.setError(null, null);
    }
    return desc;
  }
  // A previously recorded error short-circuits all further processing.
  if (ctx.getError() != null) {
    return null;
  }
  ASTNode expr = (ASTNode) nd;
  /*
   * A Windowing specification get added as a child to a UDAF invocation to distinguish it
   * from similar UDAFs but on different windows.
   * The UDAF is translated to a WindowFunction invocation in the PTFTranslator.
   * So here we just return null for tokens that appear in a Window Specification.
   * When the traversal reaches up to the UDAF invocation its ExprNodeDesc is build using the
   * ColumnInfo in the InputRR. This is similar to how UDAFs are handled in Select lists.
   * The difference is that there is translation for Window related tokens, so we just
   * return null;
   */
  if (windowingTokens.contains(expr.getType())) {
    if (!ctx.getallowWindowing())
      throw new SemanticException(SemanticAnalyzer.generateErrorMessage(expr,
          ErrorMsg.INVALID_FUNCTION.getMsg("Windowing is not supported in the context")));
    return null;
  }
  // Bare table names carry no expression value of their own.
  if (expr.getType() == HiveParser.TOK_TABNAME) {
    return null;
  }
  if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
    if (!ctx.getallowAllColRef())
      throw new SemanticException(SemanticAnalyzer.generateErrorMessage(expr,
          ErrorMsg.INVALID_COLUMN
              .getMsg("All column reference is not supported in the context")));
    RowResolver input = ctx.getInputRR();
    ExprNodeColumnListDesc columnList = new ExprNodeColumnListDesc();
    assert expr.getChildCount() <= 1;
    if (expr.getChildCount() == 1) {
      // table aliased (select a.*, for example)
      ASTNode child = (ASTNode) expr.getChild(0);
      assert child.getType() == HiveParser.TOK_TABNAME;
      assert child.getChildCount() == 1;
      String tableAlias = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
      HashMap<String, ColumnInfo> columns = input.getFieldMap(tableAlias);
      if (columns == null) {
        throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(child));
      }
      for (Map.Entry<String, ColumnInfo> colMap : columns.entrySet()) {
        ColumnInfo colInfo = colMap.getValue();
        // Virtual columns (e.g. internal bookkeeping columns) are excluded
        // from star expansion.
        if (!colInfo.getIsVirtualCol()) {
          columnList.addColumn(toExprNodeDesc(colInfo));
        }
      }
    } else {
      // all columns (select *, for example)
      for (ColumnInfo colInfo : input.getColumnInfos()) {
        if (!colInfo.getIsVirtualCol()) {
          columnList.addColumn(toExprNodeDesc(colInfo));
        }
      }
    }
    return columnList;
  }
  // If the first child is a TOK_TABLE_OR_COL, and nodeOutput[0] is NULL,
  // and the operator is a DOT, then it's a table column reference.
  if (expr.getType() == HiveParser.DOT
      && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
      && nodeOutputs[0] == null) {
    return processQualifiedColRef(ctx, expr, nodeOutputs);
  }
  // Return nulls for conversion operators
  if (conversionFunctionTextHashMap.keySet().contains(expr.getType())
      || specialFunctionTextHashMap.keySet().contains(expr.getType())
      || expr.getToken().getType() == HiveParser.CharSetName
      || expr.getToken().getType() == HiveParser.CharSetLiteral) {
    return null;
  }
  boolean isFunction = (expr.getType() == HiveParser.TOK_FUNCTION ||
      expr.getType() == HiveParser.TOK_FUNCTIONSTAR ||
      expr.getType() == HiveParser.TOK_FUNCTIONDI);
  if (!ctx.getAllowDistinctFunctions() && expr.getType() == HiveParser.TOK_FUNCTIONDI) {
    throw new SemanticException(
        SemanticAnalyzer.generateErrorMessage(expr, ErrorMsg.DISTINCT_NOT_SUPPORTED.getMsg()));
  }
  // Create all children. For function invocations child 0 is the function
  // name token, so argument translation starts at index 1.
  int childrenBegin = (isFunction ? 1 : 0);
  ArrayList<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(
      expr.getChildCount() - childrenBegin);
  for (int ci = childrenBegin; ci < expr.getChildCount(); ci++) {
    if (nodeOutputs[ci] instanceof ExprNodeColumnListDesc) {
      // A star expansion contributes all of its columns as arguments.
      children.addAll(((ExprNodeColumnListDesc) nodeOutputs[ci]).getChildren());
    } else {
      children.add((ExprNodeDesc) nodeOutputs[ci]);
    }
  }
  if (expr.getType() == HiveParser.TOK_FUNCTIONSTAR) {
    if (!ctx.getallowFunctionStar())
      throw new SemanticException(SemanticAnalyzer.generateErrorMessage(expr,
          ErrorMsg.INVALID_COLUMN
              .getMsg(".* reference is not supported in the context")));
    RowResolver input = ctx.getInputRR();
    for (ColumnInfo colInfo : input.getColumnInfos()) {
      if (!colInfo.getIsVirtualCol()) {
        children.add(toExprNodeDesc(colInfo));
      }
    }
  }
  // If any of the children contains null, then return a null
  // this is a hack for now to handle the group by case
  if (children.contains(null)) {
    List<String> possibleColumnNames = getReferenceableColumnAliases(ctx);
    String reason = String.format("(possible column names are: %s)",
        StringUtils.join(possibleColumnNames, ", "));
    ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(0), reason),
        expr);
    return null;
  }
  // Create function desc, translating UDF argument problems into
  // SemanticExceptions anchored at the offending AST child.
  try {
    return getXpathOrFuncExprNodeDesc(expr, isFunction, children, ctx);
  } catch (UDFArgumentTypeException e) {
    throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_TYPE.getMsg(expr
        .getChild(childrenBegin + e.getArgumentId()), e.getMessage()), e);
  } catch (UDFArgumentLengthException e) {
    throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_LENGTH.getMsg(
        expr, e.getMessage()), e);
  } catch (UDFArgumentException e) {
    throw new SemanticException(ErrorMsg.INVALID_ARGUMENT.getMsg(expr, e
        .getMessage()), e);
  }
}
/**
 * Returns the column aliases that may be referenced in the current scope,
 * used to build helpful "possible column names" error messages.
 */
protected List<String> getReferenceableColumnAliases(TypeCheckCtx ctx) {
  RowResolver resolver = ctx.getInputRR();
  // null / -1 arguments appear to request every referenceable alias with no
  // positional filter — confirm against RowResolver's contract.
  return resolver.getReferenceableColumnAliases(null, -1);
}
}
/**
 * Factory method to get DefaultExprProcessor.
 * Each call returns a fresh processor instance.
 *
 * @return a new DefaultExprProcessor.
 */
public DefaultExprProcessor getDefaultExprProcessor() {
  return new DefaultExprProcessor();
}
/**
 * Processor for subquery expressions. Subqueries are currently only allowed
 * as WHERE-clause predicates; any other placement records an
 * UNSUPPORTED_SUBQUERY_EXPRESSION error on the context.
 */
public static class SubQueryExprProcessor implements NodeProcessor {
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    // An earlier error short-circuits processing.
    if (ctx.getError() != null) {
      return null;
    }
    ASTNode expr = (ASTNode) nd;
    // The subquery node used for error reporting; taken from the parent's
    // second child — presumably the TOK_SUBQUERY sibling (confirm against
    // the grammar).
    ASTNode sqNode = (ASTNode) expr.getParent().getChild(1);
    if (!ctx.getallowSubQueryExpr())
      throw new SemanticException(SemanticAnalyzer.generateErrorMessage(sqNode,
          ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg()));
    // Group-by expressions short-circuit, same as in DefaultExprProcessor.
    ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
    if (desc != null) {
      return desc;
    }
    /*
     * Restriction.1.h :: SubQueries only supported in the SQL Where Clause.
     */
    ctx.setError(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(sqNode,
        "Currently SubQuery expressions are only allowed as Where Clause predicates"),
        sqNode);
    return null;
  }
}
/**
 * Factory method to get SubQueryExprProcessor.
 * Each call returns a fresh processor instance.
 *
 * @return a new SubQueryExprProcessor.
 */
public SubQueryExprProcessor getSubQueryExprProcessor() {
  return new SubQueryExprProcessor();
}
}
|
BUPTAnderson/apache-hive-2.1.1-src
|
ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
|
Java
|
apache-2.0
| 56,329 |
/**
* Copyright 2014 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Module dependencies
var express = require('express'),
    favicon = require('serve-favicon'),
    errorhandler = require('errorhandler'),
    bodyParser = require('body-parser'),
    csrf = require('csurf'),
    cookieParser = require('cookie-parser'),
    crypto = require('crypto');
module.exports = function (app) {
app.set('view engine', 'ejs');
app.enable('trust proxy');
// use only https
var env = process.env.NODE_ENV || 'development';
if ('production' === env) {
app.use(errorhandler());
}
// Configure Express
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
// Setup static public directory
app.use(express.static(__dirname + '/../public'));
app.use(favicon(__dirname + '/../public/images/favicon.ico'));
// cookies
var secret = Math.random().toString(36).substring(7);
app.use(cookieParser(secret));
// csrf
var csrfProtection = csrf({ cookie: true });
app.get('/', csrfProtection, function(req, res) {
res.render('index', { ct: req.csrfToken() });
});
// apply to all requests that begin with /api/
// csfr token
app.use('/api/', csrfProtection);
};
|
tonybndt/BlueMix-Tutorials
|
config/express.js
|
JavaScript
|
apache-2.0
| 1,764 |
sap.ui.define(['exports'], function (exports) { 'use strict';
	// Spanish (es) message bundle for sap.ui.webc.fiori: maps component
	// message identifiers to translated UI strings. NOTE(review): this looks
	// like a generated translation artifact — avoid hand-editing the strings;
	// confirm the regeneration workflow before changing values here.
	var messagebundle_es = {
	BARCODE_SCANNER_DIALOG_CANCEL_BUTTON_TXT: "Cancelar",
	BARCODE_SCANNER_DIALOG_LOADING_TXT: "Cargando",
	FCL_START_COLUMN_TXT: "Primera columna",
	FCL_MIDDLE_COLUMN_TXT: "Columna media",
	FCL_END_COLUMN_TXT: "Última columna",
	FCL_START_COLUMN_EXPAND_BUTTON_TOOLTIP: "Desplegar la primera columna",
	FCL_START_COLUMN_COLLAPSE_BUTTON_TOOLTIP: "Comprimir la primera columna",
	FCL_END_COLUMN_EXPAND_BUTTON_TOOLTIP: "Desplegar la última columna",
	FCL_END_COLUMN_COLLAPSE_BUTTON_TOOLTIP: "Comprimir la última columna",
	NOTIFICATION_LIST_ITEM_TXT: "Notificación",
	NOTIFICATION_LIST_ITEM_SHOW_MORE: "Visualizar más",
	NOTIFICATION_LIST_ITEM_SHOW_LESS: "Visualizar menos",
	NOTIFICATION_LIST_ITEM_OVERLOW_BTN_TITLE: "Más",
	NOTIFICATION_LIST_ITEM_CLOSE_BTN_TITLE: "Cerrar",
	NOTIFICATION_LIST_ITEM_READ: "Leídos",
	NOTIFICATION_LIST_ITEM_UNREAD: "No leídos",
	NOTIFICATION_LIST_ITEM_HIGH_PRIORITY_TXT: "Prioridad alta",
	NOTIFICATION_LIST_ITEM_MEDIUM_PRIORITY_TXT: "Prioridad media",
	NOTIFICATION_LIST_ITEM_LOW_PRIORITY_TXT: "Prioridad baja",
	NOTIFICATION_LIST_GROUP_ITEM_TXT: "Grupo de notificaciones",
	NOTIFICATION_LIST_GROUP_ITEM_COUNTER_TXT: "Contador",
	NOTIFICATION_LIST_GROUP_ITEM_CLOSE_BTN_TITLE: "Cerrar todo",
	NOTIFICATION_LIST_GROUP_ITEM_TOGGLE_BTN_COLLAPSE_TITLE: "Ocultar grupo",
	NOTIFICATION_LIST_GROUP_ITEM_TOGGLE_BTN_EXPAND_TITLE: "Desplegar grupo",
	TIMELINE_ARIA_LABEL: "Cronología",
	UPLOADCOLLECTIONITEM_CANCELBUTTON_TEXT: "Cancelar",
	UPLOADCOLLECTIONITEM_RENAMEBUTTON_TEXT: "Cambiar nombre",
	UPLOADCOLLECTIONITEM_ERROR_STATE: "Concluido",
	UPLOADCOLLECTIONITEM_READY_STATE: "Pendiente",
	UPLOADCOLLECTIONITEM_UPLOADING_STATE: "Cargando",
	UPLOADCOLLECTIONITEM_TERMINATE_BUTTON_TEXT: "Finalizar",
	UPLOADCOLLECTIONITEM_RETRY_BUTTON_TEXT: "Volver a intentar",
	UPLOADCOLLECTIONITEM_EDIT_BUTTON_TEXT: "Editar",
	UPLOADCOLLECTION_NO_DATA_TEXT: "No existen ficheros.",
	UPLOADCOLLECTION_NO_DATA_DESCRIPTION: "Soltar los ficheros para cargarlos o utilizar el botón \"Cargar\".",
	UPLOADCOLLECTION_ARIA_ROLE_DESCRIPTION: "Cargar colección",
	UPLOADCOLLECTION_DRAG_FILE_INDICATOR: "Arrastrar ficheros aquí.",
	UPLOADCOLLECTION_DROP_FILE_INDICATOR: "Soltar ficheros para cargalos.",
	SHELLBAR_LABEL: "Barra de shell",
	SHELLBAR_LOGO: "Logotipo",
	SHELLBAR_COPILOT: "CoPilot",
	SHELLBAR_NOTIFICATIONS: "Notificaciones {0}",
	SHELLBAR_PROFILE: "Perfil",
	SHELLBAR_PRODUCTS: "Productos",
	PRODUCT_SWITCH_CONTAINER_LABEL: "Productos",
	SHELLBAR_SEARCH: "Buscar",
	SHELLBAR_OVERFLOW: "Más",
	SHELLBAR_CANCEL: "Cancelar",
	WIZARD_NAV_ARIA_LABEL: "Barra de progreso del asistente",
	WIZARD_LIST_ARIA_LABEL: "Pasos del asistente",
	WIZARD_LIST_ARIA_DESCRIBEDBY: "Para activarlo, pulse la barra espaciadora o Intro",
	WIZARD_ACTIONSHEET_STEPS_ARIA_LABEL: "Pasos",
	WIZARD_OPTIONAL_STEP_ARIA_LABEL: "Opcional",
	WIZARD_STEP_ACTIVE: "Activo",
	WIZARD_STEP_INACTIVE: "Inactivo",
	WIZARD_STEP_ARIA_LABEL: "Paso {0}",
	WIZARD_NAV_ARIA_ROLE_DESCRIPTION: "Asistente",
	WIZARD_NAV_STEP_DEFAULT_HEADING: "Paso",
	VSD_DIALOG_TITLE_SORT: "Ver opciones",
	VSD_SUBMIT_BUTTON: "OK",
	VSD_CANCEL_BUTTON: "Cancelar",
	VSD_RESET_BUTTON: "Reinicializar",
	VSD_SORT_ORDER: "Orden de clasificación",
	VSD_FILTER_BY: "Filtrar por",
	VSD_SORT_BY: "Clasificar por",
	VSD_ORDER_ASCENDING: "Ascendente",
	VSD_ORDER_DESCENDING: "Descendente",
	IM_TITLE_BEFORESEARCH: "Obtengamos resultados",
	IM_SUBTITLE_BEFORESEARCH: "Comience proporcionando los criterios de búsqueda.",
	IM_TITLE_NOACTIVITIES: "Todavía no ha añadido actividades",
	IM_SUBTITLE_NOACTIVITIES: "¿Desea añadir una ahora?",
	IM_TITLE_NODATA: "Todavía no hay datos",
	IM_SUBTITLE_NODATA: "Cuando haya, los verá aquí.",
	IM_TITLE_NOMAIL: "Ningún correo nuevo",
	IM_SUBTITLE_NOMAIL: "Vuelva a comprobarlo de nuevo más tarde.",
	IM_TITLE_NOENTRIES: "Todavía no hay entradas",
	IM_SUBTITLE_NOENTRIES: "Cuando haya, las verá aquí.",
	IM_TITLE_NONOTIFICATIONS: "No tiene ninguna notificación nueva",
	IM_SUBTITLE_NONOTIFICATIONS: "Vuelva a comprobarlo de nuevo más tarde.",
	IM_TITLE_NOSAVEDITEMS: "Todavía no ha añadido favoritos",
	IM_SUBTITLE_NOSAVEDITEMS: "¿Desea crear una lista de las posiciones favoritas ahora?",
	IM_TITLE_NOSEARCHRESULTS: "No existen resultados",
	IM_SUBTITLE_NOSEARCHRESULTS: "Intente modificar los criterios de búsqueda.",
	IM_TITLE_NOTASKS: "No tiene ninguna tarea nueva",
	IM_SUBTITLE_NOTASKS: "Cuando tenga, las verá aquí.",
	IM_TITLE_UNABLETOLOAD: "No se pueden cargar datos",
	IM_SUBTITLE_UNABLETOLOAD: "Compruebe su conexión a internet. Si esto no funciona, intente volver a cargar la página. Si esto tampoco funciona, verifique con su administrador.",
	IM_TITLE_UNABLETOLOADIMAGE: "No se puede cargar la imagen",
	IM_SUBTITLE_UNABLETOLOADIMAGE: "No se ha podido encontrar la imagen en la ubicación especificada o el servidor no responde.",
	IM_TITLE_UNABLETOUPLOAD: "No se pueden cargar datos",
	IM_SUBTITLE_UNABLETOUPLOAD: "Compruebe su conexión a internet. Si esto no funciona, compruebe el formato de fichero y el tamaño de fichero. De lo contrario, póngase en contacto con su administrador.",
	IM_TITLE_ADDCOLUMN: "Parece que hay espacio libre",
	IM_SUBTITLE_ADDCOLUMN: "Puede añadir más columnas en las opciones de tabla.",
	IM_TITLE_ADDPEOPLE: "Aún no ha añadido a nadie al calendario",
	IM_SUBTITLE_ADDPEOPLE: "¿Desea añadir a alguien ahora?",
	IM_TITLE_BALLOONSKY: "Se le valora.",
	IM_SUBTITLE_BALLOONSKY: "Siga trabajando tan bien.",
	IM_TITLE_EMPTYPLANNINGCALENDAR: "Aún no hay nada planificado",
	IM_SUBTITLE_EMPTYPLANNINGCALENDAR: "No hay actividades en este intervalo de tiempo",
	IM_TITLE_FILTERTABLE: "Hay opciones de filtro disponibles",
	IM_SUBTITLE_FILTERTABLE: "Los filtros le ayudan a concentrarse en lo que considera más relevante.",
	IM_TITLE_GROUPTABLE: "Intente agrupar elementos para obtener un mejor resumen",
	IM_SUBTITLE_GROUPTABLE: "Puede optar por agrupar categorías en las opciones de grupo.",
	IM_TITLE_NOFILTERRESULTS: "No existen resultados",
	IM_SUBTITLE_NOFILTERRESULTS: "Intente ajustar sus criterio de filtro.",
	IM_TITLE_PAGENOTFOUND: "Lo sentimos, la página no existe",
	IM_SUBTITLE_PAGENOTFOUND: "Verifique el URL que está utilizando para llamar la aplicación.",
	IM_TITLE_RESIZECOLUMN: "Seleccione su propio ancho de columna",
	IM_SUBTITLE_RESIZECOLUMN: "Puede ajustar columnas arrastrando los bordes de la columna.",
	IM_TITLE_SORTCOLUMN: "¿No ve primero los elementos más importantes?",
	IM_SUBTITLE_SORTCOLUMN: "Seleccione los criterios de clasificación en las opciones de clasificación.",
	IM_TITLE_SUCCESSSCREEN: "Bien hecho.",
	IM_SUBTITLE_SUCCESSSCREEN: "Ha completado todas sus asignaciones de aprendizaje.",
	IM_TITLE_UPLOADCOLLECTION: "Suelte aquí los archivos",
	IM_SUBTITLE_UPLOADCOLLECTION: "También puede cargar varios archivos a la vez.",
	DSC_SIDE_ARIA_LABEL: "Contenido lateral"
	};
	exports.default = messagebundle_es;
});
|
SAP/openui5
|
src/sap.ui.webc.fiori/src/sap/ui/webc/fiori/thirdparty/_chunks/messagebundle_es.js
|
JavaScript
|
apache-2.0
| 7,123 |
/*
By: facug91
From: https://uva.onlinejudge.org/index.php?option=com_onlinejudge&Itemid=8&page=show_problem&problem=1335
Name: Twin Primes
Date: 23/10/2015
*/
#include <bits/stdc++.h>
#define endl "\n"
#define EPS 1e-9
#define MP make_pair
#define F first
#define S second
#define DB(x) cerr << " #" << (#x) << ": " << (x)
#define DBL(x) cerr << " #" << (#x) << ": " << (x) << endl
const double PI = 2.0*acos(0.0);
#define INF 1000000000
//#define MOD 1000000007ll
//#define MAXN 10005
using namespace std;
typedef long long ll;
typedef unsigned long long llu;
typedef pair<int, int> ii; typedef pair<ii, ii> iiii;
typedef vector<int> vi; typedef vector<ii> vii; typedef vector<iiii> viiii;
// Global state (file scope so the ~20 MB sieve is not allocated on the stack).
int s, a, b;            // s: 1-based query index; a, b: twin-prime candidates (b = a + 2)
bool sieve[20000005];   // after setup: sieve[i] == true  <=>  i is prime
vii ans;                // all twin-prime pairs (p, p + 2) with p + 2 < 2*10^7, ascending
int main () {
	ios_base::sync_with_stdio(0); cin.tie(0);
	//cout<<fixed<<setprecision(7); cerr<<fixed<<setprecision(7); //cin.ignore(INT_MAX, ' '); //cout << setfill('0') << setw(5) << 25
	int tc = 1, i, j;  // tc is declared but unused
	// Sieve of Eratosthenes over [0, 2*10^7). Evens > 2 are cleared up front,
	// so the inner loop starts at i*i (odd) and steps by 2*i, visiting only
	// the remaining odd multiples of each odd prime i.
	for (i=0; i<20000005; i++) sieve[i] = true;
	sieve[0] = sieve[1] = false;
	for (i=4; i<20000005; i+=2) sieve[i] = false;
	int sq = sqrt(20000005)+1;
	for (i=3; i<sq; i+=2)
		if (sieve[i])
			for (j=i*i; j<20000005; j+=i+i)
				sieve[j] = false;
	// Collect every twin-prime pair (a, a+2) in increasing order.
	a = 3; b = 5;
	while (b < 20000005) {
		if (sieve[a] && sieve[b]) ans.emplace_back(a, b);
		a += 2;
		b += 2;
	}
	// Answer queries: print the S-th twin-prime pair (input is 1-based).
	while (cin>>s) {
		s--;
		cout<<"("<<ans[s].F<<", "<<ans[s].S<<")"<<endl;
	}
	return 0;
}
|
facug91/OJ-Solutions
|
uva.onlinejudge.org/TwinPrimes.cpp
|
C++
|
apache-2.0
| 1,438 |
package com.digitalpetri.enip.cip.services;
import com.digitalpetri.enip.cip.CipResponseException;
import com.digitalpetri.enip.cip.epath.EPath.PaddedEPath;
import com.digitalpetri.enip.cip.structs.MessageRouterRequest;
import com.digitalpetri.enip.cip.structs.MessageRouterResponse;
import io.netty.buffer.ByteBuf;
import io.netty.util.ReferenceCountUtil;
/**
 * CIP Get_Attributes_All service (service code 0x01): requests every
 * attribute of the object addressed by the request path. The request carries
 * no data; the response payload is returned to the caller as a raw ByteBuf.
 */
public class GetAttributesAllService implements CipService<ByteBuf> {

    public static final int SERVICE_CODE = 0x01;

    private final PaddedEPath requestPath;

    public GetAttributesAllService(PaddedEPath requestPath) {
        this.requestPath = requestPath;
    }

    @Override
    public void encodeRequest(ByteBuf buffer) {
        // Get_Attributes_All has an empty request body, hence the no-op
        // data encoder.
        MessageRouterRequest request = new MessageRouterRequest(
            SERVICE_CODE,
            requestPath,
            data -> {
            }
        );
        MessageRouterRequest.encode(request, buffer);
    }

    @Override
    public ByteBuf decodeResponse(ByteBuf buffer) throws CipResponseException {
        MessageRouterResponse response = MessageRouterResponse.decode(buffer);
        if (response.getGeneralStatus() != 0x00) {
            // Release the payload before throwing so the buffer is not leaked.
            ReferenceCountUtil.release(response.getData());
            throw new CipResponseException(response.getGeneralStatus(), response.getAdditionalStatus());
        }
        // Success: hand the (still-retained) attribute data to the caller,
        // which becomes responsible for releasing it.
        return response.getData();
    }

}
|
digitalpetri/ethernet-ip
|
cip-core/src/main/java/com/digitalpetri/enip/cip/services/GetAttributesAllService.java
|
Java
|
apache-2.0
| 1,389 |
/*
* (C) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hp.ov.sdk.dto.networking;
/**
 * Possible states of a link aggregation group (LAG) port.
 *
 * NOTE(review): the constant names mirror the LACP state bits defined by
 * IEEE 802.3ad (activity, timeout, aggregation, synchronization, collecting,
 * distributing, defaulted, expired) — confirm exact semantics against the
 * HPE OneView API documentation.
 */
public enum LagState {
    Aggregation,
    Collecting,
    Defaulted,
    Distributing,
    Expired,
    LacpActivity,
    LacpTimeout,
    Synchronization,
    // Fallback for values not recognized by this SDK version.
    Unknown
}
|
HewlettPackard/oneview-sdk-java
|
oneview-sdk-java-lib/src/main/java/com/hp/ov/sdk/dto/networking/LagState.java
|
Java
|
apache-2.0
| 839 |
package com.jsh.erp.service.materialCategory;
import com.alibaba.fastjson.JSONObject;
import com.jsh.erp.service.ICommonQuery;
import com.jsh.erp.service.materialProperty.MaterialPropertyResource;
import com.jsh.erp.service.materialProperty.MaterialPropertyService;
import com.jsh.erp.utils.Constants;
import com.jsh.erp.utils.QueryUtils;
import com.jsh.erp.utils.StringUtil;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
import java.util.Map;
/**
 * ICommonQuery adapter for material categories: translates generic
 * query-framework calls (select/count/insert/update/delete) into
 * MaterialCategoryService operations.
 */
@Service(value = "materialCategory_component")
@MaterialCategoryResource
public class MaterialCategoryComponent implements ICommonQuery {

    @Resource
    private MaterialCategoryService materialCategoryService;

    /** Loads a single category by id. */
    @Override
    public Object selectOne(Long id) throws Exception {
        return materialCategoryService.getMaterialCategory(id);
    }

    /** Lists categories matching the search criteria carried in {@code map}. */
    @Override
    public List<?> select(Map<String, String> map) throws Exception {
        return getMaterialCategoryList(map);
    }

    /**
     * Extracts name/parentId from the serialized search criteria and runs
     * a paged query.
     */
    private List<?> getMaterialCategoryList(Map<String, String> map) throws Exception {
        String search = map.get(Constants.SEARCH);
        String name = StringUtil.getInfo(search, "name");
        Integer parentId = StringUtil.parseInteger(StringUtil.getInfo(search, "parentId"));
        // The original computed QueryUtils.order(map) here but never used the
        // result; removed as dead code (QueryUtils.order looks like a pure
        // accessor — confirm it has no side effects).
        return materialCategoryService.select(name, parentId, QueryUtils.offset(map), QueryUtils.rows(map));
    }

    /** Counts categories matching the same criteria as {@link #select}. */
    @Override
    public Long counts(Map<String, String> map) throws Exception {
        String search = map.get(Constants.SEARCH);
        String name = StringUtil.getInfo(search, "name");
        Integer parentId = StringUtil.parseInteger(StringUtil.getInfo(search, "parentId"));
        return materialCategoryService.countMaterialCategory(name, parentId);
    }

    @Override
    public int insert(JSONObject obj, HttpServletRequest request) throws Exception {
        return materialCategoryService.insertMaterialCategory(obj, request);
    }

    @Override
    public int update(JSONObject obj, HttpServletRequest request) throws Exception {
        return materialCategoryService.updateMaterialCategory(obj, request);
    }

    @Override
    public int delete(Long id, HttpServletRequest request) throws Exception {
        return materialCategoryService.deleteMaterialCategory(id, request);
    }

    /** Deletes a comma-separated list of ids in one batch. */
    @Override
    public int deleteBatch(String ids, HttpServletRequest request) throws Exception {
        return materialCategoryService.batchDeleteMaterialCategory(ids, request);
    }

    /** Returns the number of other categories already using {@code name}. */
    @Override
    public int checkIsNameExist(Long id, String name) throws Exception {
        return materialCategoryService.checkIsNameExist(id, name);
    }
}
|
jishenghua/JSH_ERP
|
jshERP-boot/src/main/java/com/jsh/erp/service/materialCategory/MaterialCategoryComponent.java
|
Java
|
apache-2.0
| 2,746 |
<?php
include 'app.php';
// render template
echo $twig->render('contact.twig', array());
|
FSE301-Photerra/photerras
|
contact.php
|
PHP
|
apache-2.0
| 90 |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202111;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
 *
 *             Represents the actions that can be performed on slates.
 *
 *
 * <p>Java class for SlateAction complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="SlateAction"&gt;
 *   &lt;complexContent&gt;
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
 *       &lt;sequence&gt;
 *       &lt;/sequence&gt;
 *     &lt;/restriction&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 *
 *
 */
// NOTE(review): JAXB binding class — appears machine-generated from the Ad
// Manager service schema (see the fragment above); avoid hand-editing.
// Concrete actions are the @XmlSeeAlso subclasses below.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "SlateAction")
@XmlSeeAlso({
    UnarchiveSlates.class,
    ArchiveSlates.class
})
public abstract class SlateAction {
}
|
googleads/googleads-java-lib
|
modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202111/SlateAction.java
|
Java
|
apache-2.0
| 1,537 |
package br.pucminas.icei.audition.repository;
/**
* @author Claudinei Gomes Mendes
*/
import br.pucminas.icei.audition.dto.SearchResponse;
import info.atende.audition.model.AuditEvent;
import info.atende.audition.model.SecurityLevel;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.time.LocalDateTime;
import java.util.*;
/**
 * JPA repository for {@code AuditEvent}: persists events and runs dynamic,
 * criteria-based searches with optional pagination and date-range filtering.
 *
 * @author Claudinei Gomes Mendes
 */
@Component
@Repository
public class AuditEventRepository {

    @PersistenceContext
    private EntityManager em;

    /** Persists a new audit event in the current transaction. */
    @Transactional
    public void create(AuditEvent auditEvent) {
        em.persist(auditEvent);
    }

    /** Searches without a date-range restriction; see the 5-argument overload. */
    public SearchResponse search(Map<String, Object> filtro, Long start, Long max) {
        return search(filtro, start, max, null, null);
    }

    /**
     * Searches audit events matching the given attribute filters, optionally
     * restricted to [dStart, dEnd] and paginated via start/max.
     *
     * NOTE(review): {@code filtro} is emptied as a side effect of building the
     * query (see buildQuery) — callers must not reuse the map afterwards.
     */
    public SearchResponse search(Map<String, Object> filtro, Long start, Long max,
                                 LocalDateTime dStart, LocalDateTime dEnd) {
        String securityLevel = (String) filtro.get("securityLevel");
        if (securityLevel != null) {
            // The caller supplies the level as a plain string; the entity
            // filter needs the enum constant.
            filtro.put("securityLevel", SecurityLevel.valueOf(securityLevel));
        }
        return buildQuery(filtro, start, max, dStart, dEnd);
    }

    /** Lists the distinct application names present in the audit log, sorted. */
    public List<String> listApplicationNames() {
        return em.createQuery("SELECT distinct e.applicationName from AuditEvent e order by e.applicationName").getResultList();
    }

    /** Lists the distinct resource types present in the audit log, sorted. */
    public List<String> listResourceTypes() {
        return em.createQuery("SELECT distinct e.resource.resourceType from AuditEvent e order by e.resource.resourceType").getResultList();
    }

    /**
     * Builds the criteria query from the remaining filter entries, counts the
     * total matches, then fetches the requested (date-ordered) page.
     */
    private SearchResponse buildQuery(Map<String, Object> filtro,
                                      Long start,
                                      Long max,
                                      LocalDateTime dateStart,
                                      LocalDateTime dateEnd) {
        CriteriaBuilder cb = em.getCriteriaBuilder();
        CriteriaQuery<AuditEvent> q = cb.createQuery(AuditEvent.class);
        Root<AuditEvent> root = q.from(AuditEvent.class);
        // Fully typed instead of the raw ArrayList/Iterator the original used.
        List<Predicate> predicates = new ArrayList<>();
        Iterator<Map.Entry<String, Object>> it = filtro.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, Object> pair = it.next();
            String key = pair.getKey();
            if (key.equals("resourceType")) {
                // Nested attribute: filter on resource.resourceType.
                predicates.add(cb.equal(root.get("resource").get(key), pair.getValue()));
            } else if (key.equals("action")) {
                // Actions are matched by prefix.
                predicates.add(cb.like(root.get(key), pair.getValue() + "%"));
            } else {
                predicates.add(cb.equal(root.get(key), pair.getValue()));
            }
            // avoids a ConcurrentModificationException; NOTE(review): this
            // also empties the caller's map — confirm no caller reuses it.
            it.remove();
        }
        // Optional date-range restriction (both bounds required).
        if (dateStart != null && dateEnd != null) {
            predicates.add(cb.between(root.get("dateTime"), dateStart, dateEnd));
        }
        CriteriaQuery<AuditEvent> where = q.where(cb.and(predicates.toArray(new Predicate[predicates.size()])));
        // Count first on the same criteria, then fetch the ordered page.
        Long countResult = JpaUtils.count(em, where);
        q.select(root);
        where.orderBy(cb.asc(root.get("dateTime")));
        TypedQuery<AuditEvent> query = em.createQuery(where);
        // Pagination (applied only when both values are supplied).
        if (start != null && max != null) {
            query.setFirstResult(start.intValue())
                 .setMaxResults(max.intValue());
        }
        List<AuditEvent> result = query.getResultList();
        return new SearchResponse(countResult, result);
    }
}
|
atende/audit-view
|
src/main/java/br/pucminas/icei/audition/repository/AuditEventRepository.java
|
Java
|
apache-2.0
| 3,874 |
package resa.metrics;
import backtype.storm.metric.api.IMetric;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Created by ding on 14-8-12.
*/
/**
 * Histogram-style Storm metric: per key, counts how many observed values fall
 * into each bucket delimited by the {@code xAxis} boundaries. Bucket i counts
 * values <= xAxis[i] (first boundary >= value, via binary search); the extra
 * last bucket counts values beyond the final boundary.
 */
public class StatMetric implements IMetric {

    private final double[] xAxis;
    // Lazily meaningful: stays null when no boundaries were supplied, in
    // which case the metric is inert.
    private Map<String, long[]> data;

    public StatMetric(double[] xAxis) {
        this.xAxis = xAxis;
        if (xAxis != null && xAxis.length > 0) {
            data = new HashMap<>();
        }
    }

    /**
     * Records one observation of {@code value} under {@code key}.
     * No-op when the metric was constructed without bucket boundaries
     * (the original threw NullPointerException in that case).
     */
    public void add(String key, double value) {
        if (data == null) {
            return;
        }
        int pos = Arrays.binarySearch(xAxis, value);
        if (pos < 0) {
            // Not an exact boundary hit: use the insertion point, i.e. the
            // index of the first boundary greater than the value.
            pos = -pos - 1;
        }
        data.computeIfAbsent(key, k -> new long[xAxis.length + 1])[pos]++;
    }

    /**
     * Returns the serialized per-key bucket counts and resets the metric,
     * or null when nothing was recorded since the last reset.
     */
    @Override
    public Object getValueAndReset() {
        if (data == null || data.isEmpty()) {
            return null;
        }
        Map<String, String> ret = new HashMap<>();
        data.forEach((k, v) -> ret.put(k, stat2String(v)));
        data = new HashMap<>();
        return ret;
    }

    /** Serializes one bucket-count array as a comma-separated string. */
    private String stat2String(long[] statData) {
        StringBuilder sb = new StringBuilder();
        sb.append(statData[0]);
        for (int i = 1; i < statData.length; i++) {
            sb.append(',');
            sb.append(statData[i]);
        }
        return sb.toString();
    }
}
|
ADSC-Cloud/resa
|
resa-core/src/main/java/resa/metrics/StatMetric.java
|
Java
|
apache-2.0
| 1,913 |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.internal.statistic.eventLog.validator.persistence;
import com.intellij.openapi.components.*;
import com.intellij.openapi.util.text.StringUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashMap;
import java.util.Map;
@State(
  name = "EventLogWhitelist",
  storages = @Storage(value = EventLogWhitelistSettingsPersistence.USAGE_STATISTICS_XML, roamingType = RoamingType.DISABLED)
)
public class EventLogWhitelistSettingsPersistence implements PersistentStateComponent<Element> {
  public static final String USAGE_STATISTICS_XML = "usage.statistics.xml";

  /** Recorder id -> timestamp of the last whitelist update (clamped to >= 0). */
  private final Map<String, Long> myLastModifications = new HashMap<>();
  /** Recorder id -> custom whitelist path settings. */
  private final Map<String, WhitelistPathSettings> myRecorderToPathSettings = new HashMap<>();

  // XML element/attribute names used by loadState()/getState().
  private static final String WHITELIST_MODIFY = "update";
  private static final String RECORDER_ID = "recorder-id";
  private static final String LAST_MODIFIED = "last-modified";
  private static final String PATH = "path";
  private static final String CUSTOM_PATH = "custom-path";
  private static final String USE_CUSTOM_PATH = "use-custom-path";

  public static EventLogWhitelistSettingsPersistence getInstance() {
    return ServiceManager.getService(EventLogWhitelistSettingsPersistence.class);
  }

  /** Returns the last modification timestamp for the recorder, or 0 when unknown. */
  public long getLastModified(@NotNull String recorderId) {
    return myLastModifications.containsKey(recorderId) ? Math.max(myLastModifications.get(recorderId), 0) : 0;
  }

  /** Stores the last modification timestamp, clamping negative values to 0. */
  public void setLastModified(@NotNull String recorderId, long lastUpdate) {
    myLastModifications.put(recorderId, Math.max(lastUpdate, 0));
  }

  /** Returns path settings for the recorder, or {@code null} when none are stored. */
  @Nullable
  public WhitelistPathSettings getPathSettings(@NotNull String recorderId) {
    return myRecorderToPathSettings.get(recorderId);
  }

  public void setPathSettings(@NotNull String recorderId, @NotNull WhitelistPathSettings settings) {
    myRecorderToPathSettings.put(recorderId, settings);
  }

  @Override
  public void loadState(@NotNull final Element element) {
    myLastModifications.clear();
    for (Element update : element.getChildren(WHITELIST_MODIFY)) {
      final String recorder = update.getAttributeValue(RECORDER_ID);
      if (StringUtil.isNotEmpty(recorder)) {
        myLastModifications.put(recorder, parseLastUpdate(update));
      }
    }

    myRecorderToPathSettings.clear();
    for (Element path : element.getChildren(PATH)) {
      final String recorder = path.getAttributeValue(RECORDER_ID);
      if (StringUtil.isNotEmpty(recorder)) {
        // Entries without a custom path carry no useful state; skip them.
        String customPath = path.getAttributeValue(CUSTOM_PATH);
        if (customPath == null) continue;
        myRecorderToPathSettings.put(recorder, new WhitelistPathSettings(customPath, parseUseCustomPath(path)));
      }
    }
  }

  private static boolean parseUseCustomPath(@NotNull Element update) {
    // Boolean.parseBoolean never throws, so no exception handling is needed
    // (the previous NumberFormatException catch was unreachable).
    return Boolean.parseBoolean(update.getAttributeValue(USE_CUSTOM_PATH, "false"));
  }

  private static long parseLastUpdate(@NotNull Element update) {
    try {
      return Long.parseLong(update.getAttributeValue(LAST_MODIFIED, "0"));
    }
    catch (NumberFormatException e) {
      return 0;
    }
  }

  @Override
  public Element getState() {
    final Element element = new Element("state");
    for (Map.Entry<String, Long> entry : myLastModifications.entrySet()) {
      final Element update = new Element(WHITELIST_MODIFY);
      update.setAttribute(RECORDER_ID, entry.getKey());
      update.setAttribute(LAST_MODIFIED, String.valueOf(entry.getValue()));
      element.addContent(update);
    }
    for (Map.Entry<String, WhitelistPathSettings> entry : myRecorderToPathSettings.entrySet()) {
      final Element path = new Element(PATH);
      path.setAttribute(RECORDER_ID, entry.getKey());
      WhitelistPathSettings value = entry.getValue();
      path.setAttribute(CUSTOM_PATH, value.getCustomPath());
      path.setAttribute(USE_CUSTOM_PATH, String.valueOf(value.isUseCustomPath()));
      element.addContent(path);
    }
    return element;
  }

  @Override
  public void noStateLoaded() {
    // Nothing to reset: fields already hold their defaults.
  }
}
|
leafclick/intellij-community
|
platform/statistics/src/com/intellij/internal/statistic/eventLog/validator/persistence/EventLogWhitelistSettingsPersistence.java
|
Java
|
apache-2.0
| 4,390 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.federation.router;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_HANDLER_COUNT_DEFAULT;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_HANDLER_COUNT_KEY;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_HANDLER_QUEUE_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_HANDLER_QUEUE_SIZE_KEY;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_READER_COUNT_DEFAULT;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_READER_COUNT_KEY;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_READER_QUEUE_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_READER_QUEUE_SIZE_KEY;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Array;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.QuotaUsage;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.AddBlockFlag;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.inotify.EventBatchList;
import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
import org.apache.hadoop.hdfs.protocol.OpenFilesIterator;
import org.apache.hadoop.hdfs.protocol.OpenFilesIterator.OpenFilesType;
import org.apache.hadoop.hdfs.protocol.ReplicatedBlockStats;
import org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReportListing;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.protocol.ZoneReencryptionStatus;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.NamenodeProtocolService;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.federation.metrics.FederationRPCMetrics;
import org.apache.hadoop.hdfs.server.federation.resolver.ActiveNamenodeResolver;
import org.apache.hadoop.hdfs.server.federation.resolver.FederationNamespaceInfo;
import org.apache.hadoop.hdfs.server.federation.resolver.FileSubclusterResolver;
import org.apache.hadoop.hdfs.server.federation.resolver.MountTableResolver;
import org.apache.hadoop.hdfs.server.federation.resolver.PathLocation;
import org.apache.hadoop.hdfs.server.federation.resolver.RemoteLocation;
import org.apache.hadoop.hdfs.server.federation.store.records.MountTable;
import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature;
import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
import org.apache.hadoop.hdfs.server.namenode.NameNode.OperationCategory;
import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.io.EnumSetWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Server;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.BlockingService;
/**
 * This class is responsible for handling all of the RPC calls to the Router.
 * It is created, started, and stopped by {@link Router}. It implements the
 * {@link ClientProtocol} to mimic a
 * {@link org.apache.hadoop.hdfs.server.namenode.NameNode NameNode} and proxies
 * the requests to the active
 * {@link org.apache.hadoop.hdfs.server.namenode.NameNode NameNode}.
 */
public class RouterRpcServer extends AbstractService
implements ClientProtocol, NamenodeProtocol {
  /** Class logger. */
  private static final Logger LOG =
      LoggerFactory.getLogger(RouterRpcServer.class);
  /** Configuration for the RPC server. */
  private Configuration conf;
  /** Identifier for the super user. */
  private final String superUser;
  /** Identifier for the super group. */
  private final String superGroup;
  /** Router using this RPC server. */
  private final Router router;
  /** The RPC server that listens to requests from clients. */
  private final Server rpcServer;
  /** The address for this RPC server. */
  private final InetSocketAddress rpcAddress;
  /** RPC clients to connect to the Namenodes. */
  private final RouterRpcClient rpcClient;
  /** Monitor metrics for the RPC calls. */
  private final RouterRpcMonitor rpcMonitor;
  /** Interface to identify the active NN for a nameservice or blockpool ID. */
  private final ActiveNamenodeResolver namenodeResolver;
  /** Interface to map global name space to HDFS subcluster name spaces. */
  private final FileSubclusterResolver subclusterResolver;
  /** If we are in safe mode, fail requests as if a standby NN. */
  private volatile boolean safeMode;
  /** Category of the operation that a thread is executing. */
  // Set by checkOperation() on every proxied call; per-handler-thread state.
  private final ThreadLocal<OperationCategory> opCategory = new ThreadLocal<>();
  // Modules implementing groups of RPC calls
  /** Router Quota calls. */
  private final Quota quotaCall;
  /** Erasure coding calls. */
  private final ErasureCoding erasureCoding;
  /** NamenodeProtocol calls. */
  private final RouterNamenodeProtocol nnProto;
/**
* Construct a router RPC server.
*
* @param configuration HDFS Configuration.
* @param nnResolver The NN resolver instance to determine active NNs in HA.
* @param fileResolver File resolver to resolve file paths to subclusters.
* @throws IOException If the RPC server could not be created.
*/
  public RouterRpcServer(Configuration configuration, Router router,
      ActiveNamenodeResolver nnResolver, FileSubclusterResolver fileResolver)
          throws IOException {
    super(RouterRpcServer.class.getName());
    this.conf = configuration;
    this.router = router;
    this.namenodeResolver = nnResolver;
    this.subclusterResolver = fileResolver;
    // User and group for reporting
    this.superUser = System.getProperty("user.name");
    this.superGroup = this.conf.get(
        DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_KEY,
        DFSConfigKeys.DFS_PERMISSIONS_SUPERUSERGROUP_DEFAULT);
    // RPC server settings
    int handlerCount = this.conf.getInt(DFS_ROUTER_HANDLER_COUNT_KEY,
        DFS_ROUTER_HANDLER_COUNT_DEFAULT);
    int readerCount = this.conf.getInt(DFS_ROUTER_READER_COUNT_KEY,
        DFS_ROUTER_READER_COUNT_DEFAULT);
    int handlerQueueSize = this.conf.getInt(DFS_ROUTER_HANDLER_QUEUE_SIZE_KEY,
        DFS_ROUTER_HANDLER_QUEUE_SIZE_DEFAULT);
    // Override Hadoop Common IPC setting
    int readerQueueSize = this.conf.getInt(DFS_ROUTER_READER_QUEUE_SIZE_KEY,
        DFS_ROUTER_READER_QUEUE_SIZE_DEFAULT);
    this.conf.setInt(
        CommonConfigurationKeys.IPC_SERVER_RPC_READ_CONNECTION_QUEUE_SIZE_KEY,
        readerQueueSize);
    // Register the protobuf engine BEFORE building the server.
    RPC.setProtocolEngine(this.conf, ClientNamenodeProtocolPB.class,
        ProtobufRpcEngine.class);
    // Translators turn protobuf requests into calls on this instance.
    ClientNamenodeProtocolServerSideTranslatorPB
        clientProtocolServerTranslator =
            new ClientNamenodeProtocolServerSideTranslatorPB(this);
    BlockingService clientNNPbService = ClientNamenodeProtocol
        .newReflectiveBlockingService(clientProtocolServerTranslator);
    NamenodeProtocolServerSideTranslatorPB namenodeProtocolXlator =
        new NamenodeProtocolServerSideTranslatorPB(this);
    BlockingService nnPbService = NamenodeProtocolService
        .newReflectiveBlockingService(namenodeProtocolXlator);
    InetSocketAddress confRpcAddress = conf.getSocketAddr(
        RBFConfigKeys.DFS_ROUTER_RPC_BIND_HOST_KEY,
        RBFConfigKeys.DFS_ROUTER_RPC_ADDRESS_KEY,
        RBFConfigKeys.DFS_ROUTER_RPC_ADDRESS_DEFAULT,
        RBFConfigKeys.DFS_ROUTER_RPC_PORT_DEFAULT);
    LOG.info("RPC server binding to {} with {} handlers for Router {}",
        confRpcAddress, handlerCount, this.router.getRouterId());
    // NOTE: setnumReaders (lowercase 'n') is the actual Hadoop RPC.Builder
    // method name, not a typo in this file.
    this.rpcServer = new RPC.Builder(this.conf)
        .setProtocol(ClientNamenodeProtocolPB.class)
        .setInstance(clientNNPbService)
        .setBindAddress(confRpcAddress.getHostName())
        .setPort(confRpcAddress.getPort())
        .setNumHandlers(handlerCount)
        .setnumReaders(readerCount)
        .setQueueSizePerHandler(handlerQueueSize)
        .setVerbose(false)
        .build();
    // Add all the RPC protocols that the Router implements
    DFSUtil.addPBProtocol(
        conf, NamenodeProtocolPB.class, nnPbService, this.rpcServer);
    // We don't want the server to log the full stack trace for some exceptions
    this.rpcServer.addTerseExceptions(
        RemoteException.class,
        SafeModeException.class,
        FileNotFoundException.class,
        FileAlreadyExistsException.class,
        AccessControlException.class,
        LeaseExpiredException.class,
        NotReplicatedYetException.class,
        IOException.class);
    this.rpcServer.addSuppressedLoggingExceptions(
        StandbyException.class);
    // The RPC-server port can be ephemeral... ensure we have the correct info
    InetSocketAddress listenAddress = this.rpcServer.getListenerAddress();
    this.rpcAddress = new InetSocketAddress(
        confRpcAddress.getHostName(), listenAddress.getPort());
    // Create metrics monitor
    Class<? extends RouterRpcMonitor> rpcMonitorClass = this.conf.getClass(
        RBFConfigKeys.DFS_ROUTER_METRICS_CLASS,
        RBFConfigKeys.DFS_ROUTER_METRICS_CLASS_DEFAULT,
        RouterRpcMonitor.class);
    this.rpcMonitor = ReflectionUtils.newInstance(rpcMonitorClass, conf);
    // Create the client
    this.rpcClient = new RouterRpcClient(this.conf, this.router.getRouterId(),
        this.namenodeResolver, this.rpcMonitor);
    // Initialize modules
    this.quotaCall = new Quota(this.router, this);
    this.erasureCoding = new ErasureCoding(this);
    this.nnProto = new RouterNamenodeProtocol(this);
  }
@Override
protected void serviceInit(Configuration configuration) throws Exception {
this.conf = configuration;
if (this.rpcMonitor == null) {
LOG.error("Cannot instantiate Router RPC metrics class");
} else {
this.rpcMonitor.init(this.conf, this, this.router.getStateStore());
}
super.serviceInit(configuration);
}
@Override
protected void serviceStart() throws Exception {
if (this.rpcServer != null) {
this.rpcServer.start();
LOG.info("Router RPC up at: {}", this.getRpcAddress());
}
super.serviceStart();
}
@Override
protected void serviceStop() throws Exception {
if (this.rpcServer != null) {
this.rpcServer.stop();
}
if (rpcMonitor != null) {
this.rpcMonitor.close();
}
super.serviceStop();
}
/**
* Get the RPC client to the Namenode.
*
* @return RPC clients to the Namenodes.
*/
public RouterRpcClient getRPCClient() {
return rpcClient;
}
/**
* Get the subcluster resolver.
*
* @return Subcluster resolver.
*/
public FileSubclusterResolver getSubclusterResolver() {
return subclusterResolver;
}
/**
* Get the RPC monitor and metrics.
*
* @return RPC monitor and metrics.
*/
public RouterRpcMonitor getRPCMonitor() {
return rpcMonitor;
}
/**
* Allow access to the client RPC server for testing.
*
* @return The RPC server.
*/
@VisibleForTesting
public Server getServer() {
return rpcServer;
}
/**
* Get the RPC address of the service.
*
* @return RPC service address.
*/
public InetSocketAddress getRpcAddress() {
return rpcAddress;
}
/**
* Check if the Router is in safe mode. We should only see READ, WRITE, and
* UNCHECKED. It includes a default handler when we haven't implemented an
* operation. If not supported, it always throws an exception reporting the
* operation.
*
* @param op Category of the operation to check.
* @param supported If the operation is supported or not. If not, it will
* throw an UnsupportedOperationException.
* @throws SafeModeException If the Router is in safe mode and cannot serve
* client requests.
* @throws UnsupportedOperationException If the operation is not supported.
*/
protected void checkOperation(OperationCategory op, boolean supported)
throws StandbyException, UnsupportedOperationException {
checkOperation(op);
if (!supported) {
if (rpcMonitor != null) {
rpcMonitor.proxyOpNotImplemented();
}
String methodName = getMethodName();
throw new UnsupportedOperationException(
"Operation \"" + methodName + "\" is not supported");
}
}
/**
* Check if the Router is in safe mode. We should only see READ, WRITE, and
* UNCHECKED. This function should be called by all ClientProtocol functions.
*
* @param op Category of the operation to check.
* @throws SafeModeException If the Router is in safe mode and cannot serve
* client requests.
*/
protected void checkOperation(OperationCategory op)
throws StandbyException {
// Log the function we are currently calling.
if (rpcMonitor != null) {
rpcMonitor.startOp();
}
// Log the function we are currently calling.
if (LOG.isDebugEnabled()) {
String methodName = getMethodName();
LOG.debug("Proxying operation: {}", methodName);
}
// Store the category of the operation category for this thread
opCategory.set(op);
// We allow unchecked and read operations
if (op == OperationCategory.UNCHECKED || op == OperationCategory.READ) {
return;
}
if (safeMode) {
// Throw standby exception, router is not available
if (rpcMonitor != null) {
rpcMonitor.routerFailureSafemode();
}
throw new StandbyException("Router " + router.getRouterId() +
" is in safe mode and cannot handle " + op + " requests");
}
}
/**
* In safe mode all RPC requests will fail and return a standby exception.
* The client will try another Router, similar to the client retry logic for
* HA.
*
* @param mode True if enabled, False if disabled.
*/
public void setSafeMode(boolean mode) {
this.safeMode = mode;
}
/**
* Check if the Router is in safe mode and cannot serve RPC calls.
*
* @return If the Router is in safe mode.
*/
public boolean isInSafeMode() {
return this.safeMode;
}
  @Override // ClientProtocol
  public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
      throws IOException {
    // Not implemented by the Router: checkOperation(..., false) always throws
    // (UnsupportedOperationException, or StandbyException in safe mode), so
    // the return below is unreachable.
    checkOperation(OperationCategory.WRITE, false);
    return null;
  }
  /**
   * Get the delegation token from each name service.
   *
   * @param renewer Renewer of the token.
   * @return Name service -> Token.
   * @throws IOException If the tokens cannot be obtained.
   */
  public Map<FederationNamespaceInfo, Token<DelegationTokenIdentifier>>
      getDelegationTokens(Text renewer) throws IOException {
    // Not implemented yet: checkOperation(..., false) always throws.
    checkOperation(OperationCategory.WRITE, false);
    return null;
  }
  @Override // ClientProtocol
  public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
      throws IOException {
    // Not implemented: checkOperation(..., false) always throws.
    checkOperation(OperationCategory.WRITE, false);
    return 0;
  }
  @Override // ClientProtocol
  public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
      throws IOException {
    // Not implemented: checkOperation(..., false) always throws.
    checkOperation(OperationCategory.WRITE, false);
  }
@Override // ClientProtocol
public LocatedBlocks getBlockLocations(String src, final long offset,
final long length) throws IOException {
checkOperation(OperationCategory.READ);
List<RemoteLocation> locations = getLocationsForPath(src, false);
RemoteMethod remoteMethod = new RemoteMethod("getBlockLocations",
new Class<?>[] {String.class, long.class, long.class},
new RemoteParam(), offset, length);
return (LocatedBlocks) rpcClient.invokeSequential(locations, remoteMethod,
LocatedBlocks.class, null);
}
@Override // ClientProtocol
public FsServerDefaults getServerDefaults() throws IOException {
checkOperation(OperationCategory.READ);
RemoteMethod method = new RemoteMethod("getServerDefaults");
String ns = subclusterResolver.getDefaultNamespace();
return (FsServerDefaults) rpcClient.invokeSingle(ns, method);
}
  @Override // ClientProtocol
  public HdfsFileStatus create(String src, FsPermission masked,
      String clientName, EnumSetWritable<CreateFlag> flag,
      boolean createParent, short replication, long blockSize,
      CryptoProtocolVersion[] supportedVersions, String ecPolicyName)
      throws IOException {
    checkOperation(OperationCategory.WRITE);
    // If the path maps to multiple subclusters (isPathAll — presumably "path
    // spans every subcluster"; confirm against MountTableResolver), create the
    // parent directory first so it exists everywhere.
    if (createParent && isPathAll(src)) {
      int index = src.lastIndexOf(Path.SEPARATOR);
      String parent = src.substring(0, index);
      LOG.debug("Creating {} requires creating parent {}", src, parent);
      FsPermission parentPermissions = getParentPermission(masked);
      boolean success = mkdirs(parent, parentPermissions, createParent);
      if (!success) {
        // This shouldn't happen as mkdirs returns true or exception
        LOG.error("Couldn't create parents for {}", src);
      }
    }
    // Pick the single subcluster location to create the file in, then proxy.
    RemoteLocation createLocation = getCreateLocation(src);
    RemoteMethod method = new RemoteMethod("create",
        new Class<?>[] {String.class, FsPermission.class, String.class,
            EnumSetWritable.class, boolean.class, short.class,
            long.class, CryptoProtocolVersion[].class,
            String.class},
        createLocation.getDest(), masked, clientName, flag, createParent,
        replication, blockSize, supportedVersions, ecPolicyName);
    return (HdfsFileStatus) rpcClient.invokeSingle(createLocation, method);
  }
/**
* Get the permissions for the parent of a child with given permissions.
* Add implicit u+wx permission for parent. This is based on
* @{FSDirMkdirOp#addImplicitUwx}.
* @param mask The permission mask of the child.
* @return The permission mask of the parent.
*/
private static FsPermission getParentPermission(final FsPermission mask) {
FsPermission ret = new FsPermission(
mask.getUserAction().or(FsAction.WRITE_EXECUTE),
mask.getGroupAction(),
mask.getOtherAction());
return ret;
}
  /**
   * Get the location to create a file. It checks if the file already existed
   * in one of the locations.
   *
   * @param src Path of the file to check.
   * @return The remote location for this file.
   * @throws IOException If the file has no creation location.
   */
  protected RemoteLocation getCreateLocation(final String src)
      throws IOException {
    final List<RemoteLocation> locations = getLocationsForPath(src, true);
    if (locations == null || locations.isEmpty()) {
      throw new IOException("Cannot get locations to create " + src);
    }
    // Default: first resolved location, unless the file already exists
    // somewhere else among the candidates.
    RemoteLocation createLocation = locations.get(0);
    if (locations.size() > 1) {
      try {
        // Check if this file already exists in other subclusters
        LocatedBlocks existingLocation = getBlockLocations(src, 0, 1);
        if (existingLocation != null) {
          // Forward to the existing location and let the NN handle the error
          LocatedBlock existingLocationLastLocatedBlock =
              existingLocation.getLastLocatedBlock();
          if (existingLocationLastLocatedBlock == null) {
            // The block has no blocks yet, check for the meta data
            for (RemoteLocation location : locations) {
              RemoteMethod method = new RemoteMethod("getFileInfo",
                  new Class<?>[] {String.class}, new RemoteParam());
              if (rpcClient.invokeSingle(location, method) != null) {
                createLocation = location;
                break;
              }
            }
          } else {
            // Use the block pool of the file's last block to pick the
            // subcluster that already owns the file.
            ExtendedBlock existingLocationLastBlock =
                existingLocationLastLocatedBlock.getBlock();
            String blockPoolId = existingLocationLastBlock.getBlockPoolId();
            createLocation = getLocationForPath(src, true, blockPoolId);
          }
        }
      } catch (FileNotFoundException fne) {
        // Ignore if the file is not found: creating a new file is the
        // normal case, so fall through with the default location.
      }
    }
    return createLocation;
  }
// Medium
@Override // ClientProtocol
public LastBlockWithStatus append(String src, final String clientName,
final EnumSetWritable<CreateFlag> flag) throws IOException {
checkOperation(OperationCategory.WRITE);
List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("append",
new Class<?>[] {String.class, String.class, EnumSetWritable.class},
new RemoteParam(), clientName, flag);
return rpcClient.invokeSequential(
locations, method, LastBlockWithStatus.class, null);
}
// Low
@Override // ClientProtocol
public boolean recoverLease(String src, String clientName)
throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("recoverLease",
new Class<?>[] {String.class, String.class}, new RemoteParam(),
clientName);
Object result = rpcClient.invokeSequential(
locations, method, Boolean.class, Boolean.TRUE);
return (boolean) result;
}
@Override // ClientProtocol
public boolean setReplication(String src, short replication)
throws IOException {
checkOperation(OperationCategory.WRITE);
List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("setReplication",
new Class<?>[] {String.class, short.class}, new RemoteParam(),
replication);
Object result = rpcClient.invokeSequential(
locations, method, Boolean.class, Boolean.TRUE);
return (boolean) result;
}
@Override
public void setStoragePolicy(String src, String policyName)
throws IOException {
checkOperation(OperationCategory.WRITE);
List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("setStoragePolicy",
new Class<?>[] {String.class, String.class},
new RemoteParam(), policyName);
rpcClient.invokeSequential(locations, method, null, null);
}
@Override
public BlockStoragePolicy[] getStoragePolicies() throws IOException {
checkOperation(OperationCategory.READ);
RemoteMethod method = new RemoteMethod("getStoragePolicies");
String ns = subclusterResolver.getDefaultNamespace();
return (BlockStoragePolicy[]) rpcClient.invokeSingle(ns, method);
}
@Override // ClientProtocol
public void setPermission(String src, FsPermission permissions)
throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("setPermission",
new Class<?>[] {String.class, FsPermission.class},
new RemoteParam(), permissions);
if (isPathAll(src)) {
rpcClient.invokeConcurrent(locations, method);
} else {
rpcClient.invokeSequential(locations, method);
}
}
@Override // ClientProtocol
public void setOwner(String src, String username, String groupname)
throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("setOwner",
new Class<?>[] {String.class, String.class, String.class},
new RemoteParam(), username, groupname);
if (isPathAll(src)) {
rpcClient.invokeConcurrent(locations, method);
} else {
rpcClient.invokeSequential(locations, method);
}
}
/**
* Excluded and favored nodes are not verified and will be ignored by
* placement policy if they are not in the same nameservice as the file.
*/
@Override // ClientProtocol
public LocatedBlock addBlock(String src, String clientName,
ExtendedBlock previous, DatanodeInfo[] excludedNodes, long fileId,
String[] favoredNodes, EnumSet<AddBlockFlag> addBlockFlags)
throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("addBlock",
new Class<?>[] {String.class, String.class, ExtendedBlock.class,
DatanodeInfo[].class, long.class, String[].class,
EnumSet.class},
new RemoteParam(), clientName, previous, excludedNodes, fileId,
favoredNodes, addBlockFlags);
// TODO verify the excludedNodes and favoredNodes are acceptable to this NN
return (LocatedBlock) rpcClient.invokeSequential(
locations, method, LocatedBlock.class, null);
}
  /**
   * Get an additional datanode for an existing write pipeline.
   *
   * Excluded nodes are not verified and will be ignored by placement if they
   * are not in the same nameservice as the file.
   *
   * @param src Path of the file whose pipeline is being extended.
   * @param fileId Inode id of the file.
   * @param blk Block whose pipeline needs another datanode.
   * @param existings Datanodes already in the pipeline.
   * @param existingStorageIDs Storage ids of the existing datanodes.
   * @param excludes Datanodes to avoid (unverified, see note above).
   * @param numAdditionalNodes Number of datanodes to add.
   * @param clientName Name of the writing client.
   * @return Block with the updated set of pipeline locations.
   * @throws IOException If no additional datanode can be obtained.
   */
  @Override // ClientProtocol
  public LocatedBlock getAdditionalDatanode(final String src, final long fileId,
      final ExtendedBlock blk, final DatanodeInfo[] existings,
      final String[] existingStorageIDs, final DatanodeInfo[] excludes,
      final int numAdditionalNodes, final String clientName)
      throws IOException {
    checkOperation(OperationCategory.READ);
    // Invoked sequentially over the candidate locations of the path
    final List<RemoteLocation> locations = getLocationsForPath(src, false);
    RemoteMethod method = new RemoteMethod("getAdditionalDatanode",
        new Class<?>[] {String.class, long.class, ExtendedBlock.class,
                        DatanodeInfo[].class, String[].class,
                        DatanodeInfo[].class, int.class, String.class},
        new RemoteParam(), fileId, blk, existings, existingStorageIDs, excludes,
        numAdditionalNodes, clientName);
    return (LocatedBlock) rpcClient.invokeSequential(
        locations, method, LocatedBlock.class, null);
  }
@Override // ClientProtocol
public void abandonBlock(ExtendedBlock b, long fileId, String src,
String holder) throws IOException {
checkOperation(OperationCategory.WRITE);
RemoteMethod method = new RemoteMethod("abandonBlock",
new Class<?>[] {ExtendedBlock.class, long.class, String.class,
String.class},
b, fileId, new RemoteParam(), holder);
rpcClient.invokeSingle(b, method);
}
@Override // ClientProtocol
public boolean complete(String src, String clientName, ExtendedBlock last,
long fileId) throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("complete",
new Class<?>[] {String.class, String.class, ExtendedBlock.class,
long.class},
new RemoteParam(), clientName, last, fileId);
// Complete can return true/false, so don't expect a result
return ((Boolean) rpcClient.invokeSequential(
locations, method, Boolean.class, null)).booleanValue();
}
@Override // ClientProtocol
public LocatedBlock updateBlockForPipeline(
ExtendedBlock block, String clientName) throws IOException {
checkOperation(OperationCategory.WRITE);
RemoteMethod method = new RemoteMethod("updateBlockForPipeline",
new Class<?>[] {ExtendedBlock.class, String.class},
block, clientName);
return (LocatedBlock) rpcClient.invokeSingle(block, method);
}
  /**
   * Update the pipeline of a block after datanodes have been replaced.
   *
   * Datanode are not verified to be in the same nameservice as the old block.
   * TODO This may require validation.
   *
   * @param clientName Name of the writing client.
   * @param oldBlock Block whose pipeline is being replaced.
   * @param newBlock Replacement block.
   * @param newNodes Datanodes of the new pipeline.
   * @param newStorageIDs Storage ids for the new pipeline.
   * @throws IOException If the pipeline cannot be updated.
   */
  @Override // ClientProtocol
  public void updatePipeline(String clientName, ExtendedBlock oldBlock,
      ExtendedBlock newBlock, DatanodeID[] newNodes, String[] newStorageIDs)
      throws IOException {
    checkOperation(OperationCategory.WRITE);
    // Route by the old block; its pool id identifies the owning namespace
    RemoteMethod method = new RemoteMethod("updatePipeline",
        new Class<?>[] {String.class, ExtendedBlock.class, ExtendedBlock.class,
            DatanodeID[].class, String[].class},
        clientName, oldBlock, newBlock, newNodes, newStorageIDs);
    rpcClient.invokeSingle(oldBlock, method);
  }
@Override // ClientProtocol
public long getPreferredBlockSize(String src) throws IOException {
checkOperation(OperationCategory.READ);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("getPreferredBlockSize",
new Class<?>[] {String.class}, new RemoteParam());
return ((Long) rpcClient.invokeSequential(
locations, method, Long.class, null)).longValue();
}
  /**
   * Determines combinations of eligible src/dst locations for a rename. A
   * rename cannot change the namespace. Renames are only allowed if there is an
   * eligible dst location in the same namespace as the source.
   *
   * @param srcLocations List of all potential source destinations where the
   *          path may be located. On return this list is trimmed to include
   *          only the paths that have corresponding destinations in the same
   *          namespace.
   * @param dst The destination path
   * @return A map of all eligible source namespaces and their corresponding
   *         replacement value.
   * @throws IOException If the dst paths could not be determined.
   */
  private RemoteParam getRenameDestinations(
      final List<RemoteLocation> srcLocations, final String dst)
      throws IOException {
    final List<RemoteLocation> dstLocations = getLocationsForPath(dst, true);
    final Map<RemoteLocation, String> dstMap = new HashMap<>();
    // Pair each source location with a destination in the same nameservice;
    // sources without one are removed from srcLocations (caller-visible
    // side effect, see javadoc)
    Iterator<RemoteLocation> iterator = srcLocations.iterator();
    while (iterator.hasNext()) {
      RemoteLocation srcLocation = iterator.next();
      RemoteLocation eligibleDst =
          getFirstMatchingLocation(srcLocation, dstLocations);
      if (eligibleDst != null) {
        // Use this dst for this source location
        dstMap.put(srcLocation, eligibleDst.getDest());
      } else {
        // This src destination is not valid, remove from the source list
        iterator.remove();
      }
    }
    return new RemoteParam(dstMap);
  }
/**
* Get first matching location.
*
* @param location Location we are looking for.
* @param locations List of locations.
* @return The first matchin location in the list.
*/
private RemoteLocation getFirstMatchingLocation(RemoteLocation location,
List<RemoteLocation> locations) {
for (RemoteLocation loc : locations) {
if (loc.getNameserviceId().equals(location.getNameserviceId())) {
// Return first matching location
return loc;
}
}
return null;
}
  /**
   * Rename a file or directory (deprecated API, see rename2).
   * The rename can only happen within a single namespace: source locations
   * without an eligible destination in the same namespace are discarded.
   *
   * @param src Source path.
   * @param dst Destination path.
   * @return True if the rename succeeded.
   * @throws IOException If no eligible destination exists in the same
   *           namespace as the source.
   */
  @Deprecated
  @Override // ClientProtocol
  public boolean rename(final String src, final String dst)
      throws IOException {
    checkOperation(OperationCategory.WRITE);
    final List<RemoteLocation> srcLocations =
        getLocationsForPath(src, true, false);
    // srcLocations may be trimmed by getRenameDestinations()
    final List<RemoteLocation> locs = new LinkedList<>(srcLocations);
    RemoteParam dstParam = getRenameDestinations(locs, dst);
    if (locs.isEmpty()) {
      throw new IOException(
          "Rename of " + src + " to " + dst + " is not allowed," +
          " no eligible destination in the same namespace was found.");
    }
    RemoteMethod method = new RemoteMethod("rename",
        new Class<?>[] {String.class, String.class},
        new RemoteParam(), dstParam);
    // NOTE(review): Boolean.TRUE appears to act as the expected result for
    // the sequential invocation — confirm invokeSequential semantics
    return ((Boolean) rpcClient.invokeSequential(
        locs, method, Boolean.class, Boolean.TRUE)).booleanValue();
  }
  /**
   * Rename a file or directory with rename options.
   * The rename can only happen within a single namespace: source locations
   * without an eligible destination in the same namespace are discarded.
   *
   * @param src Source path.
   * @param dst Destination path.
   * @param options Rename options.
   * @throws IOException If no eligible destination exists in the same
   *           namespace as the source, or the rename fails.
   */
  @Override // ClientProtocol
  public void rename2(final String src, final String dst,
      final Options.Rename... options) throws IOException {
    checkOperation(OperationCategory.WRITE);
    final List<RemoteLocation> srcLocations =
        getLocationsForPath(src, true, false);
    // srcLocations may be trimmed by getRenameDestinations()
    final List<RemoteLocation> locs = new LinkedList<>(srcLocations);
    RemoteParam dstParam = getRenameDestinations(locs, dst);
    if (locs.isEmpty()) {
      throw new IOException(
          "Rename of " + src + " to " + dst + " is not allowed," +
          " no eligible destination in the same namespace was found.");
    }
    RemoteMethod method = new RemoteMethod("rename2",
        new Class<?>[] {String.class, String.class, options.getClass()},
        new RemoteParam(), dstParam, options);
    rpcClient.invokeSequential(locs, method, null, null);
  }
@Override // ClientProtocol
public void concat(String trg, String[] src) throws IOException {
checkOperation(OperationCategory.WRITE);
// See if the src and target files are all in the same namespace
LocatedBlocks targetBlocks = getBlockLocations(trg, 0, 1);
if (targetBlocks == null) {
throw new IOException("Cannot locate blocks for target file - " + trg);
}
LocatedBlock lastLocatedBlock = targetBlocks.getLastLocatedBlock();
String targetBlockPoolId = lastLocatedBlock.getBlock().getBlockPoolId();
for (String source : src) {
LocatedBlocks sourceBlocks = getBlockLocations(source, 0, 1);
if (sourceBlocks == null) {
throw new IOException(
"Cannot located blocks for source file " + source);
}
String sourceBlockPoolId =
sourceBlocks.getLastLocatedBlock().getBlock().getBlockPoolId();
if (!sourceBlockPoolId.equals(targetBlockPoolId)) {
throw new IOException("Cannot concatenate source file " + source
+ " because it is located in a different namespace"
+ " with block pool id " + sourceBlockPoolId
+ " from the target file with block pool id "
+ targetBlockPoolId);
}
}
// Find locations in the matching namespace.
final RemoteLocation targetDestination =
getLocationForPath(trg, true, targetBlockPoolId);
String[] sourceDestinations = new String[src.length];
for (int i = 0; i < src.length; i++) {
String sourceFile = src[i];
RemoteLocation location =
getLocationForPath(sourceFile, true, targetBlockPoolId);
sourceDestinations[i] = location.getDest();
}
// Invoke
RemoteMethod method = new RemoteMethod("concat",
new Class<?>[] {String.class, String[].class},
targetDestination.getDest(), sourceDestinations);
rpcClient.invokeSingle(targetDestination, method);
}
@Override // ClientProtocol
public boolean truncate(String src, long newLength, String clientName)
throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("truncate",
new Class<?>[] {String.class, long.class, String.class},
new RemoteParam(), newLength, clientName);
return ((Boolean) rpcClient.invokeSequential(locations, method,
Boolean.class, Boolean.TRUE)).booleanValue();
}
@Override // ClientProtocol
public boolean delete(String src, boolean recursive) throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations =
getLocationsForPath(src, true, false);
RemoteMethod method = new RemoteMethod("delete",
new Class<?>[] {String.class, boolean.class}, new RemoteParam(),
recursive);
if (isPathAll(src)) {
return rpcClient.invokeAll(locations, method);
} else {
return rpcClient.invokeSequential(locations, method,
Boolean.class, Boolean.TRUE).booleanValue();
}
}
@Override // ClientProtocol
public boolean mkdirs(String src, FsPermission masked, boolean createParent)
throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("mkdirs",
new Class<?>[] {String.class, FsPermission.class, boolean.class},
new RemoteParam(), masked, createParent);
// Create in all locations
if (isPathAll(src)) {
return rpcClient.invokeAll(locations, method);
}
if (locations.size() > 1) {
// Check if this directory already exists
try {
HdfsFileStatus fileStatus = getFileInfo(src);
if (fileStatus != null) {
// When existing, the NN doesn't return an exception; return true
return true;
}
} catch (IOException ioe) {
// Can't query if this file exists or not.
LOG.error("Error requesting file info for path {} while proxing mkdirs",
src, ioe);
}
}
RemoteLocation firstLocation = locations.get(0);
return ((Boolean) rpcClient.invokeSingle(firstLocation, method))
.booleanValue();
}
@Override // ClientProtocol
public void renewLease(String clientName) throws IOException {
checkOperation(OperationCategory.WRITE);
RemoteMethod method = new RemoteMethod("renewLease",
new Class<?>[] {String.class}, clientName);
Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
rpcClient.invokeConcurrent(nss, method, false, false);
}
  /**
   * Get a partial listing of a directory, merging the results from every
   * subcluster the path maps to and adding the mount points nested directly
   * under it.
   *
   * @param src Path of the directory to list.
   * @param startAfter Name of the last entry returned by the previous call.
   * @param needLocation Whether to include block locations in the entries.
   * @return Merged partial listing; null if no namenode has the directory
   *         and no mount points exist under it.
   * @throws IOException If the listing cannot be retrieved.
   */
  @Override // ClientProtocol
  public DirectoryListing getListing(String src, byte[] startAfter,
      boolean needLocation) throws IOException {
    checkOperation(OperationCategory.READ);
    // Locate the dir and fetch the listing
    final List<RemoteLocation> locations = getLocationsForPath(src, true);
    RemoteMethod method = new RemoteMethod("getListing",
        new Class<?>[] {String.class, startAfter.getClass(), boolean.class},
        new RemoteParam(), startAfter, needLocation);
    Map<RemoteLocation, DirectoryListing> listings =
        rpcClient.invokeConcurrent(
            locations, method, false, false, DirectoryListing.class);
    // TreeMap keeps the merged entries sorted by file name
    Map<String, HdfsFileStatus> nnListing = new TreeMap<>();
    int totalRemainingEntries = 0;
    int remainingEntries = 0;
    boolean namenodeListingExists = false;
    if (listings != null) {
      // Check the subcluster listing with the smallest name: the merged page
      // can only safely extend to the smallest "last entry" across
      // subclusters, anything beyond it may be missing entries
      String lastName = null;
      for (Entry<RemoteLocation, DirectoryListing> entry :
          listings.entrySet()) {
        RemoteLocation location = entry.getKey();
        DirectoryListing listing = entry.getValue();
        if (listing == null) {
          LOG.debug("Cannot get listing from {}", location);
        } else {
          totalRemainingEntries += listing.getRemainingEntries();
          HdfsFileStatus[] partialListing = listing.getPartialListing();
          int length = partialListing.length;
          if (length > 0) {
            HdfsFileStatus lastLocalEntry = partialListing[length-1];
            String lastLocalName = lastLocalEntry.getLocalName();
            if (lastName == null || lastName.compareTo(lastLocalName) > 0) {
              lastName = lastLocalName;
            }
          }
        }
      }
      // Add existing entries
      for (Object value : listings.values()) {
        DirectoryListing listing = (DirectoryListing) value;
        if (listing != null) {
          namenodeListingExists = true;
          for (HdfsFileStatus file : listing.getPartialListing()) {
            String filename = file.getLocalName();
            if (totalRemainingEntries > 0 && filename.compareTo(lastName) > 0) {
              // Discarding entries further than the lastName
              remainingEntries++;
            } else {
              nnListing.put(filename, file);
            }
          }
          remainingEntries += listing.getRemainingEntries();
        }
      }
    }
    // Add mount points at this level in the tree
    final List<String> children = subclusterResolver.getMountPoints(src);
    if (children != null) {
      // Get the dates for each mount point
      Map<String, Long> dates = getMountPointDates(src);
      // Create virtual folder with the mount name
      for (String child : children) {
        long date = 0;
        if (dates != null && dates.containsKey(child)) {
          date = dates.get(child);
        }
        // TODO add number of children
        HdfsFileStatus dirStatus = getMountPointStatus(child, 0, date);
        // This may overwrite existing listing entries with the mount point
        // TODO don't add if already there?
        nnListing.put(child, dirStatus);
      }
    }
    if (!namenodeListingExists && nnListing.size() == 0) {
      // NN returns a null object if the directory cannot be found and has no
      // listing. If we didn't retrieve any NN listing data, and there are no
      // mount points here, return null.
      return null;
    }
    // Generate combined listing
    HdfsFileStatus[] combinedData = new HdfsFileStatus[nnListing.size()];
    combinedData = nnListing.values().toArray(combinedData);
    return new DirectoryListing(combinedData, remainingEntries);
  }
  /**
   * Get the status of a file or directory. If no subcluster has the path but
   * it matches a mount point, a synthetic directory status is returned for
   * the mount point.
   *
   * @param src Path to query.
   * @return File status, or null if the path exists nowhere.
   * @throws IOException If the query fails in the subclusters.
   */
  @Override // ClientProtocol
  public HdfsFileStatus getFileInfo(String src) throws IOException {
    checkOperation(OperationCategory.READ);
    final List<RemoteLocation> locations = getLocationsForPath(src, false);
    RemoteMethod method = new RemoteMethod("getFileInfo",
        new Class<?>[] {String.class}, new RemoteParam());
    HdfsFileStatus ret = null;
    // If it's a directory, we check in all locations
    if (isPathAll(src)) {
      ret = getFileInfoAll(locations, method);
    } else {
      // Check for file information sequentially
      ret = (HdfsFileStatus) rpcClient.invokeSequential(
          locations, method, HdfsFileStatus.class, null);
    }
    // If there is no real path, check mount points
    if (ret == null) {
      List<String> children = subclusterResolver.getMountPoints(src);
      if (children != null && !children.isEmpty()) {
        Map<String, Long> dates = getMountPointDates(src);
        long date = 0;
        if (dates != null && dates.containsKey(src)) {
          date = dates.get(src);
        }
        // Synthesize a directory status for the mount point itself
        ret = getMountPointStatus(src, children.size(), date);
      }
    }
    return ret;
  }
/**
* Get the file info from all the locations.
*
* @param locations Locations to check.
* @param method The file information method to run.
* @return The first file info if it's a file, the directory if it's
* everywhere.
* @throws IOException If all the locations throw an exception.
*/
private HdfsFileStatus getFileInfoAll(final List<RemoteLocation> locations,
final RemoteMethod method) throws IOException {
// Get the file info from everybody
Map<RemoteLocation, HdfsFileStatus> results =
rpcClient.invokeConcurrent(locations, method, HdfsFileStatus.class);
// We return the first file
HdfsFileStatus dirStatus = null;
for (RemoteLocation loc : locations) {
HdfsFileStatus fileStatus = results.get(loc);
if (fileStatus != null) {
if (!fileStatus.isDirectory()) {
return fileStatus;
} else if (dirStatus == null) {
dirStatus = fileStatus;
}
}
}
return dirStatus;
}
@Override // ClientProtocol
public boolean isFileClosed(String src) throws IOException {
checkOperation(OperationCategory.READ);
final List<RemoteLocation> locations = getLocationsForPath(src, false);
RemoteMethod method = new RemoteMethod("isFileClosed",
new Class<?>[] {String.class}, new RemoteParam());
return ((Boolean) rpcClient.invokeSequential(
locations, method, Boolean.class, Boolean.TRUE)).booleanValue();
}
@Override // ClientProtocol
public HdfsFileStatus getFileLinkInfo(String src) throws IOException {
checkOperation(OperationCategory.READ);
final List<RemoteLocation> locations = getLocationsForPath(src, false);
RemoteMethod method = new RemoteMethod("getFileLinkInfo",
new Class<?>[] {String.class}, new RemoteParam());
return (HdfsFileStatus) rpcClient.invokeSequential(
locations, method, HdfsFileStatus.class, null);
}
@Override
public HdfsLocatedFileStatus getLocatedFileInfo(String src,
boolean needBlockToken) throws IOException {
checkOperation(OperationCategory.READ);
final List<RemoteLocation> locations = getLocationsForPath(src, false);
RemoteMethod method = new RemoteMethod("getLocatedFileInfo",
new Class<?>[] {String.class, boolean.class}, new RemoteParam(),
Boolean.valueOf(needBlockToken));
return (HdfsLocatedFileStatus) rpcClient.invokeSequential(
locations, method, HdfsFileStatus.class, null);
}
@Override // ClientProtocol
public long[] getStats() throws IOException {
checkOperation(OperationCategory.UNCHECKED);
RemoteMethod method = new RemoteMethod("getStats");
Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
Map<FederationNamespaceInfo, long[]> results =
rpcClient.invokeConcurrent(nss, method, true, false, long[].class);
long[] combinedData = new long[STATS_ARRAY_LENGTH];
for (long[] data : results.values()) {
for (int i = 0; i < combinedData.length && i < data.length; i++) {
if (data[i] >= 0) {
combinedData[i] += data[i];
}
}
}
return combinedData;
}
@Override // ClientProtocol
public DatanodeInfo[] getDatanodeReport(DatanodeReportType type)
throws IOException {
checkOperation(OperationCategory.UNCHECKED);
return getDatanodeReport(type, true, 0);
}
  /**
   * Get the datanode report with a timeout.
   * @param type Type of the datanode.
   * @param requireResponse If we require all the namespaces to report.
   * @param timeOutMs Time out for the reply in milliseconds.
   * @return List of datanodes, deduplicated by transfer address across
   *         subclusters (first report wins).
   * @throws IOException If it cannot get the report.
   */
  public DatanodeInfo[] getDatanodeReport(
      DatanodeReportType type, boolean requireResponse, long timeOutMs)
      throws IOException {
    checkOperation(OperationCategory.UNCHECKED);
    // LinkedHashMap keeps the namespace iteration order in the final array
    Map<String, DatanodeInfo> datanodesMap = new LinkedHashMap<>();
    RemoteMethod method = new RemoteMethod("getDatanodeReport",
        new Class<?>[] {DatanodeReportType.class}, type);
    Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
    Map<FederationNamespaceInfo, DatanodeInfo[]> results =
        rpcClient.invokeConcurrent(nss, method, requireResponse, false,
            timeOutMs, DatanodeInfo[].class);
    for (Entry<FederationNamespaceInfo, DatanodeInfo[]> entry :
        results.entrySet()) {
      FederationNamespaceInfo ns = entry.getKey();
      DatanodeInfo[] result = entry.getValue();
      for (DatanodeInfo node : result) {
        // Deduplicate datanodes shared by several subclusters
        String nodeId = node.getXferAddr();
        if (!datanodesMap.containsKey(nodeId)) {
          // Add the subcluster as a suffix to the network location
          node.setNetworkLocation(
              NodeBase.PATH_SEPARATOR_STR + ns.getNameserviceId() +
              node.getNetworkLocation());
          datanodesMap.put(nodeId, node);
        } else {
          LOG.debug("{} is in multiple subclusters", nodeId);
        }
      }
    }
    // Map -> Array
    Collection<DatanodeInfo> datanodes = datanodesMap.values();
    return toArray(datanodes, DatanodeInfo.class);
  }
@Override // ClientProtocol
public DatanodeStorageReport[] getDatanodeStorageReport(
DatanodeReportType type) throws IOException {
checkOperation(OperationCategory.UNCHECKED);
Map<String, DatanodeStorageReport[]> dnSubcluster =
getDatanodeStorageReportMap(type);
// Avoid repeating machines in multiple subclusters
Map<String, DatanodeStorageReport> datanodesMap = new LinkedHashMap<>();
for (DatanodeStorageReport[] dns : dnSubcluster.values()) {
for (DatanodeStorageReport dn : dns) {
DatanodeInfo dnInfo = dn.getDatanodeInfo();
String nodeId = dnInfo.getXferAddr();
if (!datanodesMap.containsKey(nodeId)) {
datanodesMap.put(nodeId, dn);
}
// TODO merge somehow, right now it just takes the first one
}
}
Collection<DatanodeStorageReport> datanodes = datanodesMap.values();
DatanodeStorageReport[] combinedData =
new DatanodeStorageReport[datanodes.size()];
combinedData = datanodes.toArray(combinedData);
return combinedData;
}
/**
* Get the list of datanodes per subcluster.
*
* @param type Type of the datanodes to get.
* @return nsId -> datanode list.
* @throws IOException
*/
public Map<String, DatanodeStorageReport[]> getDatanodeStorageReportMap(
DatanodeReportType type) throws IOException {
Map<String, DatanodeStorageReport[]> ret = new LinkedHashMap<>();
RemoteMethod method = new RemoteMethod("getDatanodeStorageReport",
new Class<?>[] {DatanodeReportType.class}, type);
Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
Map<FederationNamespaceInfo, DatanodeStorageReport[]> results =
rpcClient.invokeConcurrent(
nss, method, true, false, DatanodeStorageReport[].class);
for (Entry<FederationNamespaceInfo, DatanodeStorageReport[]> entry :
results.entrySet()) {
FederationNamespaceInfo ns = entry.getKey();
String nsId = ns.getNameserviceId();
DatanodeStorageReport[] result = entry.getValue();
ret.put(nsId, result);
}
return ret;
}
@Override // ClientProtocol
public boolean setSafeMode(SafeModeAction action, boolean isChecked)
throws IOException {
checkOperation(OperationCategory.WRITE);
// Set safe mode in all the name spaces
RemoteMethod method = new RemoteMethod("setSafeMode",
new Class<?>[] {SafeModeAction.class, boolean.class},
action, isChecked);
Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
Map<FederationNamespaceInfo, Boolean> results =
rpcClient.invokeConcurrent(
nss, method, true, !isChecked, Boolean.class);
// We only report true if all the name space are in safe mode
int numSafemode = 0;
for (boolean safemode : results.values()) {
if (safemode) {
numSafemode++;
}
}
return numSafemode == results.size();
}
@Override // ClientProtocol
public boolean restoreFailedStorage(String arg) throws IOException {
checkOperation(OperationCategory.UNCHECKED);
RemoteMethod method = new RemoteMethod("restoreFailedStorage",
new Class<?>[] {String.class}, arg);
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
Map<FederationNamespaceInfo, Boolean> ret =
rpcClient.invokeConcurrent(nss, method, true, false, Boolean.class);
boolean success = true;
for (boolean s : ret.values()) {
if (!s) {
success = false;
break;
}
}
return success;
}
@Override // ClientProtocol
public boolean saveNamespace(long timeWindow, long txGap) throws IOException {
checkOperation(OperationCategory.UNCHECKED);
RemoteMethod method = new RemoteMethod("saveNamespace",
new Class<?>[] {Long.class, Long.class}, timeWindow, txGap);
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
Map<FederationNamespaceInfo, Boolean> ret =
rpcClient.invokeConcurrent(nss, method, true, false, boolean.class);
boolean success = true;
for (boolean s : ret.values()) {
if (!s) {
success = false;
break;
}
}
return success;
}
@Override // ClientProtocol
public long rollEdits() throws IOException {
checkOperation(OperationCategory.WRITE);
RemoteMethod method = new RemoteMethod("rollEdits", new Class<?>[] {});
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
Map<FederationNamespaceInfo, Long> ret =
rpcClient.invokeConcurrent(nss, method, true, false, long.class);
// Return the maximum txid
long txid = 0;
for (long t : ret.values()) {
if (t > txid) {
txid = t;
}
}
return txid;
}
@Override // ClientProtocol
public void refreshNodes() throws IOException {
checkOperation(OperationCategory.UNCHECKED);
RemoteMethod method = new RemoteMethod("refreshNodes", new Class<?>[] {});
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
rpcClient.invokeConcurrent(nss, method, true, true);
}
@Override // ClientProtocol
public void finalizeUpgrade() throws IOException {
checkOperation(OperationCategory.UNCHECKED);
RemoteMethod method = new RemoteMethod("finalizeUpgrade",
new Class<?>[] {});
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
rpcClient.invokeConcurrent(nss, method, true, false);
}
@Override // ClientProtocol
public boolean upgradeStatus() throws IOException {
String methodName = getMethodName();
throw new UnsupportedOperationException(
"Operation \"" + methodName + "\" is not supported");
}
@Override // ClientProtocol
public RollingUpgradeInfo rollingUpgrade(RollingUpgradeAction action)
throws IOException {
checkOperation(OperationCategory.READ);
RemoteMethod method = new RemoteMethod("rollingUpgrade",
new Class<?>[] {RollingUpgradeAction.class}, action);
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
Map<FederationNamespaceInfo, RollingUpgradeInfo> ret =
rpcClient.invokeConcurrent(
nss, method, true, false, RollingUpgradeInfo.class);
// Return the first rolling upgrade info
RollingUpgradeInfo info = null;
for (RollingUpgradeInfo infoNs : ret.values()) {
if (info == null && infoNs != null) {
info = infoNs;
}
}
return info;
}
@Override // ClientProtocol
public void metaSave(String filename) throws IOException {
checkOperation(OperationCategory.UNCHECKED);
RemoteMethod method = new RemoteMethod("metaSave",
new Class<?>[] {String.class}, filename);
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
rpcClient.invokeConcurrent(nss, method, true, false);
}
@Override // ClientProtocol
public CorruptFileBlocks listCorruptFileBlocks(String path, String cookie)
throws IOException {
checkOperation(OperationCategory.READ);
final List<RemoteLocation> locations = getLocationsForPath(path, false);
RemoteMethod method = new RemoteMethod("listCorruptFileBlocks",
new Class<?>[] {String.class, String.class},
new RemoteParam(), cookie);
return (CorruptFileBlocks) rpcClient.invokeSequential(
locations, method, CorruptFileBlocks.class, null);
}
@Override // ClientProtocol
public void setBalancerBandwidth(long bandwidth) throws IOException {
checkOperation(OperationCategory.UNCHECKED);
RemoteMethod method = new RemoteMethod("setBalancerBandwidth",
new Class<?>[] {Long.class}, bandwidth);
final Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
rpcClient.invokeConcurrent(nss, method, true, false);
}
  /**
   * Get the content summary of a path, aggregating the summaries of the
   * subclusters it maps to and of the mount points nested under it
   * (computed recursively).
   *
   * @param path Path to summarize.
   * @return Aggregated content summary.
   * @throws IOException The original FileNotFoundException if neither the
   *           subclusters nor the mount points produced any summary.
   */
  @Override // ClientProtocol
  public ContentSummary getContentSummary(String path) throws IOException {
    checkOperation(OperationCategory.READ);
    // Get the summaries from regular files
    Collection<ContentSummary> summaries = new LinkedList<>();
    FileNotFoundException notFoundException = null;
    try {
      final List<RemoteLocation> locations = getLocationsForPath(path, false);
      RemoteMethod method = new RemoteMethod("getContentSummary",
          new Class<?>[] {String.class}, new RemoteParam());
      Map<RemoteLocation, ContentSummary> results =
          rpcClient.invokeConcurrent(
              locations, method, false, false, ContentSummary.class);
      summaries.addAll(results.values());
    } catch (FileNotFoundException e) {
      // Deferred: mount points below may still provide data
      notFoundException = e;
    }
    // Add mount points at this level in the tree
    final List<String> children = subclusterResolver.getMountPoints(path);
    if (children != null) {
      for (String child : children) {
        Path childPath = new Path(path, child);
        try {
          // Recursive call into this same method for the nested mount
          ContentSummary mountSummary = getContentSummary(childPath.toString());
          if (mountSummary != null) {
            summaries.add(mountSummary);
          }
        } catch (Exception e) {
          // Best effort: a broken mount must not fail the whole summary
          LOG.error("Cannot get content summary for mount {}: {}",
              childPath, e.getMessage());
        }
      }
    }
    // Throw original exception if no original nor mount points
    if (summaries.isEmpty() && notFoundException != null) {
      throw notFoundException;
    }
    return aggregateContentSummary(summaries);
  }
/**
* Aggregate content summaries for each subcluster.
*
* @param summaries Collection of individual summaries.
* @return Aggregated content summary.
*/
private ContentSummary aggregateContentSummary(
Collection<ContentSummary> summaries) {
if (summaries.size() == 1) {
return summaries.iterator().next();
}
long length = 0;
long fileCount = 0;
long directoryCount = 0;
long quota = 0;
long spaceConsumed = 0;
long spaceQuota = 0;
for (ContentSummary summary : summaries) {
length += summary.getLength();
fileCount += summary.getFileCount();
directoryCount += summary.getDirectoryCount();
quota += summary.getQuota();
spaceConsumed += summary.getSpaceConsumed();
spaceQuota += summary.getSpaceQuota();
}
ContentSummary ret = new ContentSummary.Builder()
.length(length)
.fileCount(fileCount)
.directoryCount(directoryCount)
.quota(quota)
.spaceConsumed(spaceConsumed)
.spaceQuota(spaceQuota)
.build();
return ret;
}
@Override // ClientProtocol
public void fsync(String src, long fileId, String clientName,
long lastBlockLength) throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("fsync",
new Class<?>[] {String.class, long.class, String.class, long.class },
new RemoteParam(), fileId, clientName, lastBlockLength);
rpcClient.invokeSequential(locations, method);
}
@Override // ClientProtocol
public void setTimes(String src, long mtime, long atime) throws IOException {
checkOperation(OperationCategory.WRITE);
final List<RemoteLocation> locations = getLocationsForPath(src, true);
RemoteMethod method = new RemoteMethod("setTimes",
new Class<?>[] {String.class, long.class, long.class},
new RemoteParam(), mtime, atime);
rpcClient.invokeSequential(locations, method);
}
  /**
   * Create a symbolic link pointing at a target path.
   *
   * @param target Path the link points to.
   * @param link Path of the link to create.
   * @param dirPerms Permissions for intermediate directories.
   * @param createParent Whether to create missing parent directories.
   * @throws IOException If the link cannot be created.
   */
  @Override // ClientProtocol
  public void createSymlink(String target, String link, FsPermission dirPerms,
      boolean createParent) throws IOException {
    checkOperation(OperationCategory.WRITE);
    // TODO Verify that the link location is in the same NS as the targets
    final List<RemoteLocation> targetLocations =
        getLocationsForPath(target, true);
    final List<RemoteLocation> linkLocations =
        getLocationsForPath(link, true);
    // NOTE(review): only the first link location is used — confirm this is
    // intended when the link resolves to multiple subclusters
    RemoteLocation linkLocation = linkLocations.get(0);
    RemoteMethod method = new RemoteMethod("createSymlink",
        new Class<?>[] {String.class, String.class, FsPermission.class,
            boolean.class},
        new RemoteParam(), linkLocation.getDest(), dirPerms, createParent);
    rpcClient.invokeSequential(targetLocations, method);
  }
@Override // ClientProtocol
public String getLinkTarget(String path) throws IOException {
checkOperation(OperationCategory.READ);
final List<RemoteLocation> locations = getLocationsForPath(path, true);
RemoteMethod method = new RemoteMethod("getLinkTarget",
new Class<?>[] {String.class}, new RemoteParam());
return (String) rpcClient.invokeSequential(
locations, method, String.class, null);
}
  // Snapshot operations are not implemented by the Router. The mutating
  // calls are no-ops and the queries return null.
  // NOTE(review): checkOperation(..., false) presumably marks the operation
  // as unsupported — confirm its semantics in this class.
  /** Snapshots are not supported; this is a no-op. */
  @Override // Client Protocol
  public void allowSnapshot(String snapshotRoot) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Snapshots are not supported; this is a no-op. */
  @Override // Client Protocol
  public void disallowSnapshot(String snapshot) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Snapshots are not supported; this is a no-op. */
  @Override // ClientProtocol
  public void renameSnapshot(String snapshotRoot, String snapshotOldName,
      String snapshotNewName) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Snapshots are not supported; always returns null. */
  @Override // Client Protocol
  public SnapshottableDirectoryStatus[] getSnapshottableDirListing()
      throws IOException {
    checkOperation(OperationCategory.READ, false);
    return null;
  }
  /** Snapshots are not supported; always returns null. */
  @Override // ClientProtocol
  public SnapshotDiffReport getSnapshotDiffReport(String snapshotRoot,
      String earlierSnapshotName, String laterSnapshotName) throws IOException {
    checkOperation(OperationCategory.READ, false);
    return null;
  }
  /** Snapshots are not supported; always returns null. */
  @Override // ClientProtocol
  public SnapshotDiffReportListing getSnapshotDiffReportListing(
      String snapshotRoot, String earlierSnapshotName, String laterSnapshotName,
      byte[] startPath, int index) throws IOException {
    checkOperation(OperationCategory.READ, false);
    return null;
  }
  // Cache directive/pool operations are not implemented by the Router. The
  // mutating calls are no-ops and the queries return null/0.
  /** Caching is not supported; always returns 0. */
  @Override // ClientProtocol
  public long addCacheDirective(CacheDirectiveInfo path,
      EnumSet<CacheFlag> flags) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
    return 0;
  }
  /** Caching is not supported; this is a no-op. */
  @Override // ClientProtocol
  public void modifyCacheDirective(CacheDirectiveInfo directive,
      EnumSet<CacheFlag> flags) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Caching is not supported; this is a no-op. */
  @Override // ClientProtocol
  public void removeCacheDirective(long id) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Caching is not supported; always returns null. */
  @Override // ClientProtocol
  public BatchedEntries<CacheDirectiveEntry> listCacheDirectives(
      long prevId, CacheDirectiveInfo filter) throws IOException {
    checkOperation(OperationCategory.READ, false);
    return null;
  }
  /** Caching is not supported; this is a no-op. */
  @Override // ClientProtocol
  public void addCachePool(CachePoolInfo info) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Caching is not supported; this is a no-op. */
  @Override // ClientProtocol
  public void modifyCachePool(CachePoolInfo info) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Caching is not supported; this is a no-op. */
  @Override // ClientProtocol
  public void removeCachePool(String cachePoolName) throws IOException {
    checkOperation(OperationCategory.WRITE, false);
  }
  /** Caching is not supported; always returns null. */
  @Override // ClientProtocol
  public BatchedEntries<CachePoolEntry> listCachePools(String prevKey)
      throws IOException {
    checkOperation(OperationCategory.READ, false);
    return null;
  }
// --------------------------------------------------------------------------
// ACL mutation operations. Each resolves the path to its candidate remote
// locations and forwards the call sequentially until one subcluster
// succeeds.
// --------------------------------------------------------------------------

@Override // ClientProtocol
public void modifyAclEntries(String src, List<AclEntry> aclSpec)
    throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod("modifyAclEntries",
      new Class<?>[] {String.class, List.class},
      new RemoteParam(), aclSpec);
  rpcClient.invokeSequential(locations, method, null, null);
}

@Override // ClientProtocol
public void removeAclEntries(String src, List<AclEntry> aclSpec)
    throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod("removeAclEntries",
      new Class<?>[] {String.class, List.class},
      new RemoteParam(), aclSpec);
  rpcClient.invokeSequential(locations, method, null, null);
}

@Override // ClientProtocol
public void removeDefaultAcl(String src) throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod("removeDefaultAcl",
      new Class<?>[] {String.class}, new RemoteParam());
  rpcClient.invokeSequential(locations, method);
}

@Override // ClientProtocol
public void removeAcl(String src) throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod("removeAcl",
      new Class<?>[] {String.class}, new RemoteParam());
  rpcClient.invokeSequential(locations, method);
}

@Override // ClientProtocol
public void setAcl(String src, List<AclEntry> aclSpec) throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod(
      "setAcl", new Class<?>[] {String.class, List.class},
      new RemoteParam(), aclSpec);
  rpcClient.invokeSequential(locations, method);
}
/**
 * Fetch the ACL status of a path from the first subcluster that can
 * answer for it.
 *
 * @param src Path to query.
 * @return ACL status of the path.
 * @throws IOException If no subcluster can serve the request.
 */
@Override // ClientProtocol
public AclStatus getAclStatus(String src) throws IOException {
  checkOperation(OperationCategory.READ);

  // TODO handle virtual directories
  List<RemoteLocation> targets = getLocationsForPath(src, false);
  RemoteMethod remoteMethod = new RemoteMethod("getAclStatus",
      new Class<?>[] {String.class}, new RemoteParam());
  Object response =
      rpcClient.invokeSequential(targets, remoteMethod, AclStatus.class, null);
  return (AclStatus) response;
}
// --------------------------------------------------------------------------
// Encryption zone operations. Create/get are forwarded to the subcluster
// owning the path; the listing and re-encryption operations are not
// implemented by the Router.
// --------------------------------------------------------------------------

@Override // ClientProtocol
public void createEncryptionZone(String src, String keyName)
    throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod("createEncryptionZone",
      new Class<?>[] {String.class, String.class},
      new RemoteParam(), keyName);
  rpcClient.invokeSequential(locations, method);
}

@Override // ClientProtocol
public EncryptionZone getEZForPath(String src) throws IOException {
  checkOperation(OperationCategory.READ);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, false);
  RemoteMethod method = new RemoteMethod("getEZForPath",
      new Class<?>[] {String.class}, new RemoteParam());
  return (EncryptionZone) rpcClient.invokeSequential(
      locations, method, EncryptionZone.class, null);
}

@Override // ClientProtocol
public BatchedEntries<EncryptionZone> listEncryptionZones(long prevId)
    throws IOException {
  checkOperation(OperationCategory.READ, false);
  return null;
}

@Override // ClientProtocol
public void reencryptEncryptionZone(String zone, ReencryptAction action)
    throws IOException {
  checkOperation(OperationCategory.WRITE, false);
}

@Override // ClientProtocol
public BatchedEntries<ZoneReencryptionStatus> listReencryptionStatus(
    long prevId) throws IOException {
  checkOperation(OperationCategory.READ, false);
  return null;
}
// --------------------------------------------------------------------------
// Extended attribute operations and permission checks, forwarded
// sequentially to the subclusters holding the path.
// --------------------------------------------------------------------------

@Override // ClientProtocol
public void setXAttr(String src, XAttr xAttr, EnumSet<XAttrSetFlag> flag)
    throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod("setXAttr",
      new Class<?>[] {String.class, XAttr.class, EnumSet.class},
      new RemoteParam(), xAttr, flag);
  rpcClient.invokeSequential(locations, method);
}

// Cast of the generic List return value is unchecked by necessity.
@SuppressWarnings("unchecked")
@Override // ClientProtocol
public List<XAttr> getXAttrs(String src, List<XAttr> xAttrs)
    throws IOException {
  checkOperation(OperationCategory.READ);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, false);
  RemoteMethod method = new RemoteMethod("getXAttrs",
      new Class<?>[] {String.class, List.class}, new RemoteParam(), xAttrs);
  return (List<XAttr>) rpcClient.invokeSequential(
      locations, method, List.class, null);
}

@SuppressWarnings("unchecked")
@Override // ClientProtocol
public List<XAttr> listXAttrs(String src) throws IOException {
  checkOperation(OperationCategory.READ);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, false);
  RemoteMethod method = new RemoteMethod("listXAttrs",
      new Class<?>[] {String.class}, new RemoteParam());
  return (List<XAttr>) rpcClient.invokeSequential(
      locations, method, List.class, null);
}

@Override // ClientProtocol
public void removeXAttr(String src, XAttr xAttr) throws IOException {
  checkOperation(OperationCategory.WRITE);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(src, true);
  RemoteMethod method = new RemoteMethod("removeXAttr",
      new Class<?>[] {String.class, XAttr.class}, new RemoteParam(), xAttr);
  rpcClient.invokeSequential(locations, method);
}

@Override // ClientProtocol
public void checkAccess(String path, FsAction mode) throws IOException {
  checkOperation(OperationCategory.READ);

  // TODO handle virtual directories
  final List<RemoteLocation> locations = getLocationsForPath(path, true);
  RemoteMethod method = new RemoteMethod("checkAccess",
      new Class<?>[] {String.class, FsAction.class},
      new RemoteParam(), mode);
  rpcClient.invokeSequential(locations, method);
}
/**
 * Return the highest edit-log transaction id seen across every namespace.
 * All namenodes are queried concurrently and the maximum value is
 * reported; 0 when no namespace answers.
 *
 * @return Highest current edit-log transaction id.
 * @throws IOException If the namespaces cannot be queried.
 */
@Override // ClientProtocol
public long getCurrentEditLogTxid() throws IOException {
  checkOperation(OperationCategory.READ);

  RemoteMethod method =
      new RemoteMethod("getCurrentEditLogTxid", new Class<?>[] {});
  Set<FederationNamespaceInfo> namespaces = namenodeResolver.getNamespaces();
  Map<FederationNamespaceInfo, Long> results =
      rpcClient.invokeConcurrent(namespaces, method, true, false, long.class);

  // Report the largest transaction id observed in any namespace.
  long highest = 0;
  for (Long candidate : results.values()) {
    highest = Math.max(highest, candidate);
  }
  return highest;
}
@Override // ClientProtocol
public EventBatchList getEditsFromTxid(long txid) throws IOException {
  // Inotify-style edit streaming is not supported by the Router.
  checkOperation(OperationCategory.READ, false);
  return null;
}

@Override
public DataEncryptionKey getDataEncryptionKey() throws IOException {
  checkOperation(OperationCategory.READ, false);
  return null;
}

@Override
public String createSnapshot(String snapshotRoot, String snapshotName)
    throws IOException {
  // NOTE(review): unlike the other snapshot stubs this uses the
  // supported form of checkOperation yet still returns null — confirm
  // whether it was meant to pass false like deleteSnapshot below.
  checkOperation(OperationCategory.WRITE);
  return null;
}

@Override
public void deleteSnapshot(String snapshotRoot, String snapshotName)
    throws IOException {
  checkOperation(OperationCategory.WRITE, false);
}

@Override // ClientProtocol
public void setQuota(String path, long namespaceQuota, long storagespaceQuota,
    StorageType type) throws IOException {
  // Quota handling is delegated to the Router quota module.
  this.quotaCall.setQuota(path, namespaceQuota, storagespaceQuota, type);
}

@Override // ClientProtocol
public QuotaUsage getQuotaUsage(String path) throws IOException {
  checkOperation(OperationCategory.READ);
  return this.quotaCall.getQuotaUsage(path);
}
/**
 * Report a set of corrupt blocks. Blocks are grouped by block pool id and
 * each group is forwarded to the namenode serving that block pool.
 *
 * @param blocks Blocks being reported as bad.
 * @throws IOException If a block pool's namenode cannot be contacted.
 */
@Override
public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
  checkOperation(OperationCategory.WRITE);

  // Group the reported blocks by their block pool id.
  Map<String, List<LocatedBlock>> blockLocations = new HashMap<>();
  for (LocatedBlock block : blocks) {
    String bpId = block.getBlock().getBlockPoolId();
    blockLocations.computeIfAbsent(bpId, k -> new ArrayList<>()).add(block);
  }

  // Invoke each block pool with its own group of blocks.
  for (Entry<String, List<LocatedBlock>> entry : blockLocations.entrySet()) {
    String bpId = entry.getKey();
    List<LocatedBlock> bpBlocks = entry.getValue();
    // Zero-length array form lets the JVM allocate the right size.
    LocatedBlock[] bpBlocksArray = bpBlocks.toArray(new LocatedBlock[0]);
    RemoteMethod method = new RemoteMethod("reportBadBlocks",
        new Class<?>[] {LocatedBlock[].class},
        new Object[] {bpBlocksArray});
    rpcClient.invokeSingleBlockPool(bpId, method);
  }
}
@Override
public void unsetStoragePolicy(String src) throws IOException {
  // Storage policies are not handled by the Router.
  checkOperation(OperationCategory.WRITE, false);
}

@Override
public BlockStoragePolicy getStoragePolicy(String path) throws IOException {
  checkOperation(OperationCategory.READ, false);
  return null;
}

// --------------------------------------------------------------------------
// Erasure coding operations, all delegated to the ErasureCoding module.
// --------------------------------------------------------------------------

@Override // ClientProtocol
public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
    throws IOException {
  return erasureCoding.getErasureCodingPolicies();
}

@Override // ClientProtocol
public Map<String, String> getErasureCodingCodecs() throws IOException {
  return erasureCoding.getErasureCodingCodecs();
}

@Override // ClientProtocol
public AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
    ErasureCodingPolicy[] policies) throws IOException {
  return erasureCoding.addErasureCodingPolicies(policies);
}

@Override // ClientProtocol
public void removeErasureCodingPolicy(String ecPolicyName)
    throws IOException {
  erasureCoding.removeErasureCodingPolicy(ecPolicyName);
}

@Override // ClientProtocol
public void disableErasureCodingPolicy(String ecPolicyName)
    throws IOException {
  erasureCoding.disableErasureCodingPolicy(ecPolicyName);
}

@Override // ClientProtocol
public void enableErasureCodingPolicy(String ecPolicyName)
    throws IOException {
  erasureCoding.enableErasureCodingPolicy(ecPolicyName);
}

@Override // ClientProtocol
public ErasureCodingPolicy getErasureCodingPolicy(String src)
    throws IOException {
  return erasureCoding.getErasureCodingPolicy(src);
}

@Override // ClientProtocol
public void setErasureCodingPolicy(String src, String ecPolicyName)
    throws IOException {
  erasureCoding.setErasureCodingPolicy(src, ecPolicyName);
}

@Override // ClientProtocol
public void unsetErasureCodingPolicy(String src) throws IOException {
  erasureCoding.unsetErasureCodingPolicy(src);
}

@Override
public ECBlockGroupStats getECBlockGroupStats() throws IOException {
  return erasureCoding.getECBlockGroupStats();
}

@Override
public ReplicatedBlockStats getReplicatedBlockStats() throws IOException {
  checkOperation(OperationCategory.READ, false);
  return null;
}

// Legacy overload retained for compatibility; forwards to the filtered
// variant with the default filter.
@Deprecated
@Override
public BatchedEntries<OpenFileEntry> listOpenFiles(long prevId)
    throws IOException {
  return listOpenFiles(prevId, EnumSet.of(OpenFilesType.ALL_OPEN_FILES),
      OpenFilesIterator.FILTER_PATH_DEFAULT);
}

@Override
public BatchedEntries<OpenFileEntry> listOpenFiles(long prevId,
    EnumSet<OpenFilesType> openFilesTypes, String path) throws IOException {
  // Listing open files is not supported by the Router.
  checkOperation(OperationCategory.READ, false);
  return null;
}
// --------------------------------------------------------------------------
// NamenodeProtocol operations, all delegated to the nnProto module.
// --------------------------------------------------------------------------

@Override // NamenodeProtocol
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size,
    long minBlockSize) throws IOException {
  return nnProto.getBlocks(datanode, size, minBlockSize);
}

@Override // NamenodeProtocol
public ExportedBlockKeys getBlockKeys() throws IOException {
  return nnProto.getBlockKeys();
}

@Override // NamenodeProtocol
public long getTransactionID() throws IOException {
  return nnProto.getTransactionID();
}

@Override // NamenodeProtocol
public long getMostRecentCheckpointTxId() throws IOException {
  return nnProto.getMostRecentCheckpointTxId();
}

@Override // NamenodeProtocol
public CheckpointSignature rollEditLog() throws IOException {
  return nnProto.rollEditLog();
}

@Override // NamenodeProtocol
public NamespaceInfo versionRequest() throws IOException {
  return nnProto.versionRequest();
}

@Override // NamenodeProtocol
public void errorReport(NamenodeRegistration registration, int errorCode,
    String msg) throws IOException {
  nnProto.errorReport(registration, errorCode, msg);
}

@Override // NamenodeProtocol
public NamenodeRegistration registerSubordinateNamenode(
    NamenodeRegistration registration) throws IOException {
  return nnProto.registerSubordinateNamenode(registration);
}

@Override // NamenodeProtocol
public NamenodeCommand startCheckpoint(NamenodeRegistration registration)
    throws IOException {
  return nnProto.startCheckpoint(registration);
}

@Override // NamenodeProtocol
public void endCheckpoint(NamenodeRegistration registration,
    CheckpointSignature sig) throws IOException {
  nnProto.endCheckpoint(registration, sig);
}

@Override // NamenodeProtocol
public RemoteEditLogManifest getEditLogManifest(long sinceTxId)
    throws IOException {
  return nnProto.getEditLogManifest(sinceTxId);
}

@Override // NamenodeProtocol
public boolean isUpgradeFinalized() throws IOException {
  return nnProto.isUpgradeFinalized();
}

@Override // NamenodeProtocol
public boolean isRollingUpgrade() throws IOException {
  return nnProto.isRollingUpgrade();
}
/**
 * Locate the remote location of a path within a particular block pool.
 *
 * @param path Path to check.
 * @param failIfLocked Fail the request if locked (top mount point).
 * @param blockPoolId Block pool ID of the namespace to search for.
 * @return Location of the path served by the matching nameservice.
 * @throws IOException If no nameservice serves that block pool for the path.
 */
private RemoteLocation getLocationForPath(
    String path, boolean failIfLocked, String blockPoolId)
    throws IOException {

  List<RemoteLocation> candidates = getLocationsForPath(path, failIfLocked);

  // Map the block pool id to its nameservice id.
  String nsId = null;
  for (FederationNamespaceInfo namespace :
      this.namenodeResolver.getNamespaces()) {
    if (namespace.getBlockPoolId().equals(blockPoolId)) {
      nsId = namespace.getNameserviceId();
      break;
    }
  }

  // Pick the candidate location served by that nameservice.
  if (nsId != null) {
    for (RemoteLocation candidate : candidates) {
      if (candidate.getNameserviceId().equals(nsId)) {
        return candidate;
      }
    }
  }

  throw new IOException(
      "Cannot locate a nameservice for block pool " + blockPoolId);
}
/**
 * Get the possible locations of a path in the federated cluster.
 * During the get operation, it will do the quota verification.
 *
 * @param path Path to check.
 * @param failIfLocked Fail the request if locked (top mount point).
 * @return Prioritized list of locations in the federated cluster.
 * @throws IOException If the location for this path cannot be determined.
 */
protected List<RemoteLocation> getLocationsForPath(String path,
    boolean failIfLocked) throws IOException {
  // Delegate with quota verification enabled.
  return getLocationsForPath(path, failIfLocked, true);
}
/**
 * Get the possible locations of a path in the federated cluster.
 *
 * For WRITE operations this also rejects paths in read-only mount points
 * and, when requested, verifies the mount point's quota. Locations in
 * disabled subclusters are filtered out of the result.
 *
 * @param path Path to check.
 * @param failIfLocked Fail the request if locked (top mount point).
 * @param needQuotaVerify If need to do the quota verification.
 * @return Prioritized list of locations in the federated cluster.
 * @throws IOException If the location for this path cannot be determined.
 */
protected List<RemoteLocation> getLocationsForPath(String path,
    boolean failIfLocked, boolean needQuotaVerify) throws IOException {
  try {
    // Check the location for this path
    final PathLocation location =
        this.subclusterResolver.getDestinationForPath(path);
    if (location == null) {
      throw new IOException("Cannot find locations for " + path + " in " +
          this.subclusterResolver);
    }

    // We may block some write operations
    if (opCategory.get() == OperationCategory.WRITE) {
      // Check if the path is in a read only mount point
      if (isPathReadOnly(path)) {
        if (this.rpcMonitor != null) {
          this.rpcMonitor.routerFailureReadOnly();
        }
        throw new IOException(path + " is in a read only mount point");
      }

      // Check quota; verifyNamespaceQuota/verifyStoragespaceQuota
      // presumably throw when the mount point's quota is exceeded.
      if (this.router.isQuotaEnabled() && needQuotaVerify) {
        RouterQuotaUsage quotaUsage = this.router.getQuotaManager()
            .getQuotaUsage(path);
        if (quotaUsage != null) {
          quotaUsage.verifyNamespaceQuota();
          quotaUsage.verifyStoragespaceQuota();
        }
      }
    }

    // Filter disabled subclusters
    Set<String> disabled = namenodeResolver.getDisabledNamespaces();
    List<RemoteLocation> locs = new ArrayList<>();
    for (RemoteLocation loc : location.getDestinations()) {
      if (!disabled.contains(loc.getNameserviceId())) {
        locs.add(loc);
      }
    }
    return locs;
  } catch (IOException ioe) {
    // Record the failure in the RPC monitor before propagating it.
    if (this.rpcMonitor != null) {
      this.rpcMonitor.routerFailureStateStore();
    }
    throw ioe;
  }
}
/**
 * Check whether a path should be present in every subcluster.
 *
 * @param path Path to check.
 * @return True if the mount entry for the path spans all subclusters.
 */
private boolean isPathAll(final String path) {
  if (!(subclusterResolver instanceof MountTableResolver)) {
    // Only mount-table based resolvers carry this information.
    return false;
  }
  try {
    MountTableResolver mountTable = (MountTableResolver) subclusterResolver;
    MountTable entry = mountTable.getMountPoint(path);
    return entry != null && entry.isAll();
  } catch (IOException e) {
    LOG.error("Cannot get mount point", e);
    return false;
  }
}
/**
 * Check if a path is in a read only mount point.
 *
 * @param path Path to check.
 * @return True if the mount entry for the path is marked read-only.
 */
private boolean isPathReadOnly(final String path) {
  if (!(subclusterResolver instanceof MountTableResolver)) {
    // Only mount-table based resolvers carry this information.
    return false;
  }
  try {
    MountTableResolver mountTable = (MountTableResolver) subclusterResolver;
    MountTable entry = mountTable.getMountPoint(path);
    return entry != null && entry.isReadOnly();
  } catch (IOException e) {
    LOG.error("Cannot get mount point", e);
    return false;
  }
}
/**
 * Get the modification dates for mount points.
 *
 * @param path Name of the path to start checking dates from.
 * @return Map with the modification dates for all sub-entries.
 */
private Map<String, Long> getMountPointDates(String path) {
  Map<String, Long> ret = new TreeMap<>();
  // Dates are only available when the resolver is mount-table based.
  if (subclusterResolver instanceof MountTableResolver) {
    try {
      final List<String> children = subclusterResolver.getMountPoints(path);
      for (String child : children) {
        // Entry date, or latest among the child's mounted subdirs.
        Long modTime = getModifiedTime(ret, path, child);
        ret.put(child, modTime);
      }
    } catch (IOException e) {
      LOG.error("Cannot get mount point", e);
    }
  }
  return ret;
}
/**
 * Get the modified time for a child. If the child is present in the mount
 * table its own modified time is returned. If the child is not present
 * but subdirs of it are, the latest modified time among those subdirs is
 * returned.
 *
 * @param ret Children already processed and their dates (kept for
 *            signature compatibility; not read here).
 * @param path Name of the path to start checking dates from.
 * @param child Child of the requested path.
 * @return Modified time, or 0 if it cannot be determined.
 */
private long getModifiedTime(Map<String, Long> ret, String path,
    String child) {
  MountTableResolver mountTable = (MountTableResolver)subclusterResolver;
  String srcPath;
  if (path.equals(Path.SEPARATOR)) {
    srcPath = Path.SEPARATOR + child;
  } else {
    srcPath = path + Path.SEPARATOR + child;
  }
  long modTime = 0L;
  try {
    // Get mount table entry for the srcPath
    MountTable entry = mountTable.getMountPoint(srcPath);
    if (entry == null) {
      // srcPath is not in the mount table, but its subdirs may be:
      // report the latest modification date among them.
      List<MountTable> entries = mountTable.getMounts(srcPath);
      for (MountTable eachEntry : entries) {
        // Keep the maximum. The previous code compared against
        // ret.get(child), which the caller only populates AFTER this
        // method returns, so every iteration overwrote modTime and the
        // LAST entry's date was returned instead of the latest.
        if (modTime < eachEntry.getDateModified()) {
          modTime = eachEntry.getDateModified();
        }
      }
    } else {
      modTime = entry.getDateModified();
    }
  } catch (IOException e) {
    LOG.error("Cannot get mount point", e);
  }
  return modTime;
}
/**
 * Create a new file status for a mount point.
 *
 * @param name Name of the mount point.
 * @param childrenNum Number of children.
 * @param date Modification/access date to report.
 * @return New HDFS file status representing a mount point.
 */
private HdfsFileStatus getMountPointStatus(
    String name, int childrenNum, long date) {
  // Mount points are presented as directories with default permissions.
  FsPermission permission = FsPermission.getDirDefault();

  // Owner/group fall back to the Router's superuser when the caller
  // cannot be determined.
  // TODO support users, it should be the user for the pointed folder
  String owner = this.superUser;
  String group = this.superGroup;
  try {
    UserGroupInformation ugi = getRemoteUser();
    owner = ugi.getUserName();
    group = ugi.getPrimaryGroupName();
  } catch (IOException e) {
    LOG.error("Cannot get the remote user: {}", e.getMessage());
  }

  // Synthetic inode: mount points have no real inode id.
  long inodeId = 0;
  return new HdfsFileStatus.Builder()
      .isdir(true)
      .mtime(date)
      .atime(date)
      .perm(permission)
      .owner(owner)
      .group(group)
      .symlink(new byte[0])
      .path(DFSUtil.string2Bytes(name))
      .fileId(inodeId)
      .children(childrenNum)
      .build();
}
/**
 * Get the name of the method that is calling this function.
 *
 * @return Name of the method three frames up the call stack.
 */
private static String getMethodName() {
  // Frame 0 is getStackTrace, 1 is this method, 2 is our caller,
  // 3 is the caller we want to report.
  StackTraceElement[] trace = Thread.currentThread().getStackTrace();
  return trace[3].getMethodName();
}
/**
 * Get the user that is invoking this operation: the RPC caller when one
 * is present, otherwise the current local user.
 *
 * @return Remote user group information.
 * @throws IOException If we cannot get the user information.
 */
static UserGroupInformation getRemoteUser() throws IOException {
  UserGroupInformation caller = Server.getRemoteUser();
  if (caller != null) {
    return caller;
  }
  return UserGroupInformation.getCurrentUser();
}
/**
 * Merge the outputs from multiple namespaces, dropping duplicates while
 * preserving first-seen order.
 *
 * @param map Namespace -> Output array.
 * @param clazz Class of the values.
 * @return Single array with the de-duplicated outputs.
 */
protected static <T> T[] merge(
    Map<FederationNamespaceInfo, T[]> map, Class<T> clazz) {
  // LinkedHashSet removes repeats while keeping insertion order.
  Set<T> unique = new LinkedHashSet<>();
  for (T[] values : map.values()) {
    for (int i = 0; i < values.length; i++) {
      unique.add(values[i]);
    }
  }
  return toArray(unique, clazz);
}
/**
 * Copy a collection into a newly allocated array of the given type.
 *
 * @param set Input collection.
 * @param clazz Class of the values.
 * @return Array with the values in set.
 */
private static <T> T[] toArray(Collection<T> set, Class<T> clazz) {
  @SuppressWarnings("unchecked")
  T[] result = (T[]) Array.newInstance(clazz, set.size());
  return set.toArray(result);
}
/**
 * Get quota module implement.
 */
public Quota getQuotaModule() {
  return this.quotaCall;
}

/**
 * Get RPC metrics info.
 * @return The instance of FederationRPCMetrics.
 */
public FederationRPCMetrics getRPCMetrics() {
  // NOTE(review): rpcMonitor is null-checked elsewhere in this class;
  // this accessor assumes it is non-null — confirm callers guarantee it.
  return this.rpcMonitor.getRPCMetrics();
}
}
|
szegedim/hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
|
Java
|
apache-2.0
| 92,997 |
// Copyright 2017 Twitter. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.twitter.heron.downloader;
import java.net.URI;
import java.nio.file.Path;
/**
 * Fetches the content identified by a URI into a local destination path.
 */
public interface Downloader {
  /**
   * Download the resource at {@code uri} and store it at
   * {@code destination}.
   *
   * @param uri source of the content to fetch
   * @param destination local path to write the content to
   * @throws Exception if the transfer fails
   */
  void download(URI uri, Path destination) throws Exception;
}
|
lucperkins/heron
|
heron/downloaders/src/java/com/twitter/heron/downloader/Downloader.java
|
Java
|
apache-2.0
| 793 |
package org.strategoxt.lang.gradual;
import java.io.Serializable;
/**
 * A type in the gradual type system. Implementations must be serializable
 * and are expected to provide explicit equality, hashing and rendering
 * (the redeclarations below document that expectation).
 */
public interface Type extends Serializable {
  boolean equals(Object o);

  int hashCode();

  // Redundant "public" modifier removed: interface members are
  // implicitly public, and the sibling declarations omit it.
  String toString();
}
|
metaborg/strategoxt
|
strategoxt/stratego-libraries/java-backend/java/runtime/org/strategoxt/lang/gradual/Type.java
|
Java
|
apache-2.0
| 195 |
/*
* Copyright 2015 Marek Kaput
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rustidea.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.psi.tree.TokenSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.rustidea.psi.IRsType;
import org.rustidea.psi.RsElementVisitor;
import org.rustidea.psi.RsStruct;
import org.rustidea.psi.RsTypeParameterList;
import org.rustidea.psi.types.RsPsiTypes;
import org.rustidea.stubs.RsStructStub;
/**
 * PSI implementation of a Rust {@code struct} item, backed either by a
 * stub ({@link RsStructStub}) or directly by the AST.
 */
public class RsStructImpl extends IRsNamedItemPsiElement<RsStructStub> implements RsStruct {
    // A struct definition body is either a struct type or a tuple type.
    private static final TokenSet STRUCT_OR_TUPLE_TYPE = TokenSet.create(RsPsiTypes.STRUCT_TYPE, RsPsiTypes.TUPLE_TYPE);

    /** Stub-backed construction. */
    public RsStructImpl(@NotNull RsStructStub stub) {
        super(stub, RsPsiTypes.STRUCT);
    }

    /** AST-backed construction. */
    public RsStructImpl(@NotNull ASTNode node) {
        super(node);
    }

    /** @return the generic parameter list, or null when the struct declares none. */
    @Nullable
    @Override
    public RsTypeParameterList getTypeParameterList() {
        return findChildByType(RsPsiTypes.TYPE_PARAMETER_LIST);
    }

    /** @return the struct/tuple body, or null when absent (presumably a unit struct — TODO confirm). */
    @Nullable
    @Override
    public IRsType getDefinition() {
        return findChildByType(STRUCT_OR_TUPLE_TYPE);
    }

    @Override
    public void accept(@NotNull RsElementVisitor visitor) {
        visitor.visitStruct(this);
    }
}
|
jajakobyly/rustidea
|
src/org/rustidea/psi/impl/RsStructImpl.java
|
Java
|
apache-2.0
| 1,812 |
// Modal window for managing which groups and users a scenario ("cenário")
// is shared with. Layout: two stacked rows — groups (associated | all) on
// top, users (associated | all) below. The right-hand grids are collapsed
// pickers that expand on demand; changes are persisted by the controller's
// onSaveBtnClick handler.
Ext.define('MCLM.view.cenarios.GerenciarGruposCenarioWindow', {
    requires: [
        'MCLM.store.Grupo',
        'Ext.grid.plugin.DragDrop',
        'MCLM.view.cenarios.GerenciarGruposCenarioController'
    ],
    extend: 'Ext.window.Window',
    id: 'gerenciarGruposCenarioWindow',
    itemId: 'gerenciarGruposCenarioWindow',
    controller: 'gerenciar-grupos-cenario',
    modal: true,
    width: '60%',
    height: 500,
    layout: {
        type: 'vbox',
        align: 'stretch',
        pack: 'start'
    },
    tbar: [
        {iconCls: 'save-icon', tooltip: '<b>Salvar Alterações</b>', handler: 'onSaveBtnClick'}
    ],
    items: [
        // Top row: group association grids.
        {
            xtype: 'container', layout: 'hbox',
            height: '50%', width: '100%',
            items: [
                // Groups already associated with the scenario (in-memory store,
                // populated by the controller on show).
                {
                    itemId: 'associatedGroupsGrid', xtype: 'grid',
                    title: 'Grupos Associados', titleAlign: 'center',
                    scrollable: true, width: '50%',
                    height: '100%', store: {
                        proxy: 'memory',
                        sorters: ['name'],
                        autoSort: true
                    },
                    tools: [{
                        iconCls: 'group-add-icon',
                        tooltip: '<b>Compartilhar com Grupos</b>',
                        // Expands the collapsed picker grid on the right.
                        handler: () => Ext.getCmp('gerenciarGruposCenarioWindow').down('#groupsGrid').expand()
                    }],
                    columns: [
                        {
                            dataIndex: 'name', text: 'Nome',
                            width: '90%', items: [
                                {
                                    xtype: 'textfield',
                                    emptyText: 'Filtrar',
                                    enableKeyEvents: true,
                                    listeners: {
                                        keyup: 'onAssociatedGroupFilterKeyup',
                                        buffer: 500
                                    }
                                }
                            ]
                        },
                        {
                            xtype: 'actioncolumn', width: '5%',
                            items: [
                                {iconCls: "details-icon", tooltip: 'Detalhes', handler: 'onGroupDetailsBtnClick'}
                            ]
                        },
                        {
                            xtype: 'actioncolumn', width: '5%',
                            items: [
                                {tooltip: 'Remover Associação', iconCls: 'cancel-icon', handler: 'onAssociatedGroupRemoveBtnClick'}
                            ]}
                    ]
                },
                // All available groups (collapsed picker).
                {
                    itemId: 'groupsGrid', xtype: 'grid',
                    title: 'Grupos', titleAlign: 'center',
                    scrollable: true, width: '50%',
                    collapsed: true, collapsible: true,
                    animCollapse: true, height: '100%',
                    store: {
                        type: 'grupo',
                        pageSize: 0
                    },
                    columns: [
                        {
                            dataIndex: 'name', text: 'Nome',
                            width: '90%', items: [
                                {
                                    xtype: 'textfield',
                                    emptyText: 'Filtrar',
                                    enableKeyEvents: true,
                                    listeners: {
                                        keyup: 'onGroupFilterKeyup',
                                        buffer: 500
                                    }
                                }
                            ]
                        },
                        {
                            xtype: 'actioncolumn', width: '5%',
                            items: [
                                {iconCls: 'details-icon', tooltip: 'Detalhes', handler: 'onGroupDetailsBtnClick'}
                            ]
                        },
                        {
                            xtype: 'actioncolumn', width: '5%',
                            items: [
                                {tooltip: 'Associar', iconCls: 'plus-icon', handler: 'onGroupAssociationBtnClick'}
                            ]}
                    ],
                    listeners: {
                        rowdblclick: 'onGroupRowDblClick'
                    }
                }
            ]
        },
        // Bottom row: user association grids.
        {
            xtype: 'container', layout: 'hbox',
            height: '50%', width: '100%',
            items: [
                // Users already associated with the scenario.
                {
                    itemId: 'associatedUsersGrid', xtype: 'grid',
                    titleAlign: 'center', scrollable: true,
                    title: 'Usuários Associados', width: '50%',
                    height: '100%', store: {
                        proxy: 'memory',
                        sorters: ['nome'],
                        autoSort: true
                    },
                    tools: [{
                        iconCls: 'user-add-icon',
                        tooltip: '<b>Compartilhar com Usuários</b>',
                        handler: () => Ext.getCmp('gerenciarGruposCenarioWindow').down('#usersGrid').expand()
                    }],
                    columns: [
                        {
                            dataIndex: 'cpf', text: 'CPF',
                            renderer: ColumnRenderer.cpf, width: '30%',
                            items: [
                                {
                                    xtype: 'textfield',
                                    emptyText: 'Filtrar',
                                    enableKeyEvents: true,
                                    listeners: {
                                        keyup: 'onAssociatedUserFilterKeyup',
                                        buffer: 500
                                    }
                                }
                            ]
                        },
                        {
                            dataIndex: 'nome', text: 'Nome',
                            width: '40%', items: [
                                {
                                    xtype: 'textfield',
                                    emptyText: 'Filtrar',
                                    enableKeyEvents: true,
                                    listeners: {
                                        keyup: 'onAssociatedUserFilterKeyup',
                                        buffer: 500
                                    }
                                }
                            ]
                        },
                        {dataIndex: 'siglaOm', text: 'OM', width: '10%'},
                        {dataIndex: 'siglaForca', text: 'Força', width: '10%'},
                        {xtype: 'actioncolumn', width: '10%', items: [
                            {tooltip: 'Remover', iconCls: 'cancel-icon', handler: 'onAssociatedUserRemoveBtnClick'}
                        ]}
                    ]
                },
                // All users (collapsed, paged picker).
                {
                    itemId: 'usersGrid', xtype: 'grid',
                    titleAlign: 'center', scrollable: true,
                    title: 'Usuários', width: '50%',
                    collapsed: true, collapsible: true,
                    animCollapse: true, height: '100%',
                    store: {
                        type: 'apolo-user',
                        autoLoad: true,
                        pageSize: 10
                    },
                    columns: [
                        {
                            dataIndex: 'cpf', text: 'CPF',
                            renderer: ColumnRenderer.cpf, width: '30%',
                            items: [
                                {
                                    xtype: 'textfield',
                                    emptyText: 'Filtrar',
                                    enableKeyEvents: true,
                                    listeners: {
                                        keyup: 'onUserFilterKeyup',
                                        buffer: 500
                                    }
                                }
                            ]
                        },
                        {
                            dataIndex: 'nome', text: 'Nome',
                            width: '40%', items: [
                                {
                                    xtype: 'textfield',
                                    emptyText: 'Filtrar',
                                    enableKeyEvents: true,
                                    listeners: {
                                        keyup: 'onUserFilterKeyup',
                                        buffer: 500
                                    }
                                }
                            ]
                        },
                        {dataIndex: 'siglaOm', text: 'OM', width: '10%'},
                        {dataIndex: 'siglaForca', text: 'Força', width: '10%'},
                        {xtype: 'actioncolumn', width: '10%', items: [
                            {tooltip: 'Associar', iconCls: 'plus-icon', handler: 'onUserAssociationBtnClick'}
                        ]}
                    ],
                    listeners: {
                        rowdblclick: 'onUserRowDblClick'
                    },
                    dockedItems: [{
                        xtype: 'pagingtoolbar',
                        // NOTE(review): this declares a NEW store config rather
                        // than referencing the grid's store instance, despite
                        // the comment below — the paging toolbar would then
                        // page a different store than the grid displays.
                        // Confirm and bind to the grid's store instead.
                        store: {
                            type: 'apolo-user',
                            autoLoad: true,
                            pageSize: 10
                        }, // same store GridPanel is using
                        dock: 'bottom',
                        displayInfo: true
                    }]
                }
            ]
        }
    ],
    listeners: {show: 'onShow'}
});
|
icemagno/mclm
|
src/main/webapp/app/view/cenarios/GerenciarGruposCenarioWindow.js
|
JavaScript
|
apache-2.0
| 10,122 |
package org.agilewiki.jactor2.core.impl;
import java.util.Timer;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import com.blockwithme.util.base.SystemUtils;
/**
* A scheduler for Plant, created by PlantConfiguration.
*/
public class JActorStTestPlantScheduler implements
org.agilewiki.jactor2.core.plant.PlantScheduler {
@SuppressWarnings("rawtypes")
private class MyTimerTask extends MyAbstractTimerTask {
    // The work to run; dropped on shutdown so it can be garbage-collected.
    private volatile Runnable runnable;
    // Set once cancel() has been requested.
    private volatile boolean cancelled;
    // Set once a one-shot task has executed.
    private volatile boolean done;
    // True for a single-shot task, false for a repeating one.
    private final boolean once;

    public MyTimerTask(final Runnable runnable, final boolean once) {
        this.runnable = runnable;
        this.once = once;
    }

    /**
     * Runs the wrapped task. When the enclosing scheduler is shutting
     * down ({@code die}), the task cancels itself, releases its runnable
     * and purges the timer instead of executing.
     */
    @Override
    public void run() {
        if (die) {
            cancel();
            runnable = null;
            timer.purge();
        } else {
            if (once) {
                done = true;
            }
            runnable.run();
        }
    }

    /** Remaining delay until the next scheduled execution. */
    @Override
    public long getDelay(final TimeUnit unit) {
        return unit.convert(
                scheduledExecutionTime() - System.currentTimeMillis(),
                TimeUnit.MILLISECONDS);
    }

    /** Cancels the timer task and records the cancellation. */
    @Override
    public boolean cancel() {
        cancelled = true;
        return super.cancel();
    }

    /** @return true once cancel() has been called. */
    @Override
    public boolean isCancelled() {
        return cancelled;
    }

    /** Future-style cancel; interruption is not supported, delegates to cancel(). */
    @Override
    public boolean cancel(final boolean mayInterruptIfRunning) {
        return cancel();
    }

    /** @return true once a one-shot task has run (never true for repeating tasks). */
    @Override
    public boolean isDone() {
        return done;
    }

    /**
     * Future-style get: returns null when done, otherwise signals that the
     * result is unavailable by throwing InterruptedException.
     */
    @Override
    public Object get() throws InterruptedException, ExecutionException {
        if (done) {
            return null;
        }
        throw new InterruptedException();
    }

    /** Timed variant of {@link #get()}; the timeout is not actually waited on. */
    @Override
    public Object get(final long timeout, final TimeUnit unit)
            throws InterruptedException, ExecutionException,
            TimeoutException {
        if (done) {
            return null;
        }
        throw new InterruptedException();
    }
}
private volatile long currentTimeMillis;
private volatile boolean die;
private final Timer timer;
/**
* Creates the default plantScheduler.
*/
public JActorStTestPlantScheduler() {
timer = SystemUtils.getTimer();
currentTimeMillis = System.currentTimeMillis();
timer.scheduleAtFixedRate(new MyTimerTask(new Runnable() {
@Override
public void run() {
currentTimeMillis = System.currentTimeMillis();
}
}, false), getHeartbeatMillis(), getHeartbeatMillis());
}
/**
* Controls how often currentTimeMillis is updated: every 500 milliseconds.
*
* @return The number of milliseconds between updates to currentTimeMillis.
*/
protected long getHeartbeatMillis() {
return 500;
}
/**
* Determines the size of the scheduledThreadPool: 2.
*
* @return Returns the number of threads in the scheduledThreadPool.
*/
protected int getSchedulerPoolSize() {
return 1;
}
@Override
public double currentTimeMillis() {
return currentTimeMillis;
}
@Override
public ScheduledFuture<?> schedule(final Runnable runnable,
final int _millisecondDelay) {
final MyTimerTask result = new MyTimerTask(runnable, true);
timer.schedule(result, _millisecondDelay);
return result;
}
@Override
public ScheduledFuture<?> scheduleAtFixedRate(final Runnable runnable,
final int _millisecondDelay) {
final MyTimerTask result = new MyTimerTask(runnable, false);
timer.scheduleAtFixedRate(result, _millisecondDelay, _millisecondDelay);
return result;
}
@Override
public void close() {
// No way to get the tasks from the Timer. :(
die = true;
}
/* (non-Javadoc)
* @see org.agilewiki.jactor2.core.plant.PlantScheduler#cancel(java.lang.Object)
*/
@Override
public void cancel(final Object task) {
if (task == null) {
throw new NullPointerException("task");
}
if (!(task instanceof ScheduledFuture)) {
throw new IllegalArgumentException("task: " + task.getClass());
}
((ScheduledFuture<?>) task).cancel(false);
}
}
|
skunkiferous/Util
|
jactor2-coreSt/src/test/java/org/agilewiki/jactor2/core/impl/JActorStTestPlantScheduler.java
|
Java
|
apache-2.0
| 5,403 |
package licenseutils

import (
	"context"

	"github.com/docker/licensing"
	"github.com/docker/licensing/model"
)

type (
	// fakeLicensingClient is a configurable test double for the licensing
	// client: each method delegates to the matching func field when it is
	// non-nil, and otherwise returns zero values.
	fakeLicensingClient struct {
		loginViaAuthFunc                 func(ctx context.Context, username, password string) (authToken string, err error)
		getHubUserOrgsFunc               func(ctx context.Context, authToken string) (orgs []model.Org, err error)
		getHubUserByNameFunc             func(ctx context.Context, username string) (user *model.User, err error)
		verifyLicenseFunc                func(ctx context.Context, license model.IssuedLicense) (res *model.CheckResponse, err error)
		generateNewTrialSubscriptionFunc func(ctx context.Context, authToken, dockerID string) (subscriptionID string, err error)
		listSubscriptionsFunc            func(ctx context.Context, authToken, dockerID string) (response []*model.Subscription, err error)
		listSubscriptionsDetailsFunc     func(ctx context.Context, authToken, dockerID string) (response []*model.SubscriptionDetail, err error)
		downloadLicenseFromHubFunc       func(ctx context.Context, authToken, subscriptionID string) (license *model.IssuedLicense, err error)
		parseLicenseFunc                 func(license []byte) (parsedLicense *model.IssuedLicense, err error)
		storeLicenseFunc                 func(ctx context.Context, dclnt licensing.WrappedDockerClient, licenses *model.IssuedLicense, localRootDir string) error
		loadLocalLicenseFunc             func(ctx context.Context, dclnt licensing.WrappedDockerClient) (*model.Subscription, error)
		summarizeLicenseFunc             func(*model.CheckResponse, string) *model.Subscription
	}
)

// LoginViaAuth delegates to loginViaAuthFunc when set.
func (c *fakeLicensingClient) LoginViaAuth(ctx context.Context, username, password string) (authToken string, err error) {
	if c.loginViaAuthFunc != nil {
		return c.loginViaAuthFunc(ctx, username, password)
	}
	return "", nil
}

// GetHubUserOrgs delegates to getHubUserOrgsFunc when set.
func (c *fakeLicensingClient) GetHubUserOrgs(ctx context.Context, authToken string) (orgs []model.Org, err error) {
	if c.getHubUserOrgsFunc != nil {
		return c.getHubUserOrgsFunc(ctx, authToken)
	}
	return nil, nil
}

// GetHubUserByName delegates to getHubUserByNameFunc when set.
func (c *fakeLicensingClient) GetHubUserByName(ctx context.Context, username string) (user *model.User, err error) {
	if c.getHubUserByNameFunc != nil {
		return c.getHubUserByNameFunc(ctx, username)
	}
	return nil, nil
}

// VerifyLicense delegates to verifyLicenseFunc when set.
func (c *fakeLicensingClient) VerifyLicense(ctx context.Context, license model.IssuedLicense) (res *model.CheckResponse, err error) {
	if c.verifyLicenseFunc != nil {
		return c.verifyLicenseFunc(ctx, license)
	}
	return nil, nil
}

// GenerateNewTrialSubscription delegates to generateNewTrialSubscriptionFunc when set.
func (c *fakeLicensingClient) GenerateNewTrialSubscription(ctx context.Context, authToken, dockerID string) (subscriptionID string, err error) {
	if c.generateNewTrialSubscriptionFunc != nil {
		return c.generateNewTrialSubscriptionFunc(ctx, authToken, dockerID)
	}
	return "", nil
}

// ListSubscriptions delegates to listSubscriptionsFunc when set.
func (c *fakeLicensingClient) ListSubscriptions(ctx context.Context, authToken, dockerID string) (response []*model.Subscription, err error) {
	if c.listSubscriptionsFunc != nil {
		return c.listSubscriptionsFunc(ctx, authToken, dockerID)
	}
	return nil, nil
}

// ListSubscriptionsDetails delegates to listSubscriptionsDetailsFunc when set.
func (c *fakeLicensingClient) ListSubscriptionsDetails(ctx context.Context, authToken, dockerID string) (response []*model.SubscriptionDetail, err error) {
	if c.listSubscriptionsDetailsFunc != nil {
		return c.listSubscriptionsDetailsFunc(ctx, authToken, dockerID)
	}
	return nil, nil
}

// DownloadLicenseFromHub delegates to downloadLicenseFromHubFunc when set.
func (c *fakeLicensingClient) DownloadLicenseFromHub(ctx context.Context, authToken, subscriptionID string) (license *model.IssuedLicense, err error) {
	if c.downloadLicenseFromHubFunc != nil {
		return c.downloadLicenseFromHubFunc(ctx, authToken, subscriptionID)
	}
	return nil, nil
}

// ParseLicense delegates to parseLicenseFunc when set.
func (c *fakeLicensingClient) ParseLicense(license []byte) (parsedLicense *model.IssuedLicense, err error) {
	if c.parseLicenseFunc != nil {
		return c.parseLicenseFunc(license)
	}
	return nil, nil
}

// StoreLicense delegates to storeLicenseFunc when set.
func (c *fakeLicensingClient) StoreLicense(ctx context.Context, dclnt licensing.WrappedDockerClient, licenses *model.IssuedLicense, localRootDir string) error {
	if c.storeLicenseFunc != nil {
		return c.storeLicenseFunc(ctx, dclnt, licenses, localRootDir)
	}
	return nil
}

// LoadLocalLicense delegates to loadLocalLicenseFunc when set.
func (c *fakeLicensingClient) LoadLocalLicense(ctx context.Context, dclnt licensing.WrappedDockerClient) (*model.Subscription, error) {
	if c.loadLocalLicenseFunc != nil {
		return c.loadLocalLicenseFunc(ctx, dclnt)
	}
	return nil, nil
}

// SummarizeLicense delegates to summarizeLicenseFunc when set.
func (c *fakeLicensingClient) SummarizeLicense(cr *model.CheckResponse, keyid string) *model.Subscription {
	if c.summarizeLicenseFunc != nil {
		return c.summarizeLicenseFunc(cr, keyid)
	}
	return nil
}
|
cpuguy83/cli
|
internal/licenseutils/client_test.go
|
GO
|
apache-2.0
| 4,580 |
/* global QUnit */
// QUnit tests for the "compVariantUpdate" RTA command: verifies execute/undo
// against stubbed SmartVariantManagementWriteAPI calls and stubbed control
// callbacks for both the Save scenario and the Manage Views scenario.
sap.ui.define([
	"sap/ui/core/Control",
	"sap/ui/fl/write/api/SmartVariantManagementWriteAPI",
	"sap/ui/fl/Layer",
	"sap/ui/rta/command/CommandFactory",
	"sap/ui/thirdparty/sinon-4"
], function(
	Control,
	SmartVariantManagementWriteAPI,
	Layer,
	CommandFactory,
	sinon
) {
	"use strict";
	var sandbox = sinon.createSandbox();

	QUnit.module("Given a control", {
		beforeEach: function() {
			this.oControl = new Control();
		},
		afterEach: function() {
			this.oControl.destroy();
			sandbox.restore();
		}
	}, function() {
		QUnit.test("Update in the Save scenario", function(assert) {
			var oUpdateCommand;
			var sVariantId = "variantId";
			var oContent = {foo: "bar"};
			// NOTE(review): oUpdateControlStub is installed but never asserted
			// against in this test — confirm whether an assertion is missing.
			var oUpdateControlStub = sandbox.stub();
			this.oControl.updateVariant = oUpdateControlStub;
			var oSetModifiedStub = sandbox.stub();
			this.oControl.setModified = oSetModifiedStub;
			var oUpdateFlAPIStub = sandbox.stub(SmartVariantManagementWriteAPI, "updateVariant");
			var oUndoVariantFlAPIStub = sandbox.stub(SmartVariantManagementWriteAPI, "revert");

			// Create the command with onlySave, then execute / undo / execute
			// and verify the FL API calls and the "modified" flag transitions.
			return CommandFactory.getCommandFor(this.oControl, "compVariantUpdate", {
				newVariantProperties: {
					variantId: {
						content: oContent
					}
				},
				onlySave: true
			}, {})
			.then(function(oCreatedCommand) {
				oUpdateCommand = oCreatedCommand;
				return oUpdateCommand.execute();
			}).then(function() {
				assert.equal(oUpdateFlAPIStub.callCount, 1, "the FL update function was called");
				var mExpectedProperties = {
					id: sVariantId,
					control: this.oControl,
					content: oContent,
					generator: "sap.ui.rta.command",
					command: "compVariantUpdate",
					layer: Layer.CUSTOMER
				};
				assert.deepEqual(oUpdateFlAPIStub.lastCall.args[0], mExpectedProperties, "the FL API was called with the correct properties");
				assert.equal(oSetModifiedStub.callCount, 1, "the setModified was called..");
				assert.equal(oSetModifiedStub.lastCall.args[0], false, "and set to false");
				return oUpdateCommand.undo();
			}.bind(this)).then(function() {
				assert.equal(oUndoVariantFlAPIStub.callCount, 1, "the undo function was called");
				assert.equal(oSetModifiedStub.callCount, 2, "the setModified was called again..");
				assert.equal(oSetModifiedStub.lastCall.args[0], true, "and set to true");
				return oUpdateCommand.execute();
			}).then(function() {
				assert.equal(oUpdateFlAPIStub.callCount, 2, "the FL update function was called again");
				var mExpectedProperties = {
					id: sVariantId,
					control: this.oControl,
					content: oContent,
					generator: "sap.ui.rta.command",
					command: "compVariantUpdate",
					layer: Layer.CUSTOMER
				};
				assert.deepEqual(oUpdateFlAPIStub.lastCall.args[0], mExpectedProperties, "the FL API was called with the correct properties");
				assert.equal(oSetModifiedStub.callCount, 3, "the setModified was called again..");
				assert.equal(oSetModifiedStub.lastCall.args[0], false, "and set to false");
			}.bind(this));
		});

		QUnit.test("Update in the Manage Views scenario", function(assert) {
			var oUpdateCommand;
			var oUpdateControlStub = sandbox.stub();
			this.oControl.updateVariant = oUpdateControlStub;
			var oRemoveControlStub = sandbox.stub();
			this.oControl.removeVariant = oRemoveControlStub;
			var oAddControlStub = sandbox.stub();
			this.oControl.addVariant = oAddControlStub;
			var oSetDefaultControlStub = sandbox.stub();
			this.oControl.setDefaultVariantId = oSetDefaultControlStub;
			var oSetModifiedStub = sandbox.stub();
			this.oControl.setModified = oSetModifiedStub;
			// The FL stubs echo back the variant id so the control-side stubs
			// can be checked against the FL return values.
			var oUpdateFlAPIStub = sandbox.stub(SmartVariantManagementWriteAPI, "updateVariant").callsFake(function(mPropertyBag) {
				return mPropertyBag.id;
			});
			var oSetDefaultFlAPIStub = sandbox.stub(SmartVariantManagementWriteAPI, "setDefaultVariantId");
			var oRevertDefaultFlAPIStub = sandbox.stub(SmartVariantManagementWriteAPI, "revertSetDefaultVariantId");
			var oRemoveVariantFlAPIStub = sandbox.stub(SmartVariantManagementWriteAPI, "removeVariant");
			var oRevertFlAPIStub = sandbox.stub(SmartVariantManagementWriteAPI, "revert").callsFake(function(mPropertyBag) {
				return mPropertyBag.id;
			});

			// Shared assertions for the state after execute() — used for both
			// the first execution and the redo after undo.
			function assertExecute(oControl) {
				assert.equal(oUpdateFlAPIStub.callCount, 2, "the FL update function was called twice");
				var mExpectedProperties1 = {
					id: "variant2",
					control: oControl,
					generator: "sap.ui.rta.command",
					command: "compVariantUpdate",
					layer: Layer.CUSTOMER,
					favorite: false
				};
				assert.deepEqual(oUpdateFlAPIStub.getCall(0).args[0], mExpectedProperties1, "the FL API was called with the correct properties 2");
				var mExpectedProperties2 = {
					id: "variant3",
					control: oControl,
					generator: "sap.ui.rta.command",
					command: "compVariantUpdate",
					layer: Layer.CUSTOMER,
					executeOnSelection: true,
					name: "newName",
					oldName: "oldName",
					favorite: true
				};
				assert.deepEqual(oUpdateFlAPIStub.getCall(1).args[0], mExpectedProperties2, "the FL API was called with the correct properties 3");
				assert.equal(oSetDefaultFlAPIStub.callCount, 1, "the FL API setDefault was called");
				assert.equal(oSetDefaultFlAPIStub.lastCall.args[0].defaultVariantId, "variant3", "the correct variant id was passed");
				assert.equal(oRemoveVariantFlAPIStub.callCount, 1, "the FL API removeVariant was called");
				assert.equal(oRemoveVariantFlAPIStub.lastCall.args[0].id, "variant1", "the correct variant id was passed");
				assert.equal(oUpdateControlStub.callCount, 2, "the control API updateVariant was called twice");
				assert.equal(oUpdateControlStub.getCall(0).args[0], "variant2", "with the return value of FL updateVariant");
				assert.equal(oUpdateControlStub.getCall(1).args[0], "variant3", "with the return value of FL updateVariant");
				assert.equal(oSetDefaultControlStub.callCount, 1, "the control API setDefault was called");
				assert.equal(oSetDefaultControlStub.lastCall.args[0], "variant3", "the correct variant id was passed");
				assert.equal(oRemoveControlStub.callCount, 1, "the control API removeVariant was called");
				assert.equal(oRemoveControlStub.lastCall.args[0].variantId, "variant1", "the correct variant id was passed");
			}

			// variant1 is deleted, variant2 loses the favorite flag, variant3
			// is renamed/flagged and becomes the new default.
			return CommandFactory.getCommandFor(this.oControl, "compVariantUpdate", {
				newVariantProperties: {
					variant1: {
						executeOnSelection: false,
						deleted: true
					},
					variant2: {
						favorite: false
					},
					variant3: {
						executeOnSelection: true,
						name: "newName",
						oldName: "oldName",
						favorite: true
					}
				},
				newDefaultVariantId: "variant3",
				oldDefaultVariantId: "variant1"
			}, {})
			.then(function(oCreatedCommand) {
				oUpdateCommand = oCreatedCommand;
				return oUpdateCommand.execute();
			}).then(function() {
				assertExecute(this.oControl);
				return oUpdateCommand.undo();
			}.bind(this)).then(function() {
				assert.equal(oRevertFlAPIStub.callCount, 3, "the revert function was called thrice");
				assert.equal(oRevertFlAPIStub.getCall(0).args[0].id, "variant1", "the correct variant id was passed 1");
				assert.equal(oRevertFlAPIStub.getCall(1).args[0].id, "variant2", "the correct variant id was passed 2");
				assert.equal(oRevertFlAPIStub.getCall(2).args[0].id, "variant3", "the correct variant id was passed 3");
				assert.equal(oRevertDefaultFlAPIStub.callCount, 1, "the revertSetDefaultVariantId function was called once");
				assert.equal(oAddControlStub.lastCall.args[0], "variant1", "the correct variant was added");
				assert.equal(oAddControlStub.callCount, 1, "the addVariant function on the control was called once");
				// NOTE(review): the next assertion duplicates the one two lines
				// above (same stub, same argument, same message) — likely one of
				// them was meant to check a different stub; confirm.
				assert.equal(oAddControlStub.lastCall.args[0], "variant1", "the correct variant was added");
				// NOTE(review): the message says "called twice" but the expected
				// callCount is 4 (2 from execute + 2 from undo) — the message
				// text is misleading.
				assert.equal(oUpdateControlStub.callCount, 4, "the updateVariant function on the control was called twice");
				assert.equal(oUpdateControlStub.getCall(2).args[0], "variant2", "the correct variant was updated 1");
				assert.equal(oUpdateControlStub.getCall(3).args[0], "variant3", "the correct variant was updated 2");
				sandbox.resetHistory();
				return oUpdateCommand.execute();
			}).then(function() {
				assertExecute(this.oControl);
			}.bind(this));
		});
	});
});
|
SAP/openui5
|
src/sap.ui.rta/test/sap/ui/rta/qunit/command/compVariant/CompVariantUpdate.qunit.js
|
JavaScript
|
apache-2.0
| 8,251 |
#!/astro/apps/pkg/python/bin/python
import pyfits
import SDSSfits
import numpy
from tools import create_fits
import os
def main(OUT_DIR = "/astro/net/scratch1/vanderplas/SDSS_GAL_RESTFRAME/",
DIR_ROOT = "/astro/net/scratch1/sdssspec/spectro/1d_26/*/1d",
LINES_FILE = "LINES_SHORT.TXT",
z_min = 0.0, #zmax is set such that SII lines will
z_max = 0.36, # fall in range of 3830 to 9200 angstroms
rebin_coeff0 = 3.583, # rebin parameters give a wavelength
rebin_coeff1 = 0.0002464, # range from 3830A to 9200A
rebin_length = 1000,
remove_sky_absorption = True,
normalize = True):
LINES = []
KEYS = ['TARGET','Z','Z_ERR','SPEC_CLN','MAG_G','MAG_R','MAG_I','N_BAD_PIX']
if LINES_FILE is not None:
for line in open(LINES_FILE):
line = line.split()
if len(line)==0:continue
W = float(line[0])
if W<3000 or W>7000:continue
LINES.append('%.2f'%W)
for info in ('flux','dflux','width','dwidth','nsigma'):
KEYS.append('%.2f_%s' % (W,info) )
for SET in os.listdir(DIR_ROOT.split('*')[0]):
if not SET.isdigit():
continue
DIR = DIR_ROOT.replace('*',SET)
if not os.path.exists(DIR):
continue
OUT_FILE = os.path.join(OUT_DIR,SET+'.dat')
print 'writing %s' % os.path.join(OUT_DIR,SET+'.dat')
col_dict = dict([(KEY,[]) for KEY in KEYS])
spec_list = []
NUMS = []
for F in os.listdir(DIR):
if not F.endswith('.fit'): continue
num = int( F.strip('.fit').split('-')[-1] )
if num in NUMS:
#print " - already measured: skipping %s" % F
continue
#open hdu file and glean necessary info
SPEC = SDSSfits.SDSSfits(os.path.join(DIR,F),LINES)
if SPEC.D['SPEC_CLN'] not in (1,2,3,4):
continue
if SPEC.z<z_min:
#print " - negative z: skipping %s" % F
continue
if SPEC.z>z_max:
#print " - z>z_max: skipping %s" % F
continue
if SPEC.numlines == 0:
#print " - no line measurements: skipping %s" % F
continue
if remove_sky_absorption:
#cover up strong oxygen absorption
SPEC.remove_O_lines()
#move to restframe, rebin, and normalize
SPEC.move_to_restframe()
try:
SPEC = SPEC.rebin(rebin_coeff0,rebin_coeff1,rebin_length)
except:
print " rebin failed. Skipping %s" % F
continue
if normalize:
try:
SPEC.normalize()
except:
print " normalize failed. Skipping %s" % F
continue
if min(SPEC.spectrum) < -4*max(SPEC.spectrum):
print " goes too far negative. Skipping %s" % F
NUMS.append(num)
spec_list.append(SPEC.spectrum.tolist())
for KEY in KEYS:
col_dict[KEY].append(SPEC.D[KEY])
del SPEC
if os.path.exists(OUT_FILE):
os.system('rm %s' % OUT_FILE)
col_dict['coeff0'] = rebin_coeff0
col_dict['coeff1'] = rebin_coeff1
create_fits(OUT_FILE,numpy.asarray( spec_list ),**col_dict)
print " - wrote %i spectra" % len(NUMS)
if __name__ == '__main__':
    # Run with line measurements disabled (LINES_FILE=None) and without
    # sky-absorption removal or normalization; see main() for parameters.
    main(OUT_DIR = "/astro/net/scratch1/vanderplas/SDSS_GAL_RESTFRAME/",
         DIR_ROOT = "/astro/net/scratch1/sdssspec/spectro/1d_26/*/1d",
         #LINES_FILE = "LINES_SHORT.TXT",
         LINES_FILE = None,
         z_min = 0.0,              #zmax is set such that SII lines will
         z_max = 0.36,             # fall in range of 3830 to 9200 angstroms
         rebin_coeff0 = 3.583,     # rebin parameters give a wavelength
         rebin_coeff1 = 0.0002464, # range from 3830A to 9200A
         rebin_length = 1000,
         remove_sky_absorption = False,
         normalize = False)
|
excelly/xpy-ml
|
sdss/jake_lib/make_condensed_fits.py
|
Python
|
apache-2.0
| 4,199 |
# -*- coding: utf-8 -*-
'''
Common data structures shared across the package.

container.py :
    NetInterface : describes one virtual NIC of a container
        -name : name of the virtual NIC
        -hostVeth : name of the host-side veth peer for this NIC
        -ip : IP address
        -mac : MAC address
        -vethMac : MAC address of the host-side veth
        + NetInterface::create : creates a virtual NIC, returns a NetInterface object
            container : target container
            vName : peer name on the container side
            h_vName : peer name on the host side
    Container : data structure describing a container, can be persisted
        -host : the host the container belongs to
        -pid : pid of the container on the host
        -id : container ID assigned by the docker daemon
        -ifaces [list] : the container's virtual NICs, a collection of Interface objects
        -netns : the container's network namespace, a NetworkNamespace instance
        -image : name of the image used to create the container
        -dataDirectory : path where the container's data is stored
        -createTime : creation time
        -state : current running state
        -belongsTo : owning user
        +attachToNetworkNamespace : join a network namespace
            netns : the namespace object to join
        +detachNetworkNamespace : leave a network namespace
            netns : the namespace object to leave
net.py :
    NetworkNamespace : data structure describing a network namespace
        -uid : unique ID, generated via a uuid function at initialization
        -addrs [list] : IPs owned by the namespace, possibly several, as CIDR addresses
        -containers : containers that have joined the network
        -initHost : the host the namespace belonged to when it was initialized
        -createTime : creation time
        -belongsTo : owning user
utils.py:
    Host : data structure describing a host
        -mac : MAC address
        -transportIp : IP used for data transport
        -containers : containers held by the host, a list of Container objects
        -proxys : list of container-creation proxies on the host
        +getConcreteProxy : obtain a specific type of container-creation proxy
            ProxyClass : the proxy type
    Switch : describes the virtual switch installed on a host
        -host : owning host
        -portsToContainers : mapping between switch ports and containers
        -portsInfo : information about each port
        -bridgeName : name of the bridge
exceptions.py :
    ContainerCreatorTypeInvalidError : the container creator does not match the
        container-creation proxy type
tools.py :
'''
onlysheep5200/NetnsEx
|
lib/__init__.py
|
Python
|
apache-2.0
| 2,175 |
package com.example.android.sunshine.app.widget;
import android.annotation.TargetApi;
import android.app.PendingIntent;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProvider;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.v4.app.TaskStackBuilder;
import android.util.Log;
import android.widget.RemoteViews;
import com.example.android.sunshine.app.DetailActivity;
import com.example.android.sunshine.app.MainActivity;
import com.example.android.sunshine.app.R;
import com.example.android.sunshine.app.Utility;
import com.example.android.sunshine.app.sync.SunshineSyncAdapter;
/**
* Provider for a scrollable weather detail widget
*/
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public class DetailWidgetProvider extends AppWidgetProvider {
public final String LOG_TAG = DetailWidgetProvider.class.getSimpleName();
public void onUpdate(Context context, AppWidgetManager appWidgetManager, int[] appWidgetIds) {
// Perform this loop procedure for each App Widget that belongs to this provider
for (int appWidgetId : appWidgetIds) {
RemoteViews views = new RemoteViews(context.getPackageName(), R.layout.widget_detail);
// Create an Intent to launch MainActivity
Intent intent = new Intent(context, MainActivity.class);
PendingIntent pendingIntent = PendingIntent.getActivity(context, 0, intent, 0);
views.setOnClickPendingIntent(R.id.widget, pendingIntent);
String location = Utility.getCurrentAddress(context);
Log.d(LOG_TAG, "위젯에서 현재 위치는:" + location);
views.setTextViewText(R.id.widget_item_address_textview, location);
// Set up the collection
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
setRemoteAdapter(context, views);
} else {
setRemoteAdapterV11(context, views);
}
boolean useDetailActivity = context.getResources()
.getBoolean(R.bool.use_detail_activity);
Intent clickIntentTemplate = useDetailActivity
? new Intent(context, DetailActivity.class)
: new Intent(context, MainActivity.class);
PendingIntent clickPendingIntentTemplate = TaskStackBuilder.create(context)
.addNextIntentWithParentStack(clickIntentTemplate)
.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT);
views.setPendingIntentTemplate(R.id.widget_list, clickPendingIntentTemplate);
views.setEmptyView(R.id.widget_list, R.id.widget_empty);
// Tell the AppWidgetManager to perform an update on the current app widget
appWidgetManager.updateAppWidget(appWidgetId, views);
}
}
@Override
public void onReceive(@NonNull Context context, @NonNull Intent intent) {
super.onReceive(context, intent);
if (SunshineSyncAdapter.ACTION_DATA_UPDATED.equals(intent.getAction())) {
AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(context);
int[] appWidgetIds = appWidgetManager.getAppWidgetIds(
new ComponentName(context, getClass()));
appWidgetManager.notifyAppWidgetViewDataChanged(appWidgetIds, R.id.widget_list);
}
}
/**
* Sets the remote adapter used to fill in the list items
*
* @param views RemoteViews to set the RemoteAdapter
*/
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
private void setRemoteAdapter(Context context, @NonNull final RemoteViews views) {
views.setRemoteAdapter(R.id.widget_list,
new Intent(context, DetailWidgetRemoteViewsService.class));
}
/**
* Sets the remote adapter used to fill in the list items
*
* @param views RemoteViews to set the RemoteAdapter
*/
@SuppressWarnings("deprecation")
private void setRemoteAdapterV11(Context context, @NonNull final RemoteViews views) {
views.setRemoteAdapter(0, R.id.widget_list,
new Intent(context, DetailWidgetRemoteViewsService.class));
}
}
|
chayoungrock/weather
|
app/src/main/java/com/example/android/sunshine/app/widget/DetailWidgetProvider.java
|
Java
|
apache-2.0
| 4,337 |
/*
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Protractor configuration file, see link for more information
// https://github.com/angular/protractor/blob/master/lib/config.ts
const { SpecReporter } = require('jasmine-spec-reporter');
exports.config = {
allScriptsTimeout: 11000,
specs: [
'./e2e/**/*.e2e-spec.ts'
],
capabilities: {
'browserName': 'chrome'
},
directConnect: true,
baseUrl: 'http://localhost:4200/',
framework: 'jasmine',
jasmineNodeOpts: {
showColors: true,
defaultTimeoutInterval: 30000,
print: function() {}
},
beforeLaunch: function() {
require('ts-node').register({
project: 'e2e/tsconfig.e2e.json'
});
},
onPrepare() {
jasmine.getEnv().addReporter(new SpecReporter({ spec: { displayStacktrace: true } }));
}
};
|
gravitee-io/graviteeio-access-management
|
gravitee-am-ui/protractor.conf.js
|
JavaScript
|
apache-2.0
| 1,386 |
package com.myweather.app.activity;
import android.content.Context;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.widget.LinearLayout;

import com.myweather.app.R;
/**
 * Bottom menu bar: a LinearLayout that inflates R.layout.bottom_menu into
 * itself on construction.
 */
public class BottomLayout extends LinearLayout {

    /** Programmatic construction. */
    public BottomLayout(Context context) {
        super(context);
        LayoutInflater.from(context).inflate(R.layout.bottom_menu, this);
    }

    /**
     * XML construction. FIX: this constructor was missing, so inflating
     * BottomLayout from a layout file would crash with NoSuchMethodException.
     */
    public BottomLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        LayoutInflater.from(context).inflate(R.layout.bottom_menu, this);
    }
}
|
taohaox/MyWeather
|
src/com/myweather/app/activity/BottomLayout.java
|
Java
|
apache-2.0
| 370 |
package com.planet_ink.coffee_mud.MOBS;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2022 Lee H. Fox
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class DrowElf extends StdMOB
{
@Override
public String ID()
{
return "DrowElf";
}
public static final int MALE = 0;
public static final int FEMALE = 1;
public int darkDown=4;
public DrowElf()
{
super();
final Random randomizer = new Random(System.currentTimeMillis());
basePhyStats().setLevel(4 + Math.abs(randomizer.nextInt() % 7));
final int gender = Math.abs(randomizer.nextInt() % 2);
String sex = null;
if (gender == MALE)
sex = "male";
else
sex = "female";
// ===== set the basics
_name="a Drow Elf";
setDescription("a " + sex + " Drow Fighter");
setDisplayText("The drow is armored in black chain mail and carrying a nice arsenal of weapons");
baseState.setHitPoints(CMLib.dice().roll(basePhyStats().level(),20,basePhyStats().level()));
setMoney((int)Math.round(CMath.div((50 * basePhyStats().level()),(randomizer.nextInt() % 10 + 1))));
basePhyStats.setWeight(70 + Math.abs(randomizer.nextInt() % 20));
setWimpHitPoint(5);
basePhyStats().setSpeed(2.0);
basePhyStats().setSensesMask(PhyStats.CAN_SEE_DARK | PhyStats.CAN_SEE_INFRARED);
if(gender == MALE)
baseCharStats().setStat(CharStats.STAT_GENDER,'M');
else
baseCharStats().setStat(CharStats.STAT_GENDER,'F');
baseCharStats().setStat(CharStats.STAT_STRENGTH,12 + Math.abs(randomizer.nextInt() % 6));
baseCharStats().setStat(CharStats.STAT_INTELLIGENCE,14 + Math.abs(randomizer.nextInt() % 6));
baseCharStats().setStat(CharStats.STAT_WISDOM,13 + Math.abs(randomizer.nextInt() % 6));
baseCharStats().setStat(CharStats.STAT_DEXTERITY,15 + Math.abs(randomizer.nextInt() % 6));
baseCharStats().setStat(CharStats.STAT_CONSTITUTION,12 + Math.abs(randomizer.nextInt() % 6));
baseCharStats().setStat(CharStats.STAT_CHARISMA,13 + Math.abs(randomizer.nextInt() % 6));
baseCharStats().setMyRace(CMClass.getRace("Elf"));
baseCharStats().getMyRace().startRacing(this,false);
recoverMaxState();
resetToMaxState();
recoverPhyStats();
recoverCharStats();
}
@Override
public boolean tick(final Tickable ticking, final int tickID)
{
if((!amDead())&&(tickID==Tickable.TICKID_MOB))
{
if (isInCombat())
{
if((--darkDown)<=0)
{
darkDown=4;
castDarkness();
}
}
}
return super.tick(ticking,tickID);
}
protected boolean castDarkness()
{
if(this.location()==null)
return true;
if(CMLib.flags().isInDark(this.location()))
return true;
Ability dark=CMClass.getAbility("Spell_Darkness");
dark.setProficiency(100);
dark.setSavable(false);
if(this.fetchAbility(dark.ID())==null)
this.addAbility(dark);
else
dark=this.fetchAbility(dark.ID());
if(dark!=null)
dark.invoke(this,null,true,0);
return true;
}
}
|
bozimmerman/CoffeeMud
|
com/planet_ink/coffee_mud/MOBS/DrowElf.java
|
Java
|
apache-2.0
| 4,315 |
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.am.common.event;

/**
 * Keys used when building events sent to the alert engine: processor
 * identifiers, node/gateway context keys, and event property names.
 *
 * @author Jeoffrey HAEYAERT (jeoffrey.haeyaert at graviteesource.com)
 * @author GraviteeSource Team
 */
public interface AlertEventKeys {

    // Identifiers of event-enrichment processors.
    String PROCESSOR_GEOIP = "geoip";
    String PROCESSOR_USERAGENT = "useragent";

    // Keys describing the node/gateway an event originates from.
    String CONTEXT_NODE_ID = "node.id";
    String CONTEXT_NODE_HOSTNAME = "node.hostname";
    String CONTEXT_NODE_APPLICATION = "node.application";
    String CONTEXT_GATEWAY_PORT = "gateway.port";

    // Per-event property names.
    String PROPERTY_DOMAIN = "domain";
    String PROPERTY_APPLICATION = "application";
    String PROPERTY_USER = "user";
    String PROPERTY_IP = "ip";
    String PROPERTY_USER_AGENT = "user_agent";
    String PROPERTY_TRANSACTION_ID = "transaction_id";
    String PROPERTY_AUTHENTICATION_STATUS = "authentication.status";

    // Event types.
    String TYPE_AUTHENTICATION = "AUTHENTICATION";
}
|
gravitee-io/graviteeio-access-management
|
gravitee-am-common/src/main/java/io/gravitee/am/common/event/AlertEventKeys.java
|
Java
|
apache-2.0
| 1,476 |
window.onload = function() {
    // A small subtractive synth: saw oscillator -> low-pass filter -> gain,
    // with LFOs modulating oscillator frequency and filter cutoff, and an
    // ADSR envelope driving the gain. Connection order below matters: the
    // main path is wired first, then the modulators are patched in.
    var Synth = function(audiolet) {
        AudioletGroup.apply(this, [audiolet, 0, 1]);
        // Basic wave
        this.saw = new Saw(audiolet, 100);

        // Frequency LFO
        this.frequencyLFO = new Sine(audiolet, 2);
        // Scales the LFO into a usable pitch range: out = lfo * 10 + 100.
        this.frequencyMA = new MulAdd(audiolet, 10, 100);

        // Filter
        this.filter = new LowPassFilter(audiolet, 1000);

        // Filter LFO
        this.filterLFO = new Sine(audiolet, 8);
        // Sweeps the cutoff around 1000 Hz: out = lfo * 900 + 1000.
        this.filterMA = new MulAdd(audiolet, 900, 1000);

        // Gain envelope
        this.gain = new Gain(audiolet);
        this.env = new ADSREnvelope(audiolet,
                                    1, // Gate
                                    1.5, // Attack
                                    0.2, // Decay
                                    0.9, // Sustain
                                    2); // Release

        // Main signal path
        this.saw.connect(this.filter);
        this.filter.connect(this.gain);
        this.gain.connect(this.outputs[0]);

        // Frequency LFO
        this.frequencyLFO.connect(this.frequencyMA);
        this.frequencyMA.connect(this.saw);

        // Filter LFO (into the filter's cutoff input, channel 1)
        this.filterLFO.connect(this.filterMA);
        this.filterMA.connect(this.filter, 0, 1);

        // Envelope (into the gain's amount input, channel 1)
        this.env.connect(this.gain, 0, 1);
    };
    extend(Synth, AudioletGroup);

    var audiolet = new Audiolet();
    var synth = new Synth(audiolet);

    // An 8-step bass line; gate alternates on/off each step.
    var frequencyPattern = new PSequence([55, 55, 98, 98, 73, 73, 98, 98],
                                         Infinity);
    var filterLFOPattern = new PChoose([2, 4, 6, 8], Infinity);
    var gatePattern = new PSequence([1, 0], Infinity);

    var patterns = [frequencyPattern, filterLFOPattern, gatePattern];
    // Every 2 beats, apply the next step of each pattern to the synth.
    audiolet.scheduler.play(patterns, 2,
        function(frequency, filterLFOFrequency, gate) {
            this.frequencyMA.add.setValue(frequency);
            this.filterLFO.frequency.setValue(filterLFOFrequency);
            this.env.gate.setValue(gate);
        }.bind(synth)
    );

    synth.connect(audiolet.output);
};
|
accraze/Audiolet
|
examples/synth/js/synth.js
|
JavaScript
|
apache-2.0
| 2,107 |
package com.twu.biblioteca.menus;

import com.twu.biblioteca.options.*;

import java.util.ArrayList;
import java.util.Arrays;
/**
* Created by aloysiusang on 17/6/15.
*/
public class UserMainMenu extends MainMenu {

    public UserMainMenu() {
        // Build the option list explicitly instead of via double-brace
        // initialization, which creates an anonymous ArrayList subclass that
        // retains a reference to its enclosing context.
        super(new ArrayList<MainMenuOption>(Arrays.asList(
                new ListBooksOption(),
                new CheckOutBookOption(),
                new ReturnBookOption(),
                new ListMoviesOption(),
                new CheckOutMovieOption(),
                new ReturnMovieOption(),
                new UserInformationOption(),
                new QuitOption())));
    }
}
|
aloysiusang/twu-biblioteca-aloysiusang
|
src/com/twu/biblioteca/menus/UserMainMenu.java
|
Java
|
apache-2.0
| 624 |
package com.lzh.mdzhihudaily_mvp.presenter;
import android.support.annotation.NonNull;
import com.lzh.mdzhihudaily_mvp.contract.ThemeDailyContract;
import com.lzh.mdzhihudaily_mvp.model.DataRepository;
import com.lzh.mdzhihudaily_mvp.model.Entity.ThemeNews;
import rx.Subscriber;
import rx.Subscription;
/**
* @author lzh
* @desc:
* @date Created on 2017/3/5 23:59
* @github: https://github.com/lisuperhong
*/
public class ThemeDailyPresenter implements ThemeDailyContract.Presenter {

    private ThemeDailyContract.View themeDailyView;
    private int themeId;
    private Subscription subscription;

    public ThemeDailyPresenter(@NonNull ThemeDailyContract.View view, int themeId) {
        themeDailyView = view;
        this.themeId = themeId;
    }

    /** Initial load: shows the loading view and fetches this theme's news. */
    @Override
    public void start() {
        themeDailyView.showLoading();
        getThemeNews(false);
    }

    /** Pull-to-refresh entry point. */
    @Override
    public void refreshData() {
        getThemeNews(true);
    }

    /**
     * Fetches news for {@code themeId}, cancelling any in-flight request first.
     *
     * @param isRefresh true when triggered by pull-to-refresh (stops the
     *                  refresh spinner when done), false for the initial load
     *                  (hides the full-screen loading view instead)
     */
    private void getThemeNews(final boolean isRefresh) {
        unsubscript();
        subscription = DataRepository.getInstance()
                .getThemeNews(themeId)
                .subscribe(new Subscriber<ThemeNews>() {
                    @Override
                    public void onCompleted() {
                        dismissProgress(isRefresh);
                    }

                    @Override
                    public void onError(Throwable e) {
                        // Completion and error share the same UI teardown; no
                        // error message is surfaced to the view here.
                        dismissProgress(isRefresh);
                    }

                    @Override
                    public void onNext(ThemeNews themeNews) {
                        themeDailyView.setData(themeNews);
                    }
                });
    }

    /** Dismisses whichever progress UI matches how the load was started. */
    private void dismissProgress(boolean isRefresh) {
        if (isRefresh) {
            themeDailyView.stopRefreshLayout();
        } else {
            themeDailyView.hideLoading();
        }
    }

    /** Cancels the in-flight request, if any. */
    @Override
    public void unsubscript() {
        if (subscription != null && !subscription.isUnsubscribed()) {
            subscription.unsubscribe();
        }
    }
}
|
lisuperhong/MDZhihuDaily
|
app/src/main/java/com/lzh/mdzhihudaily_mvp/presenter/ThemeDailyPresenter.java
|
Java
|
apache-2.0
| 2,219 |
/*
* ******************************************************************************
* Copyright 2014-2016 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
using System;
namespace Ds3.Runtime
{
    /// <summary>
    /// Exception raised by the DS3 client for request-level failures.
    /// </summary>
    public class Ds3RequestException : Exception
    {
        /// <summary>
        /// Creates the exception with a descriptive message.
        /// </summary>
        /// <param name="message">Description of the failure.</param>
        public Ds3RequestException(string message)
            : base(message)
        {
        }

        /// <summary>
        /// Creates the exception wrapping an underlying cause.
        /// </summary>
        /// <param name="message">Description of the failure.</param>
        /// <param name="innerException">The exception that triggered this one.</param>
        public Ds3RequestException(string message, Exception innerException)
            : base(message, innerException)
        {
        }
    }
}
rpmoore/ds3_net_sdk
|
Ds3/Runtime/Ds3RequestException.cs
|
C#
|
apache-2.0
| 1,105 |
using JetBrains.Annotations;
using JetBrains.ReSharper.Plugins.Unity.Yaml.Psi.DeferredCaches.AnimationEventsUsages;
using JetBrains.ReSharper.Plugins.Unity.Yaml.Psi.DeferredCaches.AssetHierarchy.References;
using JetBrains.ReSharper.Psi;
namespace JetBrains.ReSharper.Plugins.Unity.Yaml.Psi.Search
{
    /// <summary>
    /// Find-usages result for a declared element referenced by a Unity
    /// animation event in a YAML asset.
    /// </summary>
    public class UnityAnimationEventFindResults : UnityAssetFindResult
    {
        public UnityAnimationEventFindResults([NotNull] IPsiSourceFile sourceFile,
                                              [NotNull] IDeclaredElement declaredElement,
                                              [NotNull] AnimationUsage usage,
                                              LocalReference owningElementLocation)
            : base(sourceFile, declaredElement, owningElementLocation)
        {
            Usage = usage;
        }

        /// <summary>The animation event usage this result points at.</summary>
        [NotNull]
        public AnimationUsage Usage { get; }

        // Equality is the base result identity plus the specific usage, so two
        // results for the same usage compare equal.
        public override bool Equals(object obj)
        {
            if (ReferenceEquals(null, obj)) return false;
            if (ReferenceEquals(this, obj)) return true;
            return obj.GetType() == GetType() && Equals((UnityAnimationEventFindResults) obj);
        }

        private bool Equals([NotNull] UnityAnimationEventFindResults other)
        {
            return base.Equals(other) && Usage.Equals(other.Usage);
        }

        public override int GetHashCode()
        {
            unchecked
            {
                // 397 is a conventional odd prime multiplier for hash mixing.
                return (base.GetHashCode() * 397) ^ Usage.GetHashCode();
            }
        }
    }
}
|
JetBrains/resharper-unity
|
resharper/resharper-unity/src/Unity/Yaml/Psi/Search/UnityAnimationEventFindResults.cs
|
C#
|
apache-2.0
| 1,533 |
(function ($) {
    "use strict";

    /*----------------------------
     Price slider (jQuery UI)
    ------------------------------*/
    var range = $('#slider-range');
    var amount = $('#amount');
    range.slider({
        range: true,
        min: 2,
        max: 300,
        values: [ 2, 300 ],
        slide: function( event, ui ) {
            amount.val( "$" + ui.values[ 0 ] + " - $" + ui.values[ 1 ] );
        }
    });
    // Seed the label with the initial handle positions.
    amount.val( "$" + range.slider( "values", 0 ) +
        " - $" + range.slider( "values", 1 ) );

    /*----------------------------
     jQuery MeanMenu (mobile navigation)
    ------------------------------*/
    jQuery('#mobile-menu-active').meanmenu();

    /*----------------------------
     Owl Carousel helper: every carousel on the site shares the same base
     options, so merge per-carousel overrides into the defaults instead of
     repeating the full option block thirteen times.
    ------------------------------*/
    function initCarousel(selector, options) {
        $(selector).owlCarousel($.extend({
            autoPlay: false,
            slideSpeed: 2000,
            pagination: false,
            navigation: true,
            navigationText: ["<i class='fa fa-angle-left'></i>", "<i class='fa fa-angle-right'></i>"]
        }, options));
    }

    // Main hero slider
    initCarousel(".let_new_carasel", {
        autoPlay: true,
        pagination: true,
        navigationText: ["<i class='fa fa-caret-left'></i>", "<i class='fa fa-caret-right'></i>"],
        items: 1,
        itemsDesktop: [1199, 1],
        itemsDesktopSmall: [980, 1],
        itemsTablet: [768, 1],
        itemsMobile: [767, 1]
    });

    /*----------------------------
     Tooltip
    ------------------------------*/
    $('[data-toggle="tooltip"]').tooltip({
        animated: 'fade',
        placement: 'top',
        container: 'body'
    });

    // Single-portfolio thumbnail strip (no nav arrows)
    initCarousel(".sub_pix", {
        autoPlay: true,
        pagination: true,
        navigation: false,
        items: 5,
        itemsDesktop: [1199, 4],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 5],
        itemsMobile: [767, 3]
    });

    /*----------------------------
     Slide toggles (category mega menu, "show more")
    ------------------------------*/
    $(".all_catagories").on("click", function () {
        $(".cat_mega_start").slideToggle("slow");
    });
    $(".showmore-items").on("click", function () {
        $(".cost-menu").slideToggle("slow");
    });

    // New products
    initCarousel(".whole_product", {
        items: 3,
        itemsDesktop: [1199, 3],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 1],
        itemsMobile: [767, 1]
    });

    // Hot deals
    initCarousel(".new_cosmatic", {
        items: 1,
        itemsDesktop: [1199, 1],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 2],
        itemsMobile: [767, 1]
    });

    /*---------------------
     Countdown timers (jquery.countdown)
    ---------------------*/
    $('[data-countdown]').each(function () {
        var $this = $(this), finalDate = $(this).data('countdown');
        $this.countdown(finalDate, function (event) {
            $this.html(event.strftime('<span class="cdown days"><span class="time-count">%-D</span> <p>Days</p></span> <span class="cdown hour"><span class="time-count">%-H</span> <p>Hour</p></span> <span class="cdown minutes"><span class="time-count">%M</span> <p>Min</p></span> <span class="cdown second"> <span><span class="time-count">%S</span> <p>Sec</p></span>'));
        });
    });

    // Products category
    initCarousel(".feature-carousel", {
        items: 4,
        itemsDesktop: [1199, 3],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 2],
        itemsMobile: [767, 1]
    });

    // Top rated
    initCarousel(".all_ayntex", {
        items: 1,
        itemsDesktop: [1199, 1],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 2],
        itemsMobile: [767, 1]
    });

    // Featured categories
    initCarousel(".achard_all", {
        items: 5,
        itemsDesktop: [1199, 4],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 4],
        itemsMobile: [767, 2]
    });

    // Blog posts
    initCarousel(".blog_carasel", {
        items: 3,
        itemsDesktop: [1199, 2],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 2],
        itemsMobile: [767, 1]
    });

    // Brand logos
    initCarousel(".all_brand", {
        items: 6,
        itemsDesktop: [1199, 4],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 2],
        itemsMobile: [480, 2]
    });

    /*----------------------
     Back-to-top button (scrollUp)
    ----------------------*/
    $.scrollUp({
        scrollText: '<i class="fa fa-angle-double-up"></i>',
        easingType: 'linear',
        scrollSpeed: 900,
        animation: 'fade'
    });

    // New products (home page 2)
    initCarousel(".product_2", {
        items: 4,
        itemsDesktop: [1199, 3],
        itemsDesktopSmall: [980, 4],
        itemsTablet: [768, 2],
        itemsMobile: [767, 1]
    });

    // Blog posts (home page 2)
    initCarousel(".blog_new_carasel_2", {
        items: 2,
        itemsDesktop: [1199, 2],
        itemsDesktopSmall: [980, 2],
        itemsTablet: [768, 1],
        itemsMobile: [767, 1]
    });

    // Products category (home page 2)
    initCarousel(".feature-carousel-2", {
        items: 2,
        itemsDesktop: [1199, 2],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 2],
        itemsMobile: [767, 1]
    });

    // Blog posts (home page 3)
    initCarousel(".blog_carasel_5", {
        items: 4,
        itemsDesktop: [1199, 4],
        itemsDesktopSmall: [980, 3],
        itemsTablet: [768, 2],
        itemsMobile: [767, 1]
    });

    /*-----------------------------
     Category menu toggle
    -------------------------------*/
    $('.expandable a').on('click', function () {
        $(this).parent().find('.category-sub').toggleClass('submenu-active');
        $(this).toggleClass('submenu-active');
        return false;  // suppress navigation for toggle-only anchors
    });

    /*----------------------------
     MixItUp product filtering
    ------------------------------*/
    $('#Container').mixItUp();

    /*----------------------------
     magnificPopup image lightbox
    ------------------------------*/
    $('.magnify').magnificPopup({ type: 'image' });

    /*-------------------------
     Checkout-page toggles (create account / login / coupon)
    --------------------------*/
    $("#cbox").on("click", function () {
        $("#cbox_info").slideToggle(900);
    });
    $('#showlogin, #showcoupon').on('click', function () {
        $(this).parent().next().slideToggle(600);
    });

    /*-------------------------
     Payment accordion: expand the clicked panel, collapse the others,
     and move the 'active' class for styling.
    --------------------------*/
    $('.payment-accordion').find('.payment-accordion-toggle').on('click', function () {
        $(this).next().slideToggle(500);
        $(".payment-content").not($(this).next()).slideUp(500);
    });
    $('.payment-accordion-toggle').on('click', function (event) {
        $(this).siblings('.active').removeClass('active');
        $(this).addClass('active');
        event.preventDefault();
    });
})(jQuery);
|
TZClub/OMIPlatform
|
shopping-platfrom/src/main/webapp/resources/js/main.js
|
JavaScript
|
apache-2.0
| 10,332 |
/*
* Copyright (c) 2010, Christophe Souvignier.
* Copyright (c) 2010, Paul Merlin.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.n0pe.mojo.asadmin;
import java.util.Iterator;
import java.util.Map;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.n0pe.asadmin.AsAdminCmdList;
import org.n0pe.asadmin.commands.CreateJdbcConnectionPool;
/**
* @goal create-jdbc-connection-pool
*/
public class CreateJdbcConnectionPoolMojo
    extends AbstractAsadminMojo
{

    /**
     * Fully qualified class name of the pool's data source implementation.
     *
     * @parameter default-value="org.apache.derby.jdbc.ClientXADataSource"
     * @required
     */
    private String poolDataSource;

    /**
     * Name of the JDBC connection pool to create.
     *
     * @parameter
     * @required
     */
    private String poolName;

    /**
     * Resource type exposed by the pool.
     *
     * @parameter default-value="javax.sql.XADataSource"
     * @required
     */
    private String restype;

    /**
     * Optional name/value properties set on the pool.
     *
     * @parameter
     */
    private Map poolProperties;

    @Override
    protected AsAdminCmdList getAsCommandList()
        throws MojoExecutionException, MojoFailureException
    {
        // Fixed copy/paste: this mojo creates a JDBC pool, not an auth realm.
        getLog().info( "Creating JDBC connection pool: " + poolName );
        final AsAdminCmdList list = new AsAdminCmdList();
        final CreateJdbcConnectionPool cmd = new CreateJdbcConnectionPool( poolName ).withDataSource( poolDataSource ).withRestype( restype );
        if ( poolProperties != null && !poolProperties.isEmpty() )
        {
            // Iterate entries directly instead of keySet() plus per-key lookups.
            for ( final Object element : poolProperties.entrySet() )
            {
                final Map.Entry entry = (Map.Entry) element;
                cmd.addProperty( (String) entry.getKey(), (String) entry.getValue() );
            }
        }
        list.add( cmd );
        return list;
    }
}
|
eskatos/asadmin
|
asadmin-maven-plugin/src/main/java/org/n0pe/mojo/asadmin/CreateJdbcConnectionPoolMojo.java
|
Java
|
apache-2.0
| 2,248 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/imagebuilder/model/ComponentState.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace imagebuilder
{
namespace Model
{

// NOTE(review): this file follows the AWS SDK C++ code-generator layout and
// is normally regenerated rather than edited by hand.

// Default constructor: no fields set; status starts as NOT_SET.
ComponentState::ComponentState() : 
    m_status(ComponentStatus::NOT_SET),
    m_statusHasBeenSet(false),
    m_reasonHasBeenSet(false)
{
}

// Construct from a JSON payload by delegating to operator=.
ComponentState::ComponentState(JsonView jsonValue) : 
    m_status(ComponentStatus::NOT_SET),
    m_statusHasBeenSet(false),
    m_reasonHasBeenSet(false)
{
  *this = jsonValue;
}

// Populate the model from JSON; only fields present in the payload are
// marked as "has been set".
ComponentState& ComponentState::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("status"))
  {
    m_status = ComponentStatusMapper::GetComponentStatusForName(jsonValue.GetString("status"));

    m_statusHasBeenSet = true;
  }

  if(jsonValue.ValueExists("reason"))
  {
    m_reason = jsonValue.GetString("reason");

    m_reasonHasBeenSet = true;
  }

  return *this;
}

// Serialize only the fields that have been set back to JSON.
JsonValue ComponentState::Jsonize() const
{
  JsonValue payload;

  if(m_statusHasBeenSet)
  {
   payload.WithString("status", ComponentStatusMapper::GetNameForComponentStatus(m_status));
  }

  if(m_reasonHasBeenSet)
  {
   payload.WithString("reason", m_reason);
  }

  return payload;
}

} // namespace Model
} // namespace imagebuilder
} // namespace Aws
|
aws/aws-sdk-cpp
|
aws-cpp-sdk-imagebuilder/source/model/ComponentState.cpp
|
C++
|
apache-2.0
| 1,432 |
/*
* Licensed to the Technische Universität Darmstadt under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The Technische Universität Darmstadt
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.clarin.webanno.api.dao.export.exporters;
import static org.apache.commons.io.FileUtils.copyInputStreamToFile;
import static org.apache.commons.io.FileUtils.forceMkdir;
import java.io.File;
import java.io.IOException;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import de.tudarmstadt.ukp.clarin.webanno.api.ProjectService;
import de.tudarmstadt.ukp.clarin.webanno.api.export.ProjectExportRequest;
import de.tudarmstadt.ukp.clarin.webanno.api.export.ProjectExportTaskMonitor;
import de.tudarmstadt.ukp.clarin.webanno.api.export.ProjectExporter;
import de.tudarmstadt.ukp.clarin.webanno.api.export.ProjectImportRequest;
import de.tudarmstadt.ukp.clarin.webanno.export.model.ExportedProject;
import de.tudarmstadt.ukp.clarin.webanno.model.Project;
import de.tudarmstadt.ukp.clarin.webanno.support.ZipUtils;
@Component
public class GuildelinesExporter
    implements ProjectExporter
{
    public static final String GUIDELINE = "guideline";
    private static final String GUIDELINES_FOLDER = "/" + GUIDELINE;

    private final Logger log = LoggerFactory.getLogger(getClass());

    private @Autowired ProjectService projectService;

    /**
     * Copy the project's annotation guideline files from the file system of
     * this project to the export staging folder.
     */
    @Override
    public void exportData(ProjectExportRequest aRequest, ProjectExportTaskMonitor aMonitor,
            ExportedProject aExProject, File aStage)
        throws Exception
    {
        File guidelineDir = new File(aStage + GUIDELINES_FOLDER);
        // Use the statically imported forceMkdir consistently with importData.
        forceMkdir(guidelineDir);
        File annotationGuidlines = projectService.getGuidelinesFolder(aRequest.getProject());
        if (annotationGuidlines.exists()) {
            for (File annotationGuideline : annotationGuidlines.listFiles()) {
                FileUtils.copyFileToDirectory(annotationGuideline, guidelineDir);
            }
        }
    }

    /**
     * Copy guidelines from the exported project into the target project's
     * guidelines folder.
     *
     * @param aZip
     *            the ZIP file.
     * @param aProject
     *            the project.
     * @throws IOException
     *             if an I/O error occurs.
     */
    @Override
    public void importData(ProjectImportRequest aRequest, Project aProject,
            ExportedProject aExProject, ZipFile aZip)
        throws Exception
    {
        for (Enumeration<? extends ZipEntry> zipEnumerate = aZip.entries(); zipEnumerate
                .hasMoreElements();) {
            // The enumeration is already typed; the old explicit cast was redundant.
            ZipEntry entry = zipEnumerate.nextElement();
            // Strip leading "/" that we had in ZIP files prior to 2.0.8 (bug #985)
            String entryName = ZipUtils.normalizeEntryName(entry);
            if (entryName.startsWith(GUIDELINE + "/")) {
                String fileName = FilenameUtils.getName(entry.getName());
                // Skip directory entries, which have an empty file name.
                if (fileName.trim().isEmpty()) {
                    continue;
                }
                File guidelineDir = projectService.getGuidelinesFolder(aProject);
                forceMkdir(guidelineDir);
                copyInputStreamToFile(aZip.getInputStream(entry), new File(guidelineDir, fileName));
                // Parameterized logging avoids building the message when INFO is off.
                log.info("Imported guideline [{}] for project [{}] with id [{}]", fileName,
                        aProject.getName(), aProject.getId());
            }
        }
    }
}
|
webanno/webanno
|
webanno-api-dao/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/dao/export/exporters/GuildelinesExporter.java
|
Java
|
apache-2.0
| 4,420 |
# Copyright (c) 2016. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division, absolute_import

from .effect_collection import EffectCollection

from .effect_ordering import (
    effect_priority,
    top_priority_effect,
)

from .effect_prediction import (
    predict_variant_effects,
    predict_variant_effect_on_transcript,
    predict_variant_effect_on_transcript_or_failure,
)

from .effect_classes import (
    MutationEffect,
    TranscriptMutationEffect,
    NonsilentCodingMutation,
    Failure,
    IncompleteTranscript,
    Intergenic,
    Intragenic,
    NoncodingTranscript,
    Intronic,
    ThreePrimeUTR,
    FivePrimeUTR,
    Silent,
    Substitution,
    Insertion,
    Deletion,
    ComplexSubstitution,
    AlternateStartCodon,
    IntronicSpliceSite,
    ExonicSpliceSite,
    StopLoss,
    SpliceDonor,
    SpliceAcceptor,
    PrematureStop,
    FrameShiftTruncation,
    StartLoss,
    FrameShift,
    ExonLoss,
)

# Public API of varcode.effects. "IncompleteTranscript" was previously listed
# twice; the duplicate entry has been removed.
__all__ = [
    "EffectCollection",
    # effect ordering
    "effect_priority",
    "top_priority_effect",
    # prediction functions
    "predict_variant_effects",
    "predict_variant_effect_on_transcript",
    "predict_variant_effect_on_transcript_or_failure",
    # effect classes
    "MutationEffect",
    "TranscriptMutationEffect",
    "Failure",
    "IncompleteTranscript",
    "Intergenic",
    "Intragenic",
    "NoncodingTranscript",
    "ThreePrimeUTR",
    "FivePrimeUTR",
    "Intronic",
    "Silent",
    "NonsilentCodingMutation",
    "Substitution",
    "Insertion",
    "Deletion",
    "ComplexSubstitution",
    "AlternateStartCodon",
    "IntronicSpliceSite",
    "ExonicSpliceSite",
    "StopLoss",
    "SpliceDonor",
    "SpliceAcceptor",
    "PrematureStop",
    "FrameShiftTruncation",
    "StartLoss",
    "FrameShift",
    "ExonLoss",
]
|
hammerlab/varcode
|
varcode/effects/__init__.py
|
Python
|
apache-2.0
| 2,401 |
using System.Data.Common;
using System.Data.Entity.Infrastructure.Interception;
namespace Advertise.Utility.Persians
{
    /// <summary>
    /// EF6 command interceptor that applies Ye/Ke character correction (via
    /// <c>ApplyCorrectYeKe</c>) to every command before it executes, so that
    /// Arabic and Persian variants of these letters match consistently.
    /// </summary>
    public class PersianYeKeInterceptor : IDbCommandInterceptor
    {
        /// <summary>
        /// Applies Ye/Ke correction before a data-reader command executes.
        /// </summary>
        /// <param name="command">The command about to run.</param>
        /// <param name="interceptionContext">Interception context.</param>
        public void ReaderExecuting(DbCommand command, DbCommandInterceptionContext<DbDataReader> interceptionContext)
        {
            command.ApplyCorrectYeKe();
        }
        /// <summary>
        /// No-op: nothing to correct after a non-query has executed.
        /// </summary>
        /// <param name="command">The executed command.</param>
        /// <param name="interceptionContext">Interception context.</param>
        public void NonQueryExecuted(DbCommand command, DbCommandInterceptionContext<int> interceptionContext)
        {
        }
        /// <summary>
        /// Applies Ye/Ke correction before a non-query command executes.
        /// </summary>
        /// <param name="command">The command about to run.</param>
        /// <param name="interceptionContext">Interception context.</param>
        public void NonQueryExecuting(DbCommand command, DbCommandInterceptionContext<int> interceptionContext)
        {
            command.ApplyCorrectYeKe();
        }
        /// <summary>
        /// No-op: nothing to correct after a reader command has executed.
        /// </summary>
        /// <param name="command">The executed command.</param>
        /// <param name="interceptionContext">Interception context.</param>
        public void ReaderExecuted(DbCommand command, DbCommandInterceptionContext<DbDataReader> interceptionContext)
        {
        }
        /// <summary>
        /// No-op: nothing to correct after a scalar command has executed.
        /// </summary>
        /// <param name="command">The executed command.</param>
        /// <param name="interceptionContext">Interception context.</param>
        public void ScalarExecuted(DbCommand command, DbCommandInterceptionContext<object> interceptionContext)
        {
        }
        /// <summary>
        /// Applies Ye/Ke correction before a scalar command executes.
        /// </summary>
        /// <param name="command">The command about to run.</param>
        /// <param name="interceptionContext">Interception context.</param>
        public void ScalarExecuting(DbCommand command, DbCommandInterceptionContext<object> interceptionContext)
        {
            command.ApplyCorrectYeKe();
        }
    }
}
|
imangit/Advertise
|
Advertise/Advertise.Utility/Persians/PersianYeKeInterceptor.cs
|
C#
|
apache-2.0
| 2,025 |
package org.wikipedia.edit;
import android.support.annotation.Nullable;
import org.wikipedia.dataclient.mwapi.MwPostResponse;
/** Response wrapper for a MediaWiki "edit" API call. */
class Edit extends MwPostResponse {
    // Was @SuppressWarnings("unused,") - the trailing comma inside the string
    // made the suppression a silent no-op.
    @SuppressWarnings("unused") @Nullable private Result edit;

    @Nullable Result edit() {
        return edit;
    }

    boolean hasEditResult() {
        return edit != null;
    }

    /**
     * Result payload of the edit attempt. Made static, consistent with the
     * sibling Captcha class: Gson-populated nested classes should not capture
     * an enclosing instance.
     */
    static class Result {
        @SuppressWarnings("unused") @Nullable private String result;
        @SuppressWarnings("unused") private int newrevid;
        @SuppressWarnings("unused") @Nullable private Captcha captcha;
        @SuppressWarnings("unused") @Nullable private String code;
        @SuppressWarnings("unused") @Nullable private String spamblacklist;

        /** Raw status string from the API response, or null if absent. */
        @Nullable String status() {
            return result;
        }

        /** Revision id created by the edit (0 when the field was absent). */
        int newRevId() {
            return newrevid;
        }

        @Nullable String captchaId() {
            return captcha == null ? null : captcha.id();
        }

        boolean hasErrorCode() {
            return code != null;
        }

        boolean hasCaptchaResponse() {
            return captcha != null;
        }

        @Nullable String spamblacklist() {
            return spamblacklist;
        }

        boolean hasSpamBlacklistResponse() {
            return spamblacklist != null;
        }
    }

    private static class Captcha {
        @SuppressWarnings("unused") @Nullable private String id;

        @Nullable String id() {
            return id;
        }
    }
}
|
anirudh24seven/apps-android-wikipedia
|
app/src/main/java/org/wikipedia/edit/Edit.java
|
Java
|
apache-2.0
| 1,491 |
// Pull in the secondary module so webpack includes it in the bundle.
require('./second.js');

// Removed the unused "var i = 0;" left over in this example.
console.log('Hello Webpack!');
console.log('Webpack is cool.');
|
jitendraag/webpack-2-examples
|
example2/input.js
|
JavaScript
|
apache-2.0
| 99 |
CONTACT_HANDLE = 'contact'
NEW_CONTACT_HANDLE = 'new_contact'
OTHER_CONTACT_HANDLE = 'other_contact'
BLANK_CONTACT_HANDLE = ''
NON_EXISTING_CONTACT_HANDLE = 'non_existing'
def contact_does_not_exist handle = CONTACT_HANDLE
contact = Contact.find_by(handle: handle)
contact.delete if contact
end
def contact_exists handle = CONTACT_HANDLE, under: nil
contact_does_not_exist handle
other_partner = under
partner = other_partner ? Partner.find_by(name: under) : @current_partner
create :contact, partner: partner, handle: handle
end
def other_contact_exists
contact_exists OTHER_CONTACT_HANDLE
end
def create_contact with: { partner: nil, json_request: nil }
params = with
json_request ||= { handle: CONTACT_HANDLE }
json_request[:partner] = NON_ADMIN_PARTNER if @current_user.admin?
json_request[:partner] = params[:partner] if params[:partner]
json_request = params[:json_request] if params[:json_request]
post contacts_url, json_request
end
def update_contact with: { handle: nil, partner: nil }
params = with
json_request = {
name: 'new_name',
organization: 'new_organization',
street: 'new_street',
street2: 'new_street2',
street3: 'new_street3',
city: 'new_city',
state: 'new_state',
postal_code: 'new_postal_code',
country_code: 'new_country',
local_name: 'New local name',
local_organization: 'New local organization',
local_street: 'New local street',
local_street2: 'New local street 2',
local_street3: 'New local street 3',
local_city: 'New local city',
local_state: 'New local state',
local_postal_code: 'New local postal code',
local_country_code: 'New local country code',
voice: 'new_phone',
voice_ext: '1234',
fax: 'new_fax',
fax_ext: '1234',
email: 'new_email@contact.ph',
}
json_request[:handle] = params[:handle] if params[:handle]
json_request[:partner] = params[:partner] if params[:partner]
patch contact_path(CONTACT_HANDLE), json_request
end
def assert_contact_created
assert_response_status_must_be_created
expected_response = {
handle: CONTACT_HANDLE,
name: nil,
organization: nil,
street: nil,
street2: nil,
street3: nil,
city: nil,
state: nil,
postal_code: nil,
country_code: nil,
local_name: nil,
local_organization: nil,
local_street: nil,
local_street2: nil,
local_street3: nil,
local_city: nil,
local_state: nil,
local_postal_code: nil,
local_country_code: nil,
voice: nil,
voice_ext: nil,
fax: nil,
fax_ext: nil,
email: nil,
}
json_response.must_equal expected_response
Contact.find_by(handle: CONTACT_HANDLE).wont_be_nil
end
def assert_create_contact_history_created
assert_contact_history_created
end
def assert_update_contact_history_created
assert_contact_history_created count: 2
end
def assert_contact_history_created count: 1
contact = Contact.find_by(handle: CONTACT_HANDLE)
contact.contact_histories.count.must_equal count
assert_contact_history contact.contact_histories.last, contact
end
def assert_contact_history contact_history, contact
contact_history.handle.must_equal contact.handle
contact_history.partner.must_equal contact.partner
contact_history.name.must_equal contact.name
contact_history.organization.must_equal contact.organization
contact_history.street.must_equal contact.street
contact_history.street2.must_equal contact.street2
contact_history.street3.must_equal contact.street3
contact_history.city.must_equal contact.city
contact_history.state.must_equal contact.state
contact_history.postal_code.must_equal contact.postal_code
contact_history.country_code.must_equal contact.country_code
contact_history.local_name.must_equal contact.local_name
contact_history.local_organization.must_equal contact.local_organization
contact_history.local_street.must_equal contact.local_street
contact_history.local_street2.must_equal contact.local_street2
contact_history.local_street3.must_equal contact.local_street3
contact_history.local_city.must_equal contact.local_city
contact_history.local_state.must_equal contact.local_state
contact_history.local_postal_code.must_equal contact.local_postal_code
contact_history.local_country_code.must_equal contact.local_country_code
contact_history.voice.must_equal contact.voice
contact_history.voice_ext.must_equal contact.voice_ext
contact_history.fax.must_equal contact.fax
contact_history.fax_ext.must_equal contact.fax_ext
contact_history.email.must_equal contact.email
end
# Asserts the update response echoes every field with its new value.
# The hash below is the full expected JSON payload for the updated contact;
# keep it in sync with the update fixture used by the request helpers.
def assert_contact_updated
  assert_response_status_must_be_ok
  expected_response = {
    handle: CONTACT_HANDLE,
    name: 'new_name',
    organization: 'new_organization',
    street: 'new_street',
    street2: 'new_street2',
    street3: 'new_street3',
    city: 'new_city',
    state: 'new_state',
    postal_code: 'new_postal_code',
    country_code: 'new_country',
    local_name: 'New local name',
    local_organization: 'New local organization',
    local_street: 'New local street',
    local_street2: 'New local street 2',
    local_street3: 'New local street 3',
    local_city: 'New local city',
    local_state: 'New local state',
    local_postal_code: 'New local postal code',
    local_country_code: 'New local country code',
    voice: 'new_phone',
    voice_ext: '1234',
    fax: 'new_fax',
    fax_ext: '1234',
    email: 'new_email@contact.ph'
  }
  json_response.must_equal expected_response
end
def assert_contacts_displayed
  # Both seeded contacts must come back, matching the canned payload exactly.
  assert_response_status_must_be_ok
  json_response.tap do |body|
    body.length.must_equal 2
    body.must_equal contacts_response
  end
end
def assert_no_contacts_displayed
  # With no contacts in the system the index renders an empty collection.
  assert_response_status_must_be_ok
  json_response.size.must_equal 0
end
# Issues the contacts index request; assertions are done by the callers above.
def view_contacts
  get contacts_path
end
# Canned expected payload for the contacts index: two contacts identified
# only by handle, with every other serialized attribute nil.
def contacts_response
  [
    {:handle=>"contact", :name=>nil, :organization=>nil, :street=>nil, :street2=>nil, :street3=>nil, :city=>nil, :state=>nil, :postal_code=>nil, :country_code=>nil, :local_name=>nil, :local_organization=>nil, :local_street=>nil, :local_street2=>nil, :local_street3=>nil, :local_city=>nil, :local_state=>nil, :local_postal_code=>nil, :local_country_code=>nil, :voice=>nil, :voice_ext=>nil, :fax=>nil, :fax_ext=>nil, :email=>nil},
    {:handle=>"other_contact", :name=>nil, :organization=>nil, :street=>nil, :street2=>nil, :street3=>nil, :city=>nil, :state=>nil, :postal_code=>nil, :country_code=>nil, :local_name=>nil, :local_organization=>nil, :local_street=>nil, :local_street2=>nil, :local_street3=>nil, :local_city=>nil, :local_state=>nil, :local_postal_code=>nil, :local_country_code=>nil, :voice=>nil, :voice_ext=>nil, :fax=>nil, :fax_ext=>nil, :email=>nil}
  ]
end
|
dotph/registry
|
features/services/contact_service.rb
|
Ruby
|
apache-2.0
| 6,724 |
/*
* Copyright 2014-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.dockside.dbflute.bsbhv.pmbean;
import java.util.*;
import org.dbflute.outsidesql.paging.SimplePagingBean;
import org.dbflute.outsidesql.typed.*;
import org.dbflute.jdbc.*;
import org.dbflute.cbean.coption.LikeSearchOption;
import org.dbflute.outsidesql.PmbCustodial;
import org.dbflute.util.DfTypeUtil;
import org.docksidestage.dockside.dbflute.allcommon.*;
import org.docksidestage.dockside.dbflute.exbhv.*;
import org.docksidestage.dockside.dbflute.exentity.customize.*;
/**
 * The base class for typed parameter-bean of PurchaseMaxPriceMember. <br>
 * This is related to "<span style="color: #AD4747">selectPurchaseMaxPriceMember</span>" on MemberBhv.
 * <p>
 * NOTE(review): this class is auto-generated by DBFlute; hand edits will be
 * overwritten on the next generation run. Customize behavior in the Ex-class.
 * @author DBFlute(AutoGenerator)
 */
public class BsPurchaseMaxPriceMemberPmb extends SimplePagingBean implements EntityHandlingPmb<MemberBhv, PurchaseMaxPriceMember>, ManualPagingHandlingPmb<MemberBhv, PurchaseMaxPriceMember>, FetchBean {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** The parameter of memberId. */
    protected Integer _memberId;

    /** The parameter of memberNameList:likePrefix. */
    protected List<String> _memberNameList;

    /** The option of like-search for memberNameList. */
    protected LikeSearchOption _memberNameListInternalLikeSearchOption;

    /** The parameter of memberStatusCodeList:cls(MemberStatus). */
    protected List<org.docksidestage.dockside.dbflute.allcommon.CDef.MemberStatus> _memberStatusCodeList;

    /** The time-zone for filtering e.g. from-to. (NullAllowed: if null, default zone) */
    protected TimeZone _timeZone;

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    /**
     * Constructor for the typed parameter-bean of PurchaseMaxPriceMember. <br>
     * This is related to "<span style="color: #AD4747">selectPurchaseMaxPriceMember</span>" on MemberBhv.
     */
    public BsPurchaseMaxPriceMemberPmb() {
        // honor the global configuration for deferring the paging count query
        if (DBFluteConfig.getInstance().isPagingCountLater()) {
            enablePagingCountLater();
        }
    }

    // ===================================================================================
    //                                                                Typed Implementation
    //                                                                ====================
    /**
     * {@inheritDoc}
     */
    public String getOutsideSqlPath() { return "selectPurchaseMaxPriceMember"; }

    /**
     * Get the type of an entity for result. (implementation)
     * @return The type instance of an entity, customize entity. (NotNull)
     */
    public Class<PurchaseMaxPriceMember> getEntityType() { return PurchaseMaxPriceMember.class; }

    // ===================================================================================
    //                                                                       Assist Helper
    //                                                                       =============
    // -----------------------------------------------------
    //                                                String
    //                                                ------
    // empty string parameters are converted to null unless explicitly allowed by config
    protected String filterStringParameter(String value) { return isEmptyStringParameterAllowed() ? value : convertEmptyToNull(value); }
    protected boolean isEmptyStringParameterAllowed() { return DBFluteConfig.getInstance().isEmptyStringParameterAllowed(); }
    protected String convertEmptyToNull(String value) { return PmbCustodial.convertEmptyToNull(value); }
    protected void assertLikeSearchOptionValid(String name, LikeSearchOption option) { PmbCustodial.assertLikeSearchOptionValid(name, option); }

    // -----------------------------------------------------
    //                                                  Date
    //                                                  ----
    protected Date toUtilDate(Object date) { return PmbCustodial.toUtilDate(date, _timeZone); }
    protected <DATE> DATE toLocalDate(Date date, Class<DATE> localType) { return PmbCustodial.toLocalDate(date, localType, chooseRealTimeZone()); }
    // falls back to the DBFlute system zone when no explicit zone was set
    protected TimeZone chooseRealTimeZone() { return PmbCustodial.chooseRealTimeZone(_timeZone); }

    /**
     * Set time-zone, basically for LocalDate conversion. <br>
     * Normally you don't need to set this, you can adjust other ways. <br>
     * (DBFlute system's time-zone is used as default)
     * @param timeZone The time-zone for filtering. (NullAllowed: if null, default zone)
     */
    public void zone(TimeZone timeZone) { _timeZone = timeZone; }

    // -----------------------------------------------------
    //                                    by Option Handling
    //                                    ------------------
    // might be called by option handling
    protected <NUMBER extends Number> NUMBER toNumber(Object obj, Class<NUMBER> type) { return PmbCustodial.toNumber(obj, type); }
    protected Boolean toBoolean(Object obj) { return PmbCustodial.toBoolean(obj); }
    @SuppressWarnings("unchecked")
    protected <ELEMENT> ArrayList<ELEMENT> newArrayList(ELEMENT... elements) { return PmbCustodial.newArrayList(elements); }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    /**
     * @return The display string of all parameters. (NotNull)
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append(DfTypeUtil.toClassTitle(this)).append(":");
        sb.append(xbuildColumnString());
        return sb.toString();
    }
    protected String xbuildColumnString() {
        final String dm = ", ";
        final StringBuilder sb = new StringBuilder();
        sb.append(dm).append(_memberId);
        sb.append(dm).append(_memberNameList);
        sb.append(dm).append(_memberStatusCodeList);
        // strip the leading delimiter, then wrap the whole thing in braces
        if (sb.length() > 0) { sb.delete(0, dm.length()); }
        sb.insert(0, "{").append("}");
        return sb.toString();
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * [get] memberId <br>
     * // not required / used as equal
     * @return The value of memberId. (NullAllowed, NotEmptyString(when String): if empty string, returns null)
     */
    public Integer getMemberId() {
        return _memberId;
    }

    /**
     * [set] memberId <br>
     * // not required / used as equal
     * @param memberId The value of memberId. (NullAllowed)
     */
    public void setMemberId(Integer memberId) {
        _memberId = memberId;
    }

    /**
     * [get] memberNameList:likePrefix <br>
     * // list of prefix keyword
     * @return The value of memberNameList. (NullAllowed, NotEmptyString(when String): if empty string, returns null)
     */
    public List<String> getMemberNameList() {
        return _memberNameList;
    }

    /**
     * [set as prefixSearch] memberNameList:likePrefix <br>
     * // list of prefix keyword
     * @param memberNameList The value of memberNameList. (NullAllowed)
     */
    public void setMemberNameList_PrefixSearch(List<String> memberNameList) {
        _memberNameList = memberNameList;
        // the like-search option is fixed to prefix matching by the outside-SQL mark
        _memberNameListInternalLikeSearchOption = new LikeSearchOption().likePrefix();
    }

    /**
     * Get the internal option of likeSearch for memberNameList. {Internal Method: Don't invoke this}
     * @return The internal option of likeSearch for memberNameList. (NullAllowed)
     */
    public LikeSearchOption getMemberNameListInternalLikeSearchOption() {
        return _memberNameListInternalLikeSearchOption;
    }

    /**
     * [get] memberStatusCodeList:cls(MemberStatus) <br>
     * @return The value of memberStatusCodeList. (NullAllowed, NotEmptyString(when String): if empty string, returns null)
     */
    public List<org.docksidestage.dockside.dbflute.allcommon.CDef.MemberStatus> getMemberStatusCodeList() {
        return _memberStatusCodeList;
    }

    /**
     * [set] memberStatusCodeList:cls(MemberStatus) <br>
     * @param memberStatusCodeList The value of memberStatusCodeList. (NullAllowed)
     */
    public void setMemberStatusCodeList(List<org.docksidestage.dockside.dbflute.allcommon.CDef.MemberStatus> memberStatusCodeList) {
        _memberStatusCodeList = memberStatusCodeList;
    }
}
|
dbflute-test/dbflute-test-active-dockside
|
src/main/java/org/docksidestage/dockside/dbflute/bsbhv/pmbean/BsPurchaseMaxPriceMemberPmb.java
|
Java
|
apache-2.0
| 9,658 |
package com.annimon.stream.longstreamtests;
import com.annimon.stream.LongStream;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
public final class SumTest {

    @Test
    public void testSum() {
        // Summing several values, then the edge case of an empty stream.
        final long total = LongStream.of(100, 20, 3).sum();
        assertThat(total, is(123L));
        assertThat(LongStream.empty().sum(), is(0L));
    }
}
|
aNNiMON/Lightweight-Stream-API
|
stream/src/test/java/com/annimon/stream/longstreamtests/SumTest.java
|
Java
|
apache-2.0
| 393 |
package de.fhg.iais.cortex.services.institution;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;

import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.annotate.JsonPropertyOrder;
import org.codehaus.jackson.map.annotate.JsonSerialize;
/**
 * JSON view model for an institution, carrying its display data, its place in the
 * institution hierarchy ({@code children}/{@code level}) and item statistics.
 * <p>
 * NOTE(review): {@code level} and {@code detailViewUri} are mutable yet take part in
 * {@link #equals(Object)}/{@link #hashCode()}; do not mutate instances while they are
 * used as keys in hash-based collections. {@code children} is deliberately excluded
 * from equality.
 */
@JsonPropertyOrder({
    "id", "name", "sector", "latitude", "longitude", "locationDisplayName", "hasItems", "numberOfItems", "children", "level", "detailViewUri"
})
@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
public class Institution {

    @JsonProperty("id")
    private final String id;

    @JsonProperty("name")
    private final String name;

    @JsonProperty("sector")
    private final String sector;

    @JsonProperty("latitude")
    private final String latitude;

    @JsonProperty("longitude")
    private final String longitude;

    @JsonProperty("locationDisplayName")
    private final String locationDisplayName;

    @JsonProperty("hasItems")
    private final boolean hasItems;

    @JsonProperty("numberOfItems")
    private final long numberOfItems;

    @JsonProperty("children")
    private final List<Institution> children;

    /** Depth in the institution tree; -1 until assigned. */
    @JsonProperty("level")
    private int level;

    @JsonProperty("detailViewUri")
    private String detailViewUri;

    /*
     * Only needed for automatic serialization and deserialization.
     */
    @SuppressWarnings("unused")
    private Institution() {
        this(null, null, null, null, null, null, false, 0);
    }

    /**
     * Creates an institution with the given display data.
     * Children start empty, {@code level} starts at -1 and
     * {@code detailViewUri} starts as {@code null}.
     */
    public Institution(
            String id,
            String name,
            String sector,
            String latitude,
            String longitude,
            String locationDisplayName,
            boolean hasItems,
            long numberOfItems) {
        super();
        this.id = id;
        this.name = name;
        this.sector = sector;
        this.latitude = latitude;
        this.longitude = longitude;
        this.locationDisplayName = locationDisplayName;
        this.hasItems = hasItems;
        this.numberOfItems = numberOfItems;
        this.children = new LinkedList<Institution>();
        this.level = -1;
        this.detailViewUri = null;
    }

    public String getId() {
        return this.id;
    }

    public String getName() {
        return this.name;
    }

    public String getSector() {
        return this.sector;
    }

    public String getLatitude() {
        return this.latitude;
    }

    public String getLongitude() {
        return this.longitude;
    }

    /** Returns the live child list (mutations are visible to this instance). */
    public List<Institution> getChildren() {
        return this.children;
    }

    public void addChild(Institution institution) {
        this.children.add(institution);
    }

    /** Replaces all children with the given list's elements. */
    public void setChildren(List<Institution> objects) {
        this.children.clear();
        this.children.addAll(objects);
    }

    public int getLevel() {
        return this.level;
    }

    public void setLevel(int level) {
        this.level = level;
    }

    public void setDetailViewUri(String detailViewUri) {
        this.detailViewUri = detailViewUri;
    }

    public String getLocationDisplayName() {
        return this.locationDisplayName;
    }

    public String getDetailViewUri() {
        return this.detailViewUri;
    }

    public boolean hasItems() {
        return this.hasItems;
    }

    public long getNumberOfItems() {
        return this.numberOfItems;
    }

    /**
     * Deep copy: the returned institution has recursively copied children, but
     * {@code level} and {@code detailViewUri} are reset to their defaults
     * (matching the original implementation's behavior).
     */
    public Institution copy() {
        Institution newInstitution =
            new Institution(this.id, this.name, this.sector, this.latitude, this.longitude, this.locationDisplayName, this.hasItems, this.numberOfItems);
        for ( Institution inst : this.children ) {
            newInstitution.getChildren().add(inst.copy());
        }
        return newInstitution;
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation, field order, null->0
        // and boxed-primitive hashing as the previous hand-rolled version, so
        // hash values are unchanged. children is excluded, consistent with equals().
        return Objects.hash(this.detailViewUri, this.hasItems, this.id, this.latitude,
            this.level, this.locationDisplayName, this.longitude, this.name,
            this.numberOfItems, this.sector);
    }

    @Override
    public boolean equals(Object obj) {
        if ( this == obj ) {
            return true;
        }
        if ( (obj == null) || (getClass() != obj.getClass()) ) {
            return false;
        }
        Institution other = (Institution) obj;
        // children is intentionally not compared, mirroring hashCode().
        return (this.hasItems == other.hasItems)
            && (this.level == other.level)
            && (this.numberOfItems == other.numberOfItems)
            && Objects.equals(this.detailViewUri, other.detailViewUri)
            && Objects.equals(this.id, other.id)
            && Objects.equals(this.latitude, other.latitude)
            && Objects.equals(this.locationDisplayName, other.locationDisplayName)
            && Objects.equals(this.longitude, other.longitude)
            && Objects.equals(this.name, other.name)
            && Objects.equals(this.sector, other.sector);
    }

    @Override
    public String toString() {
        return "Institution [id="
            + this.id
            + ", name="
            + this.name
            + ", sector="
            + this.sector
            + ", latitude="
            + this.latitude
            + ", longitude="
            + this.longitude
            + ", locationDisplayName="
            + this.locationDisplayName
            + ", hasItems="
            + this.hasItems
            + ", numberOfItems="
            + this.numberOfItems
            + ", children="
            + this.children
            + ", level="
            + this.level
            + ", detailViewUri="
            + this.detailViewUri
            + "]";
    }
}
|
Deutsche-Digitale-Bibliothek/ddb-backend
|
CoreServices/src/main/java/de/fhg/iais/cortex/services/institution/Institution.java
|
Java
|
apache-2.0
| 7,623 |